diff --git a/.flake8 b/.flake8 index 90316de214..028b384a3d 100644 --- a/.flake8 +++ b/.flake8 @@ -16,7 +16,7 @@ [flake8] # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): # Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 +ignore = E203, E231, E266, E501, W503, E704 exclude = # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): # Ensure that generated code passes flake8 lint diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 7161b392c4..aeaa0d7cf6 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.111.0" + ".": "1.112.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 8c57315fe1..634f1d5d06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## [1.112.0](https://github.com/googleapis/python-aiplatform/compare/v1.111.0...v1.112.0) (2025-09-09) + + +### Features + +* GenAI SDK client - Add A2A support in Agent Engine ([d500945](https://github.com/googleapis/python-aiplatform/commit/d5009458599c4e5eb1b9c4fea1346eafdaa778bc)) +* GenAI SDK client - Add Observability GenAI data format converter for evals ([0665566](https://github.com/googleapis/python-aiplatform/commit/0665566ac606b599177d4dca0643559df0540439)) +* GenAI SDK client - Add orderBy to ListMemories ([c6872f0](https://github.com/googleapis/python-aiplatform/commit/c6872f02fe8b45eb07cdd64c5d4bf9e3ac33226b)) +* GenAI SDK client - Add support for loading evaluation datasets from Observability GCS sources ([bfcccc9](https://github.com/googleapis/python-aiplatform/commit/bfcccc975c7720286908d609688e01c902b675c5)) +* GenAI SDK client - Add the service account option for the agent engine ([219c33f](https://github.com/googleapis/python-aiplatform/commit/219c33fd5fb628ec3945df91347e74710c1318b3)) +* Adding `gpu_partition_size` parameter to Model.deploy() method. 
([966c236](https://github.com/googleapis/python-aiplatform/commit/966c236fef9bb800b8ce6a9235c6bb06b3203c66)) +* Change `gpu_partition_size` type hint to `str`. ([910016d](https://github.com/googleapis/python-aiplatform/commit/910016d71a7bc1eb380613e44b7ae2d7683d9769)) +* GenAI SDK client - Add `api_key` parameter to vertexai.Client ([a9ffc60](https://github.com/googleapis/python-aiplatform/commit/a9ffc6030dc58ed4acd269632033cbee23dd470a)) +* GenAI SDK client - Add experimental prompt_management module with create_version and get methods ([d5a14ba](https://github.com/googleapis/python-aiplatform/commit/d5a14ba462684a23afb7f15002fb603038562e7c)) +* GenAI SDK client - Add live/bidi agent deployment support for Agent Engine ([74e3f25](https://github.com/googleapis/python-aiplatform/commit/74e3f25e1b2e50935d0ef6a8bc06472921cae32d)) +* GenAI SDK client - Add sandbox code execution SDK support ([74e52d9](https://github.com/googleapis/python-aiplatform/commit/74e52d957035d5bc9d63bc0912b9785522930487)) +* GenAI SDK client - Remove experimental warning from agent_engines module ([42c3c9c](https://github.com/googleapis/python-aiplatform/commit/42c3c9c5546b7f02c07818934fcc804a882a9703)) +* GenAI SDK client - Support bidi stream query in agent engines and ADK template. ([456249e](https://github.com/googleapis/python-aiplatform/commit/456249efacd7cebf274f6f28316e475a6271dfa3)) + + +### Bug Fixes + +* Allow for inheritance of clone behavior in AdkApp ([e5ced93](https://github.com/googleapis/python-aiplatform/commit/e5ced937033f0b194ccad6acc65ea03869d1847b)) +* GenAI SDK client - Decrease polling interval for GenerateMemories and CreateSession (currently 10s) ([77a3933](https://github.com/googleapis/python-aiplatform/commit/77a39330473b9d04fdad6bf2ee87b5b5ce9cd696)) +* GenAI SDK client - Fix deepcopy for RubricGenerationConfig in RubricBasedMetric. 
([e39f0bd](https://github.com/googleapis/python-aiplatform/commit/e39f0bd4afbf2cc9f32bffd2a801c2f48c1f9039)) +* GenAI SDK client - Handle empty GenerateMemories response (no memories generated) as valid response. ([99f0078](https://github.com/googleapis/python-aiplatform/commit/99f0078f0165bc33c8f001e310ea9b6823b00974)) + ## [1.111.0](https://github.com/googleapis/python-aiplatform/compare/v1.110.0...v1.111.0) (2025-08-27) diff --git a/README.rst b/README.rst index 71c84a1fe2..70aa34f40b 100644 --- a/README.rst +++ b/README.rst @@ -1,14 +1,6 @@ Vertex AI SDK for Python ================================================= -.. note:: - - The following Generative AI modules in the Vertex AI SDK are deprecated as of June 24, 2025 and will be removed on June 24, 2026: - `vertexai.generative_models`, `vertexai.language_models`, `vertexai.vision_models`, `vertexai.tuning`, `vertexai.caching`. Please use the - [Google Gen AI SDK](https://pypi.org/project/google-genai/) to access these features. See - [the migration guide](https://cloud.google.com/vertex-ai/generative-ai/docs/deprecations/genai-vertexai-sdk) for details. - You can continue using all other Vertex AI SDK modules, as they are the recommended way to use the API. - |GA| |pypi| |versions| |unit-tests| |system-tests| |sample-tests| `Vertex AI`_: Google Vertex AI is an integrated suite of machine learning tools and services for building and using ML models with AutoML or custom code. It offers both novices and experts the best workbench for the entire machine learning development lifecycle. @@ -32,18 +24,10 @@ Vertex AI SDK for Python .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/aiplatform/latest .. _Product Documentation: https://cloud.google.com/vertex-ai/docs -Gemini API and Generative AI on Vertex AI ------------------------------------------ - -.. note:: - - For Gemini API and Generative AI on Vertex AI, please reference `Vertex Generative AI SDK for Python`_ -.. 
_Vertex Generative AI SDK for Python: https://cloud.google.com/vertex-ai/generative-ai/docs/reference/python/latest - -Using the Google Gen AI SDK client from the Vertex AI SDK (Experimental) +Generative AI in the Vertex AI SDK ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To use features from the Google Gen AI SDK from the Vertex AI SDK, you can instantiate the client with the following: +To use Gen AI features from the Vertex AI SDK, you can instantiate a Gen AI client with the following: .. code-block:: Python @@ -99,8 +83,70 @@ Then run evaluation by providing the inference results and specifying the metric ] ) +Prompt optimization +^^^^^^^^^^^^^^^^^^^ + +To do a zero-shot prompt optimization, use the `optimize_prompt` +method. + +.. code-block:: Python + + prompt = "Generate system instructions for a question-answering assistant" + response = client.prompt_optimizer.optimize_prompt(prompt=prompt) + + print(response.suggested_prompt) + +To call the data-driven prompt optimization, call the `optimize` method. +In this case however, we need to provide `vapo_config`. This config needs to +have either service account or project **number** and the config path. +Please refer to this [tutorial](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/prompts/data-driven-optimizer) +for more details on config parameter. + +.. 
code-block:: Python + + from vertexai import types + + project_number = PROJECT_NUMBER # replace with your project number + service_account = f"{project_number}-compute@developer.gserviceaccount.com" + + vapo_config = vertexai.types.PromptOptimizerVAPOConfig( + config_path="gs://your-bucket/config.json", + service_account_project_number=project_number, + wait_for_completion=False + ) + + # Set up logging to see the progress of the optimization job + logging.basicConfig(encoding='utf-8', level=logging.INFO, force=True) + + result = client.prompt_optimizer.optimize(method="vapo", config=vapo_config) + +If you want to use the project number instead of the service account, you can +instead use the following config: + +.. code-block:: Python + + vapo_config = vertexai.types.PromptOptimizerVAPOConfig( + config_path="gs://your-bucket/config.json", + service_account_project_number=project_number, + wait_for_completion=False + ) + +We can also call optimize method async. + +.. code-block:: Python + + await client.aio.prompt_optimizer.optimize(method="vapo", config=vapo_config) + ----------------------------------------- +.. note:: + + The following Generative AI modules in the Vertex AI SDK are deprecated as of June 24, 2025 and will be removed on June 24, 2026: + `vertexai.generative_models`, `vertexai.language_models`, `vertexai.vision_models`, `vertexai.tuning`, `vertexai.caching`. Please use the + [Google Gen AI SDK](https://pypi.org/project/google-genai/) to access these features. See + [the migration guide](https://cloud.google.com/vertex-ai/generative-ai/docs/deprecations/genai-vertexai-sdk) for details. + You can continue using all other Vertex AI SDK modules, as they are the recommended way to use the API. 
+ Quick Start ----------- diff --git a/google/cloud/aiplatform/_streaming_prediction.py b/google/cloud/aiplatform/_streaming_prediction.py index 16a1d1331b..9225519c26 100644 --- a/google/cloud/aiplatform/_streaming_prediction.py +++ b/google/cloud/aiplatform/_streaming_prediction.py @@ -130,7 +130,9 @@ async def predict_stream_of_tensor_lists_from_single_tensor_list_async( inputs=tensor_list, parameters=parameters_tensor, ) - async for response in await prediction_service_async_client.server_streaming_predict( + async for ( + response + ) in await prediction_service_async_client.server_streaming_predict( request=request ): yield response.outputs @@ -183,7 +185,9 @@ async def predict_stream_of_dict_lists_from_single_dict_list_async( """ tensor_list = [value_to_tensor(d) for d in dict_list] parameters_tensor = value_to_tensor(parameters) if parameters else None - async for tensor_list in predict_stream_of_tensor_lists_from_single_tensor_list_async( + async for ( + tensor_list + ) in predict_stream_of_tensor_lists_from_single_tensor_list_async( prediction_service_async_client=prediction_service_async_client, endpoint_name=endpoint_name, tensor_list=tensor_list, diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index 778e0f4c67..cca3d69064 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -639,7 +639,6 @@ def _get_and_validate_project_location( project: Optional[str] = None, location: Optional[str] = None, ) -> Tuple[str, str]: - """Validate the project and location for the resource. Args: @@ -1505,14 +1504,10 @@ class PreviewMixin(abc.ABC): class allows the child class to introduce preview features. """ - @classmethod - @property - @abc.abstractmethod - def _preview_class(cls: Type[PreviewClass]) -> Type[PreviewClass]: - """Class that is currently in preview or has a preview feature. - Class must have `resource_name` and `credentials` attributes. 
- """ - pass + _preview_class: Type[PreviewClass] + """Class that is currently in preview or has a preview feature. + Class must have `resource_name` and `credentials` attributes. + """ @property def preview(self) -> PreviewClass: diff --git a/google/cloud/aiplatform/datasets/__init__.py b/google/cloud/aiplatform/datasets/__init__.py index 0f6b7f42fa..10d2663ea9 100644 --- a/google/cloud/aiplatform/datasets/__init__.py +++ b/google/cloud/aiplatform/datasets/__init__.py @@ -16,9 +16,13 @@ # from google.cloud.aiplatform.datasets.dataset import _Dataset -from google.cloud.aiplatform.datasets.column_names_dataset import _ColumnNamesDataset +from google.cloud.aiplatform.datasets.column_names_dataset import ( + _ColumnNamesDataset, +) from google.cloud.aiplatform.datasets.tabular_dataset import TabularDataset -from google.cloud.aiplatform.datasets.time_series_dataset import TimeSeriesDataset +from google.cloud.aiplatform.datasets.time_series_dataset import ( + TimeSeriesDataset, +) from google.cloud.aiplatform.datasets.image_dataset import ImageDataset from google.cloud.aiplatform.datasets.text_dataset import TextDataset from google.cloud.aiplatform.datasets.video_dataset import VideoDataset diff --git a/google/cloud/aiplatform/docker_utils/build.py b/google/cloud/aiplatform/docker_utils/build.py index e9c7d63579..acf7ca0393 100644 --- a/google/cloud/aiplatform/docker_utils/build.py +++ b/google/cloud/aiplatform/docker_utils/build.py @@ -223,7 +223,7 @@ def _prepare_exposed_ports(exposed_ports: Optional[List[int]] = None) -> str: def _prepare_environment_variables( - environment_variables: Optional[Dict[str, str]] = None + environment_variables: Optional[Dict[str, str]] = None, ) -> str: """Returns the Dockerfile entries required to set environment variables in containers. 
diff --git a/google/cloud/aiplatform/docker_utils/run.py b/google/cloud/aiplatform/docker_utils/run.py index 496d977a7d..155ee39f3a 100644 --- a/google/cloud/aiplatform/docker_utils/run.py +++ b/google/cloud/aiplatform/docker_utils/run.py @@ -30,7 +30,9 @@ ) from google.cloud.aiplatform.constants import prediction -from google.cloud.aiplatform.docker_utils.utils import DEFAULT_MOUNTED_MODEL_DIRECTORY +from google.cloud.aiplatform.docker_utils.utils import ( + DEFAULT_MOUNTED_MODEL_DIRECTORY, +) from google.cloud.aiplatform.utils import prediction_utils _logger = logging.getLogger(__name__) diff --git a/google/cloud/aiplatform/explain/lit.py b/google/cloud/aiplatform/explain/lit.py index 6d388f4559..4e2ca9c1b3 100644 --- a/google/cloud/aiplatform/explain/lit.py +++ b/google/cloud/aiplatform/explain/lit.py @@ -311,7 +311,9 @@ def _set_up_attribution_explainer( """ try: import explainable_ai_sdk - from explainable_ai_sdk.metadata.tf.v2 import SavedModelMetadataBuilder + from google3.third_party.explainable_ai_sdk.sdk.metadata.tf.v2.saved_model_metadata_builder import ( + SavedModelMetadataBuilder, + ) except ImportError: logging.info( "Skipping explanations because the Explainable AI SDK is not installed." diff --git a/google/cloud/aiplatform/explain/metadata/tf/v1/saved_model_metadata_builder.py b/google/cloud/aiplatform/explain/metadata/tf/v1/saved_model_metadata_builder.py index c9fc2d0e22..ab98600c0d 100644 --- a/google/cloud/aiplatform/explain/metadata/tf/v1/saved_model_metadata_builder.py +++ b/google/cloud/aiplatform/explain/metadata/tf/v1/saved_model_metadata_builder.py @@ -126,7 +126,7 @@ def get_metadata_protobuf(self) -> explanation_metadata.ExplanationMetadata: def _create_input_metadata_from_signature( - signature_inputs: Dict[str, "tf.Tensor"] # noqa: F821 + signature_inputs: Dict[str, "tf.Tensor"], # noqa: F821 ) -> Dict[str, explanation_metadata.ExplanationMetadata.InputMetadata]: """Creates InputMetadata from signature inputs. 
diff --git a/google/cloud/aiplatform/explain/metadata/tf/v2/saved_model_metadata_builder.py b/google/cloud/aiplatform/explain/metadata/tf/v2/saved_model_metadata_builder.py index 7d19e5680d..2df03b3044 100644 --- a/google/cloud/aiplatform/explain/metadata/tf/v2/saved_model_metadata_builder.py +++ b/google/cloud/aiplatform/explain/metadata/tf/v2/saved_model_metadata_builder.py @@ -75,9 +75,7 @@ def __init__( signature_name ) - def _infer_metadata_entries_from_model( - self, signature_name: str - ) -> Tuple[ + def _infer_metadata_entries_from_model(self, signature_name: str) -> Tuple[ Dict[str, explanation_metadata.ExplanationMetadata.InputMetadata], Dict[str, explanation_metadata.ExplanationMetadata.OutputMetadata], ]: @@ -107,10 +105,10 @@ def _infer_metadata_entries_from_model( output_mds = {} for name in output_sig: if not self._explain_output or self._explain_output[0] == name: - output_mds[ - name - ] = explanation_metadata.ExplanationMetadata.OutputMetadata( - output_tensor_name=name, + output_mds[name] = ( + explanation_metadata.ExplanationMetadata.OutputMetadata( + output_tensor_name=name, + ) ) break else: diff --git a/google/cloud/aiplatform/featurestore/_entity_type.py b/google/cloud/aiplatform/featurestore/_entity_type.py index bfbaa494e4..6891c6a599 100644 --- a/google/cloud/aiplatform/featurestore/_entity_type.py +++ b/google/cloud/aiplatform/featurestore/_entity_type.py @@ -116,11 +116,11 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=entity_type_name, - parent_resource_name_fields={ - featurestore.Featurestore._resource_noun: featurestore_id - } - if featurestore_id - else featurestore_id, + parent_resource_name_fields=( + {featurestore.Featurestore._resource_noun: featurestore_id} + if featurestore_id + else featurestore_id + ), ) self._featurestore_online_client = self._instantiate_featurestore_online_client( @@ -1842,7 +1842,7 @@ def _apply_feature_timestamp( @staticmethod def _is_timestamp( - timestamp: 
Union[datetime.datetime, timestamp_pb2.Timestamp] + timestamp: Union[datetime.datetime, timestamp_pb2.Timestamp], ) -> bool: return isinstance(timestamp, datetime.datetime) or isinstance( timestamp, timestamp_pb2.Timestamp diff --git a/google/cloud/aiplatform/featurestore/feature.py b/google/cloud/aiplatform/featurestore/feature.py index dd53b7756c..39b8f8d812 100644 --- a/google/cloud/aiplatform/featurestore/feature.py +++ b/google/cloud/aiplatform/featurestore/feature.py @@ -114,12 +114,14 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=feature_name, - parent_resource_name_fields={ - featurestore.Featurestore._resource_noun: featurestore_id, - featurestore.EntityType._resource_noun: entity_type_id, - } - if featurestore_id - else featurestore_id, + parent_resource_name_fields=( + { + featurestore.Featurestore._resource_noun: featurestore_id, + featurestore.EntityType._resource_noun: entity_type_id, + } + if featurestore_id + else featurestore_id + ), ) def _get_featurestore_name(self) -> str: @@ -338,11 +340,11 @@ def list( resource_noun=featurestore.EntityType._resource_noun, parse_resource_name_method=featurestore.EntityType._parse_resource_name, format_resource_name_method=featurestore.EntityType._format_resource_name, - parent_resource_name_fields={ - featurestore.Featurestore._resource_noun: featurestore_id - } - if featurestore_id - else featurestore_id, + parent_resource_name_fields=( + {featurestore.Featurestore._resource_noun: featurestore_id} + if featurestore_id + else featurestore_id + ), project=project, location=location, resource_id_validator=featurestore.EntityType._resource_id_validator, @@ -585,11 +587,11 @@ def create( resource_noun=featurestore.EntityType._resource_noun, parse_resource_name_method=featurestore.EntityType._parse_resource_name, format_resource_name_method=featurestore.EntityType._format_resource_name, - parent_resource_name_fields={ - featurestore.Featurestore._resource_noun: featurestore_id - } - 
if featurestore_id - else featurestore_id, + parent_resource_name_fields=( + {featurestore.Featurestore._resource_noun: featurestore_id} + if featurestore_id + else featurestore_id + ), project=project, location=location, resource_id_validator=featurestore.EntityType._resource_id_validator, diff --git a/google/cloud/aiplatform/gapic/schema/__init__.py b/google/cloud/aiplatform/gapic/schema/__init__.py index 5d31a70f1f..3722e6ddd7 100644 --- a/google/cloud/aiplatform/gapic/schema/__init__.py +++ b/google/cloud/aiplatform/gapic/schema/__init__.py @@ -18,12 +18,20 @@ from google.cloud.aiplatform.v1.schema import predict from google.cloud.aiplatform.v1.schema import trainingjob from google.cloud.aiplatform.v1beta1.schema import predict as predict_v1beta1 -from google.cloud.aiplatform.v1beta1.schema import predict as trainingjob_v1beta1 +from google.cloud.aiplatform.v1beta1.schema import ( + predict as trainingjob_v1beta1, +) # import the v1 submodules for enhancement -from google.cloud.aiplatform.v1.schema.predict.instance_v1 import types as instance -from google.cloud.aiplatform.v1.schema.predict.params_v1 import types as params -from google.cloud.aiplatform.v1.schema.predict.prediction_v1 import types as prediction +from google.cloud.aiplatform.v1.schema.predict.instance_v1 import ( + types as instance, +) +from google.cloud.aiplatform.v1.schema.predict.params_v1 import ( + types as params, +) +from google.cloud.aiplatform.v1.schema.predict.prediction_v1 import ( + types as prediction, +) from google.cloud.aiplatform.v1.schema.trainingjob.definition_v1 import ( types as definition, ) diff --git a/google/cloud/aiplatform/gapic_version.py b/google/cloud/aiplatform/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/gapic_version.py +++ b/google/cloud/aiplatform/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/hyperparameter_tuning.py b/google/cloud/aiplatform/hyperparameter_tuning.py index 871ae11790..b88f5a9488 100644 --- a/google/cloud/aiplatform/hyperparameter_tuning.py +++ b/google/cloud/aiplatform/hyperparameter_tuning.py @@ -140,7 +140,7 @@ def _to_parameter_spec( """Converts this parameter to ParameterSpec.""" conditions = [] if self.conditional_parameter_spec is not None: - for (conditional_param_id, spec) in self.conditional_parameter_spec.items(): + for conditional_param_id, spec in self.conditional_parameter_spec.items(): condition = ( gca_study_compat.StudySpec.ParameterSpec.ConditionalParameterSpec() ) @@ -178,7 +178,7 @@ def _to_parameter_spec_v1beta1( """Converts this parameter to ParameterSpec.""" conditions = [] if self.conditional_parameter_spec is not None: - for (conditional_param_id, spec) in self.conditional_parameter_spec.items(): + for conditional_param_id, spec in self.conditional_parameter_spec.items(): condition = ( gca_study_compat_v1beta1.StudySpec.ParameterSpec.ConditionalParameterSpec() ) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 73f5997b6d..e854faa3e6 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -1325,12 +1325,16 @@ def _submit_impl( model_monitoring_alert_config._config_for_bp = True gapic_mm_config = gca_model_monitoring_v1beta1.ModelMonitoringConfig( objective_configs=[model_monitoring_objective_config.as_proto()], - alert_config=model_monitoring_alert_config.as_proto() - if model_monitoring_alert_config is not None - else None, - analysis_instance_schema_uri=analysis_instance_schema_uri - if analysis_instance_schema_uri is not None - else None, + alert_config=( + model_monitoring_alert_config.as_proto() + if model_monitoring_alert_config is not None + else None + ), + analysis_instance_schema_uri=( + 
analysis_instance_schema_uri + if analysis_instance_schema_uri is not None + else None + ), ) gapic_batch_prediction_job.model_monitoring_config = gapic_mm_config @@ -3297,7 +3301,7 @@ def _parse_configs( + "]. Note that deployed model IDs are different from the uploaded model's ID" ) raise ValueError(error_string) - for (deployed_model, objective_config) in objective_configs.items(): + for deployed_model, objective_config in objective_configs.items(): if ( deployed_model not in xai_enabled and objective_config.explanation_config is not None diff --git a/google/cloud/aiplatform/matching_engine/matching_engine_index_endpoint.py b/google/cloud/aiplatform/matching_engine/matching_engine_index_endpoint.py index df5e370db9..d81c3e60d6 100644 --- a/google/cloud/aiplatform/matching_engine/matching_engine_index_endpoint.py +++ b/google/cloud/aiplatform/matching_engine/matching_engine_index_endpoint.py @@ -831,9 +831,9 @@ def _instantiate_private_match_service_stub( if ip_address not in self._match_grpc_stub_cache: # Set up channel and stub channel = grpc.insecure_channel("{}:10000".format(ip_address)) - self._match_grpc_stub_cache[ - ip_address - ] = match_service_pb2_grpc.MatchServiceStub(channel) + self._match_grpc_stub_cache[ip_address] = ( + match_service_pb2_grpc.MatchServiceStub(channel) + ) return self._match_grpc_stub_cache[ip_address] @property @@ -1863,9 +1863,9 @@ def find_neighbors( MatchNeighbor( id=neighbor.datapoint.datapoint_id, distance=neighbor.distance, - sparse_distance=neighbor.sparse_distance - if neighbor.sparse_distance - else None, + sparse_distance=( + neighbor.sparse_distance if neighbor.sparse_distance else None + ), ).from_index_datapoint(index_datapoint=neighbor.datapoint) for neighbor in embedding_neighbors.neighbors ] @@ -2163,17 +2163,21 @@ def match( approx_num_neighbors=approx_num_neighbors, fraction_leaf_nodes_to_search_override=fraction_leaf_nodes_to_search_override, numeric_restricts=numeric_restricts, - 
sparse_embedding=match_service_pb2.SparseEmbedding( - float_val=query.sparse_embedding_values, - dimension=query.sparse_embedding_dimensions, - ) - if query_is_hybrid - else None, - rrf=match_service_pb2.MatchRequest.RRF( - alpha=query.rrf_ranking_alpha, - ) - if query_is_hybrid and query.rrf_ranking_alpha - else None, + sparse_embedding=( + match_service_pb2.SparseEmbedding( + float_val=query.sparse_embedding_values, + dimension=query.sparse_embedding_dimensions, + ) + if query_is_hybrid + else None + ), + rrf=( + match_service_pb2.MatchRequest.RRF( + alpha=query.rrf_ranking_alpha, + ) + if query_is_hybrid and query.rrf_ranking_alpha + else None + ), ) requests.append(request) else: @@ -2199,9 +2203,9 @@ def match( match_neighbors_id_map[neighbor.id] = MatchNeighbor( id=neighbor.id, distance=neighbor.distance, - sparse_distance=neighbor.sparse_distance - if neighbor.sparse_distance - else None, + sparse_distance=( + neighbor.sparse_distance if neighbor.sparse_distance else None + ), ) for embedding in resp.embeddings: if embedding.id in match_neighbors_id_map: diff --git a/google/cloud/aiplatform/metadata/_models.py b/google/cloud/aiplatform/metadata/_models.py index 08440a6603..0f006bcf2e 100644 --- a/google/cloud/aiplatform/metadata/_models.py +++ b/google/cloud/aiplatform/metadata/_models.py @@ -538,7 +538,7 @@ def save_model( def load_model( - model: Union[str, google_artifact_schema.ExperimentModel] + model: Union[str, google_artifact_schema.ExperimentModel], ) -> Union["sklearn.base.BaseEstimator", "xgb.Booster", "tf.Module"]: # noqa: F821 """Retrieves the original ML model from an ExperimentModel resource. @@ -853,7 +853,7 @@ def register_model( def get_experiment_model_info( - model: Union[str, google_artifact_schema.ExperimentModel] + model: Union[str, google_artifact_schema.ExperimentModel], ) -> Dict[str, Any]: """Get the model's info from an experiment model artifact. 
diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 27ef4d96b9..78846646c9 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -324,9 +324,9 @@ def _add_artifact( events = [ gca_event.Event( artifact=artifact_resource_name, - type_=gca_event.Event.Type.INPUT - if input - else gca_event.Event.Type.OUTPUT, + type_=( + gca_event.Event.Type.INPUT if input else gca_event.Event.Type.OUTPUT + ), ) for artifact_resource_name in artifact_resource_names ] diff --git a/google/cloud/aiplatform/metadata/metadata_store.py b/google/cloud/aiplatform/metadata/metadata_store.py index ab2e7ec305..e9ea6f421d 100644 --- a/google/cloud/aiplatform/metadata/metadata_store.py +++ b/google/cloud/aiplatform/metadata/metadata_store.py @@ -24,7 +24,9 @@ from google.cloud.aiplatform import base, initializer from google.cloud.aiplatform import compat from google.cloud.aiplatform import utils -from google.cloud.aiplatform.compat.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform.compat.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform.constants import base as base_constants diff --git a/google/cloud/aiplatform/metadata/schema/base_artifact.py b/google/cloud/aiplatform/metadata/schema/base_artifact.py index 81225b72fa..316653b511 100644 --- a/google/cloud/aiplatform/metadata/schema/base_artifact.py +++ b/google/cloud/aiplatform/metadata/schema/base_artifact.py @@ -47,7 +47,6 @@ def __init__( metadata: Optional[Dict] = None, state: Optional[gca_artifact.Artifact.State] = gca_artifact.Artifact.State.LIVE, ): - """Initializes the Artifact with the given name, URI and metadata. 
This is the base class for defining various artifact types, which can be @@ -112,7 +111,6 @@ def _init_with_resource_name( location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, ): - """Initializes the Artifact instance using an existing resource. Args: diff --git a/google/cloud/aiplatform/metadata/schema/base_context.py b/google/cloud/aiplatform/metadata/schema/base_context.py index e9b331095a..d22e3ec2b6 100644 --- a/google/cloud/aiplatform/metadata/schema/base_context.py +++ b/google/cloud/aiplatform/metadata/schema/base_context.py @@ -49,7 +49,6 @@ def __init__( metadata: Optional[Dict] = None, description: Optional[str] = None, ): - """Initializes the Context with the given name, URI and metadata. Args: diff --git a/google/cloud/aiplatform/metadata/schema/base_execution.py b/google/cloud/aiplatform/metadata/schema/base_execution.py index 6076ea204c..c514077c65 100644 --- a/google/cloud/aiplatform/metadata/schema/base_execution.py +++ b/google/cloud/aiplatform/metadata/schema/base_execution.py @@ -52,7 +52,6 @@ def __init__( metadata: Optional[Dict] = None, description: Optional[str] = None, ): - """Initializes the Execution with the given name, URI and metadata. Args: @@ -97,7 +96,6 @@ def _init_with_resource_name( *, execution_name: str, ): - """Initializes the Execution instance using an existing resource. 
Args: execution_name (str): diff --git a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py index 4b8b9e0ed0..d54f2adc73 100644 --- a/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py +++ b/google/cloud/aiplatform/metadata/schema/google/artifact_schema.py @@ -463,9 +463,9 @@ def __init__( if mean_absolute_error: extended_metadata["meanAbsoluteError"] = mean_absolute_error if mean_absolute_percentage_error: - extended_metadata[ - "meanAbsolutePercentageError" - ] = mean_absolute_percentage_error + extended_metadata["meanAbsolutePercentageError"] = ( + mean_absolute_percentage_error + ) if r_squared: extended_metadata["rSquared"] = r_squared if root_mean_squared_log_error: @@ -557,25 +557,25 @@ def __init__( if mean_absolute_error: extended_metadata["meanAbsoluteError"] = mean_absolute_error if mean_absolute_percentage_error: - extended_metadata[ - "meanAbsolutePercentageError" - ] = mean_absolute_percentage_error + extended_metadata["meanAbsolutePercentageError"] = ( + mean_absolute_percentage_error + ) if r_squared: extended_metadata["rSquared"] = r_squared if root_mean_squared_log_error: extended_metadata["rootMeanSquaredLogError"] = root_mean_squared_log_error if weighted_absolute_percentage_error: - extended_metadata[ - "weightedAbsolutePercentageError" - ] = weighted_absolute_percentage_error + extended_metadata["weightedAbsolutePercentageError"] = ( + weighted_absolute_percentage_error + ) if root_mean_squared_percentage_error: - extended_metadata[ - "rootMeanSquaredPercentageError" - ] = root_mean_squared_percentage_error + extended_metadata["rootMeanSquaredPercentageError"] = ( + root_mean_squared_percentage_error + ) if symmetric_mean_absolute_percentage_error: - extended_metadata[ - "symmetricMeanAbsolutePercentageError" - ] = symmetric_mean_absolute_percentage_error + extended_metadata["symmetricMeanAbsolutePercentageError"] = ( + 
symmetric_mean_absolute_percentage_error + ) super(ForecastingMetrics, self).__init__( uri=uri, diff --git a/google/cloud/aiplatform/model_evaluation/model_evaluation.py b/google/cloud/aiplatform/model_evaluation/model_evaluation.py index e574ecad78..ae8ee06ab7 100644 --- a/google/cloud/aiplatform/model_evaluation/model_evaluation.py +++ b/google/cloud/aiplatform/model_evaluation/model_evaluation.py @@ -107,9 +107,9 @@ def __init__( self._gca_resource = self._get_gca_resource( resource_name=evaluation_name, - parent_resource_name_fields={models.Model._resource_noun: model_id} - if model_id - else model_id, + parent_resource_name_fields=( + {models.Model._resource_noun: model_id} if model_id else model_id + ), ) def delete(self): diff --git a/google/cloud/aiplatform/model_evaluation/model_evaluation_job.py b/google/cloud/aiplatform/model_evaluation/model_evaluation_job.py index 87f4e44b1f..7ce555d3db 100644 --- a/google/cloud/aiplatform/model_evaluation/model_evaluation_job.py +++ b/google/cloud/aiplatform/model_evaluation/model_evaluation_job.py @@ -320,9 +320,9 @@ def submit( if bigquery_source_uri: template_params["batch_predict_predictions_format"] = "bigquery" template_params["batch_predict_bigquery_source_uri"] = bigquery_source_uri - template_params[ - "batch_predict_bigquery_destination_output_uri" - ] = batch_predict_bigquery_destination_output_uri + template_params["batch_predict_bigquery_destination_output_uri"] = ( + batch_predict_bigquery_destination_output_uri + ) elif gcs_source_uris: template_params["batch_predict_gcs_source_uris"] = gcs_source_uris @@ -330,14 +330,14 @@ def submit( template_params["evaluation_class_labels"] = class_labels if prediction_label_column: - template_params[ - "evaluation_prediction_label_column" - ] = prediction_label_column + template_params["evaluation_prediction_label_column"] = ( + prediction_label_column + ) if prediction_score_column: - template_params[ - "evaluation_prediction_score_column" - ] = 
prediction_score_column + template_params["evaluation_prediction_score_column"] = ( + prediction_score_column + ) # If the user provides a SA, use it for the Dataflow job as well if service_account is not None: diff --git a/google/cloud/aiplatform/model_monitoring/objective.py b/google/cloud/aiplatform/model_monitoring/objective.py index 48e45d734d..cf90b6cba3 100644 --- a/google/cloud/aiplatform/model_monitoring/objective.py +++ b/google/cloud/aiplatform/model_monitoring/objective.py @@ -92,7 +92,9 @@ def __init__( def as_proto( self, - ) -> gca_model_monitoring.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig: + ) -> ( + gca_model_monitoring.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig + ): """Converts _SkewDetectionConfig to a proto message. Returns: @@ -117,9 +119,9 @@ def as_proto( attribution_score_skew_threshold = gca_model_monitoring.ThresholdConfig( value=self.attribute_skew_thresholds[key] ) - attribution_score_skew_thresholds_mapping[ - key - ] = attribution_score_skew_threshold + attribution_score_skew_thresholds_mapping[key] = ( + attribution_score_skew_threshold + ) return gca_model_monitoring.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig( skew_thresholds=skew_thresholds_mapping, attribution_score_skew_thresholds=attribution_score_skew_thresholds_mapping, @@ -153,7 +155,9 @@ def __init__( def as_proto( self, - ) -> gca_model_monitoring.ModelMonitoringObjectiveConfig.PredictionDriftDetectionConfig: + ) -> ( + gca_model_monitoring.ModelMonitoringObjectiveConfig.PredictionDriftDetectionConfig + ): """Converts _DriftDetectionConfig to a proto message. 
Returns: @@ -174,9 +178,9 @@ def as_proto( value=self.attribute_drift_thresholds[key] ) ) - attribution_score_drift_thresholds_mapping[ - key - ] = attribution_score_drift_threshold + attribution_score_drift_thresholds_mapping[key] = ( + attribution_score_drift_threshold + ) return gca_model_monitoring.ModelMonitoringObjectiveConfig.PredictionDriftDetectionConfig( drift_thresholds=drift_thresholds_mapping, attribution_score_drift_thresholds=attribution_score_drift_thresholds_mapping, @@ -264,15 +268,21 @@ def as_proto(self) -> gca_model_monitoring.ModelMonitoringObjectiveConfig: # TODO(b/242108750): remove temporary logic once model monitoring for batch prediction is GA gapic_config = gca_model_monitoring.ModelMonitoringObjectiveConfig( training_dataset=training_dataset, - training_prediction_skew_detection_config=self.skew_detection_config.as_proto() - if self.skew_detection_config is not None - else None, - prediction_drift_detection_config=self.drift_detection_config.as_proto() - if self.drift_detection_config is not None - else None, - explanation_config=self.explanation_config.as_proto() - if self.explanation_config is not None - else None, + training_prediction_skew_detection_config=( + self.skew_detection_config.as_proto() + if self.skew_detection_config is not None + else None + ), + prediction_drift_detection_config=( + self.drift_detection_config.as_proto() + if self.drift_detection_config is not None + else None + ), + explanation_config=( + self.explanation_config.as_proto() + if self.explanation_config is not None + else None + ), ) if self._config_for_bp: return ( diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index 5df070d37a..e186b140ba 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -3603,7 +3603,9 @@ def health_http_uri(self) -> Optional[str]: class PrivateServiceConnectConfig: """Represents a Vertex AI PrivateServiceConnectConfig resource.""" - 
_gapic_private_service_connect_config: gca_service_networking.PrivateServiceConnectConfig + _gapic_private_service_connect_config: ( + gca_service_networking.PrivateServiceConnectConfig + ) def __init__( self, @@ -4469,6 +4471,7 @@ def deploy( max_replica_count: int = 1, accelerator_type: Optional[str] = None, accelerator_count: Optional[int] = None, + gpu_partition_size: Optional[str] = None, tpu_topology: Optional[str] = None, service_account: Optional[str] = None, explanation_metadata: Optional[aiplatform.explain.ExplanationMetadata] = None, @@ -4549,6 +4552,8 @@ def deploy( NVIDIA_TESLA_V100, NVIDIA_TESLA_P4, NVIDIA_TESLA_T4 accelerator_count (int): Optional. The number of accelerators to attach to a worker replica. + gpu_partition_size (str): + Optional. The GPU partition Size for Nvidia MIG. tpu_topology (str): Optional. The TPU topology to use for the DeployedModel. Required for CloudTPU multihost deployments. @@ -4652,6 +4657,7 @@ def deploy( max_replica_count=max_replica_count, accelerator_type=accelerator_type, accelerator_count=accelerator_count, + gpu_partition_size=gpu_partition_size, tpu_topology=tpu_topology, reservation_affinity_type=reservation_affinity_type, reservation_affinity_key=reservation_affinity_key, @@ -5727,6 +5733,7 @@ def deploy( max_replica_count: int = 1, accelerator_type: Optional[str] = None, accelerator_count: Optional[int] = None, + gpu_partition_size: Optional[str] = None, tpu_topology: Optional[str] = None, service_account: Optional[str] = None, explanation_metadata: Optional[aiplatform.explain.ExplanationMetadata] = None, @@ -5805,6 +5812,8 @@ def deploy( NVIDIA_TESLA_V100, NVIDIA_TESLA_P4, NVIDIA_TESLA_T4 accelerator_count (int): Optional. The number of accelerators to attach to a worker replica. + gpu_partition_size (str): + Optional. The GPU partition Size for Nvidia MIG. tpu_topology (str): Optional. The TPU topology to use for the DeployedModel. Requireid for CloudTPU multihost deployments. 
@@ -5976,6 +5985,7 @@ def deploy( max_replica_count=max_replica_count, accelerator_type=accelerator_type, accelerator_count=accelerator_count, + gpu_partition_size=gpu_partition_size, tpu_topology=tpu_topology, reservation_affinity_type=reservation_affinity_type, reservation_affinity_key=reservation_affinity_key, @@ -6022,6 +6032,7 @@ def _deploy( max_replica_count: int = 1, accelerator_type: Optional[str] = None, accelerator_count: Optional[int] = None, + gpu_partition_size: Optional[str] = None, tpu_topology: Optional[str] = None, reservation_affinity_type: Optional[str] = None, reservation_affinity_key: Optional[str] = None, @@ -6097,6 +6108,8 @@ def _deploy( NVIDIA_TESLA_V100, NVIDIA_TESLA_P4, NVIDIA_TESLA_T4 accelerator_count (int): Optional. The number of accelerators to attach to a worker replica. + gpu_partition_size (str): + Optional. The GPU partition Size for Nvidia MIG. tpu_topology (str): Optional. The TPU topology to use for the DeployedModel. Requireid for CloudTPU multihost deployments. 
@@ -6242,6 +6255,7 @@ def _deploy( max_replica_count=max_replica_count, accelerator_type=accelerator_type, accelerator_count=accelerator_count, + gpu_partition_size=gpu_partition_size, tpu_topology=tpu_topology, reservation_affinity_type=reservation_affinity_type, reservation_affinity_key=reservation_affinity_key, diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index 19d2bfe949..2075761163 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -1018,9 +1018,9 @@ def _query_experiment_row( row.params = {} for key, value in system_run_executions[0].metadata.items(): if key.startswith(metadata_constants.PIPELINE_PARAM_PREFIX): - row.params[ - key[len(metadata_constants.PIPELINE_PARAM_PREFIX) :] - ] = value + row.params[key[len(metadata_constants.PIPELINE_PARAM_PREFIX) :]] = ( + value + ) row.state = system_run_executions[0].state.name for metric_artifact in metric_artifacts: diff --git a/google/cloud/aiplatform/prediction/serializer.py b/google/cloud/aiplatform/prediction/serializer.py index 8016e46115..569dcda6aa 100644 --- a/google/cloud/aiplatform/prediction/serializer.py +++ b/google/cloud/aiplatform/prediction/serializer.py @@ -27,7 +27,9 @@ 'Please install the SDK using `pip install "google-cloud-aiplatform[prediction]>=1.16.0"`.' ) -from google.cloud.aiplatform.constants import prediction as prediction_constants +from google.cloud.aiplatform.constants import ( + prediction as prediction_constants, +) from google.cloud.aiplatform.prediction import handler_utils diff --git a/google/cloud/aiplatform/prediction/sklearn/__init__.py b/google/cloud/aiplatform/prediction/sklearn/__init__.py index d7144d1426..4d35c0f20e 100644 --- a/google/cloud/aiplatform/prediction/sklearn/__init__.py +++ b/google/cloud/aiplatform/prediction/sklearn/__init__.py @@ -15,6 +15,8 @@ # limitations under the License. 
# -from google.cloud.aiplatform.prediction.sklearn.predictor import SklearnPredictor +from google.cloud.aiplatform.prediction.sklearn.predictor import ( + SklearnPredictor, +) __all__ = ("SklearnPredictor",) diff --git a/google/cloud/aiplatform/prediction/xgboost/__init__.py b/google/cloud/aiplatform/prediction/xgboost/__init__.py index 53cd28bb6c..d65b388d35 100644 --- a/google/cloud/aiplatform/prediction/xgboost/__init__.py +++ b/google/cloud/aiplatform/prediction/xgboost/__init__.py @@ -15,6 +15,8 @@ # limitations under the License. # -from google.cloud.aiplatform.prediction.xgboost.predictor import XgboostPredictor +from google.cloud.aiplatform.prediction.xgboost.predictor import ( + XgboostPredictor, +) __all__ = ("XgboostPredictor",) diff --git a/google/cloud/aiplatform/tensorboard/plugins/tf_profiler/profile_uploader.py b/google/cloud/aiplatform/tensorboard/plugins/tf_profiler/profile_uploader.py index cb8f119172..4c9dbf937a 100644 --- a/google/cloud/aiplatform/tensorboard/plugins/tf_profiler/profile_uploader.py +++ b/google/cloud/aiplatform/tensorboard/plugins/tf_profiler/profile_uploader.py @@ -24,7 +24,9 @@ from typing import DefaultDict, Dict, Generator, List, Optional, Set, Tuple from google.cloud import storage -from google.cloud.aiplatform.compat.services import tensorboard_service_client +from google.cloud.aiplatform.compat.services import ( + tensorboard_service_client, +) from google.cloud.aiplatform.compat.types import tensorboard_data from google.cloud.aiplatform.compat.types import tensorboard_service from google.cloud.aiplatform.compat.types import tensorboard_time_series @@ -182,9 +184,9 @@ def send_request(self, run_name: str): ) if run_name not in self._run_to_file_request_sender: - self._run_to_file_request_sender[ - run_name - ] = self._file_request_sender_factory(tb_run) + self._run_to_file_request_sender[run_name] = ( + self._file_request_sender_factory(tb_run) + ) # Loop through any of the profiling sessions within this training run. 
# A training run can have multiple profile sessions. diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 302df9614d..87f840b3b2 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -388,9 +388,11 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=tensorboard_experiment_name, - parent_resource_name_fields={Tensorboard._resource_noun: tensorboard_id} - if tensorboard_id - else tensorboard_id, + parent_resource_name_fields=( + {Tensorboard._resource_noun: tensorboard_id} + if tensorboard_id + else tensorboard_id + ), ) @classmethod @@ -646,12 +648,14 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=tensorboard_run_name, - parent_resource_name_fields={ - Tensorboard._resource_noun: tensorboard_id, - TensorboardExperiment._resource_noun: tensorboard_experiment_id, - } - if tensorboard_id - else tensorboard_id, + parent_resource_name_fields=( + { + Tensorboard._resource_noun: tensorboard_id, + TensorboardExperiment._resource_noun: tensorboard_experiment_id, + } + if tensorboard_id + else tensorboard_id + ), ) self._time_series_display_name_to_id_mapping = ( @@ -991,9 +995,9 @@ def create_tensorboard_time_series( credentials=self.credentials, ) - self._time_series_display_name_to_id_mapping[ - tb_time_series.display_name - ] = tb_time_series.name + self._time_series_display_name_to_id_mapping[tb_time_series.display_name] = ( + tb_time_series.name + ) return tb_time_series @@ -1128,13 +1132,15 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=tensorboard_time_series_name, - parent_resource_name_fields={ - Tensorboard._resource_noun: tensorboard_id, - TensorboardExperiment._resource_noun: tensorboard_experiment_id, - TensorboardRun._resource_noun: tensorboard_run_id, - } - if tensorboard_id - else tensorboard_id, + 
parent_resource_name_fields=( + { + Tensorboard._resource_noun: tensorboard_id, + TensorboardExperiment._resource_noun: tensorboard_experiment_id, + TensorboardRun._resource_noun: tensorboard_run_id, + } + if tensorboard_id + else tensorboard_id + ), ) @classmethod diff --git a/google/cloud/aiplatform/tensorboard/uploader.py b/google/cloud/aiplatform/tensorboard/uploader.py index db20b036e2..4f07dab93f 100644 --- a/google/cloud/aiplatform/tensorboard/uploader.py +++ b/google/cloud/aiplatform/tensorboard/uploader.py @@ -386,7 +386,7 @@ def _pre_create_runs_and_time_series(self): run_names = [] run_tag_name_to_time_series_proto = {} - for (run_name, events) in run_to_events.items(): + for run_name, events in run_to_events.items(): run_name = ( run_name if (run_name and run_name != ".") @@ -415,13 +415,13 @@ def _pre_create_runs_and_time_series(self): tensorboard_time_series.TensorboardTimeSeries.ValueType.BLOB_SEQUENCE ) - run_tag_name_to_time_series_proto[ - (run_name, value.tag) - ] = tensorboard_time_series.TensorboardTimeSeries( - display_name=value.tag, - value_type=value_type, - plugin_name=metadata.plugin_data.plugin_name, - plugin_data=metadata.plugin_data.content, + run_tag_name_to_time_series_proto[(run_name, value.tag)] = ( + tensorboard_time_series.TensorboardTimeSeries( + display_name=value.tag, + value_type=value_type, + plugin_name=metadata.plugin_data.plugin_name, + plugin_data=metadata.plugin_data.content, + ) ) experiment_runs = [uploader_utils.reformat_run_name(run) for run in run_names] @@ -717,7 +717,7 @@ def dispatch_requests( run_to_events: Mapping from run name to generator of `tf.compat.v1.Event` values, as returned by `LogdirLoader.get_run_events`. 
""" - for (run_name, events) in run_to_events.items(): + for run_name, events in run_to_events.items(): self._dispatch_additional_senders(run_name) if events is not None: for event in events: @@ -1402,7 +1402,7 @@ def _prune_empty_time_series( request: tensorboard_service.WriteTensorboardRunDataRequest, ): """Removes empty time_series from request.""" - for (time_series_idx, time_series_data) in reversed( + for time_series_idx, time_series_data in reversed( list(enumerate(request.time_series_data)) ): if not time_series_data.values: diff --git a/google/cloud/aiplatform/tensorboard/uploader_utils.py b/google/cloud/aiplatform/tensorboard/uploader_utils.py index e534220755..1f5ddc639c 100644 --- a/google/cloud/aiplatform/tensorboard/uploader_utils.py +++ b/google/cloud/aiplatform/tensorboard/uploader_utils.py @@ -277,9 +277,9 @@ def get_time_series_resource_name( tag_name, time_series_resource_creator, ) - self._run_tag_name_to_time_series_name[ - (run_name, tag_name) - ] = time_series.name + self._run_tag_name_to_time_series_name[(run_name, tag_name)] = ( + time_series.name + ) return self._run_tag_name_to_time_series_name[(run_name, tag_name)] def _create_or_get_time_series( diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 9a761b2935..71dce9e3f5 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -2636,9 +2636,9 @@ def _run( training_task_inputs_dict["windowConfig"] = window_config if enable_probabilistic_inference: - training_task_inputs_dict[ - "enableProbabilisticInference" - ] = enable_probabilistic_inference + training_task_inputs_dict["enableProbabilisticInference"] = ( + enable_probabilistic_inference + ) final_export_eval_bq_uri = export_evaluated_data_items_bigquery_destination_uri if final_export_eval_bq_uri and not final_export_eval_bq_uri.startswith( @@ -2653,9 +2653,9 @@ def _run( } if self._additional_experiments: - training_task_inputs_dict[ - 
"additionalExperiments" - ] = self._additional_experiments + training_task_inputs_dict["additionalExperiments"] = ( + self._additional_experiments + ) model = gca_model.Model( display_name=model_display_name or self._display_name, @@ -6462,9 +6462,9 @@ def _run( } if self._additional_experiments: - training_task_inputs_dict[ - "additionalExperiments" - ] = self._additional_experiments + training_task_inputs_dict["additionalExperiments"] = ( + self._additional_experiments + ) model = gca_model.Model( display_name=model_display_name or self._display_name, @@ -7420,9 +7420,9 @@ def _run( training_task_inputs_dict["baseModelId"] = base_model.name if incremental_train_base_model: - training_task_inputs_dict[ - "uptrainBaseModelId" - ] = incremental_train_base_model.name + training_task_inputs_dict["uptrainBaseModelId"] = ( + incremental_train_base_model.name + ) tunable_parameter_dict: Dict[str, any] = {} diff --git a/google/cloud/aiplatform/training_utils/cloud_profiler/initializer.py b/google/cloud/aiplatform/training_utils/cloud_profiler/initializer.py index 7abc815078..eb63792e7b 100644 --- a/google/cloud/aiplatform/training_utils/cloud_profiler/initializer.py +++ b/google/cloud/aiplatform/training_utils/cloud_profiler/initializer.py @@ -19,7 +19,9 @@ import threading from typing import Optional, Type -from google.cloud.aiplatform.training_utils.cloud_profiler import cloud_profiler_utils +from google.cloud.aiplatform.training_utils.cloud_profiler import ( + cloud_profiler_utils, +) try: from werkzeug import serving @@ -29,7 +31,9 @@ from google.cloud.aiplatform.training_utils import environment_variables from google.cloud.aiplatform.training_utils.cloud_profiler import webserver -from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin +from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import ( + base_plugin, +) from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import ( tf_profiler, ) diff 
--git a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tensorboard_api.py b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tensorboard_api.py index d778176355..0dc1959a35 100644 --- a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tensorboard_api.py +++ b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tensorboard_api.py @@ -29,7 +29,9 @@ from google.cloud.aiplatform.tensorboard import upload_tracker from google.cloud.aiplatform.tensorboard import uploader_constants from google.cloud.aiplatform.tensorboard import uploader_utils -from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader +from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import ( + profile_uploader, +) from google.cloud.aiplatform.utils import TensorboardClientWithOverride from tensorboard.util import tb_logging diff --git a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py index e532db4222..bfbcc65221 100644 --- a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py +++ b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py @@ -131,7 +131,7 @@ def _check_tf() -> bool: return False # Check for the tf profiler plugin - if importlib.util.find_spec("tensorboard_plugin_profile") is None: + if importlib.util.find_spec("tensorboard_plugin_profile") is None: # fmt: skip logger.warning( "Could not import tensorboard_plugin_profile, will not run tf profiling service" ) diff --git a/google/cloud/aiplatform/training_utils/cloud_profiler/webserver.py b/google/cloud/aiplatform/training_utils/cloud_profiler/webserver.py index 3f7706bb34..16e39de22b 100644 --- a/google/cloud/aiplatform/training_utils/cloud_profiler/webserver.py +++ 
b/google/cloud/aiplatform/training_utils/cloud_profiler/webserver.py @@ -20,7 +20,9 @@ import os from google.cloud.aiplatform.training_utils.cloud_profiler import wsgi_types -from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin +from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import ( + base_plugin, +) from typing import List from werkzeug import wrappers, Response diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index 060aff9797..593222ed0a 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -561,15 +561,17 @@ def __init__( kwargs["transport"] = transport self._clients = { - version: self.WrappedClient( - client_class=client_class, - client_options=client_options, - client_info=client_info, - credentials=credentials, - transport=transport, + version: ( + self.WrappedClient( + client_class=client_class, + client_options=client_options, + client_info=client_info, + credentials=credentials, + transport=transport, + ) + if self._is_temporary + else client_class(**kwargs) ) - if self._is_temporary - else client_class(**kwargs) for version, client_class in self._version_map } diff --git a/google/cloud/aiplatform/utils/featurestore_utils.py b/google/cloud/aiplatform/utils/featurestore_utils.py index f40cff1f26..7cc190752e 100644 --- a/google/cloud/aiplatform/utils/featurestore_utils.py +++ b/google/cloud/aiplatform/utils/featurestore_utils.py @@ -18,7 +18,9 @@ import re from typing import Dict, NamedTuple, Optional -from google.cloud.aiplatform.compat.services import featurestore_service_client +from google.cloud.aiplatform.compat.services import ( + featurestore_service_client, +) from google.cloud.aiplatform.compat.types import ( feature as gca_feature, featurestore_service as gca_featurestore_service, diff --git a/google/cloud/aiplatform/utils/pipeline_utils.py b/google/cloud/aiplatform/utils/pipeline_utils.py 
index bc86b73237..88323efe76 100644 --- a/google/cloud/aiplatform/utils/pipeline_utils.py +++ b/google/cloud/aiplatform/utils/pipeline_utils.py @@ -261,7 +261,7 @@ def _get_vertex_value( def _parse_runtime_parameters( - runtime_config_spec: Mapping[str, Any] + runtime_config_spec: Mapping[str, Any], ) -> Optional[Dict[str, Any]]: """Extracts runtime parameters from runtime config json spec. diff --git a/google/cloud/aiplatform/v1/schema/predict/instance/gapic_version.py b/google/cloud/aiplatform/v1/schema/predict/instance/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/predict/instance/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/predict/instance/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/predict/instance_v1/gapic_version.py b/google/cloud/aiplatform/v1/schema/predict/instance_v1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/predict/instance_v1/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/predict/instance_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/predict/params/gapic_version.py b/google/cloud/aiplatform/v1/schema/predict/params/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/predict/params/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/predict/params/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/predict/params_v1/gapic_version.py b/google/cloud/aiplatform/v1/schema/predict/params_v1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/predict/params_v1/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/predict/params_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/predict/prediction/gapic_version.py b/google/cloud/aiplatform/v1/schema/predict/prediction/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/predict/prediction/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/predict/prediction/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/predict/prediction_v1/gapic_version.py b/google/cloud/aiplatform/v1/schema/predict/prediction_v1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/predict/prediction_v1/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/predict/prediction_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition/gapic_version.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/gapic_version.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/gapic_version.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_classification.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_classification.py index deaca47d26..e2ff695597 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_classification.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_classification.py @@ -131,6 +131,7 @@ class ModelType(proto.Enum): but should also have a higher prediction quality than other mobile models. 
""" + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_TF_LOW_LATENCY_1 = 2 @@ -189,6 +190,7 @@ class SuccessfulStopReason(proto.Enum): increase its quality, since it already has converged. """ + SUCCESSFUL_STOP_REASON_UNSPECIFIED = 0 BUDGET_REACHED = 1 MODEL_CONVERGED = 2 diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_object_detection.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_object_detection.py index b7aa371638..7feb056ea3 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_object_detection.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_object_detection.py @@ -125,6 +125,7 @@ class ModelType(proto.Enum): also have a higher prediction quality than other mobile models. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_LATENCY_1 = 2 @@ -176,6 +177,7 @@ class SuccessfulStopReason(proto.Enum): increase its quality, since it already has converged. """ + SUCCESSFUL_STOP_REASON_UNSPECIFIED = 0 BUDGET_REACHED = 1 MODEL_CONVERGED = 2 diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_segmentation.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_segmentation.py index 4e2f961811..3b9f06290c 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_segmentation.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_image_segmentation.py @@ -105,6 +105,7 @@ class ModelType(proto.Enum): Expected to have low latency, but may have lower prediction quality than other mobile models. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_ACCURACY_1 = 2 @@ -154,6 +155,7 @@ class SuccessfulStopReason(proto.Enum): increase its quality, since it already has converged. 
""" + SUCCESSFUL_STOP_REASON_UNSPECIFIED = 0 BUDGET_REACHED = 1 MODEL_CONVERGED = 2 diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_tables.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_tables.py index 054866d952..992e9bafb3 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_tables.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_tables.py @@ -460,13 +460,17 @@ class TextArrayTransformation(proto.Message): oneof="transformation_detail", message="AutoMlTablesInputs.Transformation.TextTransformation", ) - repeated_numeric: "AutoMlTablesInputs.Transformation.NumericArrayTransformation" = proto.Field( + repeated_numeric: ( + "AutoMlTablesInputs.Transformation.NumericArrayTransformation" + ) = proto.Field( proto.MESSAGE, number=6, oneof="transformation_detail", message="AutoMlTablesInputs.Transformation.NumericArrayTransformation", ) - repeated_categorical: "AutoMlTablesInputs.Transformation.CategoricalArrayTransformation" = proto.Field( + repeated_categorical: ( + "AutoMlTablesInputs.Transformation.CategoricalArrayTransformation" + ) = proto.Field( proto.MESSAGE, number=7, oneof="transformation_detail", @@ -520,7 +524,9 @@ class TextArrayTransformation(proto.Message): proto.STRING, number=9, ) - export_evaluated_data_items_config: gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig = proto.Field( + export_evaluated_data_items_config: ( + gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig + ) = proto.Field( proto.MESSAGE, number=10, message=gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig, diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_action_recognition.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_action_recognition.py index 091e1df7ab..c305cf3f6b 100644 --- 
a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_action_recognition.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_action_recognition.py @@ -81,6 +81,7 @@ class ModelType(proto.Enum): TensorFlow Lite model and used on a Coral device afterwards. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_VERSATILE_1 = 2 diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_classification.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_classification.py index c951b5d4fc..1484267000 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_classification.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_classification.py @@ -75,6 +75,7 @@ class ModelType(proto.Enum): ModelService.ExportModel) to a Jetson device afterwards. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_VERSATILE_1 = 2 diff --git a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_object_tracking.py b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_object_tracking.py index 6bf6c5bded..704d6f4a94 100644 --- a/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_object_tracking.py +++ b/google/cloud/aiplatform/v1/schema/trainingjob/definition_v1/types/automl_video_object_tracking.py @@ -88,6 +88,7 @@ class ModelType(proto.Enum): ModelService.ExportModel) and used on an NVIDIA Jetson device. 
""" + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_VERSATILE_1 = 2 diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/predict/params/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/gapic_version.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/gapic_version.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py index c03164b660..aa30bf1cab 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py @@ -131,6 +131,7 @@ class ModelType(proto.Enum): but should also have a higher prediction quality than other mobile models. 
""" + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_TF_LOW_LATENCY_1 = 2 @@ -189,6 +190,7 @@ class SuccessfulStopReason(proto.Enum): increase its quality, since it already has converged. """ + SUCCESSFUL_STOP_REASON_UNSPECIFIED = 0 BUDGET_REACHED = 1 MODEL_CONVERGED = 2 diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py index b3e754626a..6c363e9a0a 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py @@ -125,6 +125,7 @@ class ModelType(proto.Enum): also have a higher prediction quality than other mobile models. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_LATENCY_1 = 2 @@ -176,6 +177,7 @@ class SuccessfulStopReason(proto.Enum): increase its quality, since it already has converged. """ + SUCCESSFUL_STOP_REASON_UNSPECIFIED = 0 BUDGET_REACHED = 1 MODEL_CONVERGED = 2 diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py index 87f52b1f9e..0a190567aa 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py @@ -105,6 +105,7 @@ class ModelType(proto.Enum): Expected to have low latency, but may have lower prediction quality than other mobile models. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_ACCURACY_1 = 2 @@ -154,6 +155,7 @@ class SuccessfulStopReason(proto.Enum): increase its quality, since it already has converged. 
""" + SUCCESSFUL_STOP_REASON_UNSPECIFIED = 0 BUDGET_REACHED = 1 MODEL_CONVERGED = 2 diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py index e0139a8c42..01150fa6fa 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py @@ -460,13 +460,17 @@ class TextArrayTransformation(proto.Message): oneof="transformation_detail", message="AutoMlTablesInputs.Transformation.TextTransformation", ) - repeated_numeric: "AutoMlTablesInputs.Transformation.NumericArrayTransformation" = proto.Field( + repeated_numeric: ( + "AutoMlTablesInputs.Transformation.NumericArrayTransformation" + ) = proto.Field( proto.MESSAGE, number=6, oneof="transformation_detail", message="AutoMlTablesInputs.Transformation.NumericArrayTransformation", ) - repeated_categorical: "AutoMlTablesInputs.Transformation.CategoricalArrayTransformation" = proto.Field( + repeated_categorical: ( + "AutoMlTablesInputs.Transformation.CategoricalArrayTransformation" + ) = proto.Field( proto.MESSAGE, number=7, oneof="transformation_detail", @@ -520,7 +524,9 @@ class TextArrayTransformation(proto.Message): proto.STRING, number=9, ) - export_evaluated_data_items_config: gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig = proto.Field( + export_evaluated_data_items_config: ( + gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig + ) = proto.Field( proto.MESSAGE, number=10, message=gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig, diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_time_series_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_time_series_forecasting.py index 
ca4fd65085..faa0d9f1e4 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_time_series_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_time_series_forecasting.py @@ -356,17 +356,21 @@ class TextTransformation(proto.Message): message="AutoMlForecastingInputs.Transformation.NumericTransformation", ) ) - categorical: "AutoMlForecastingInputs.Transformation.CategoricalTransformation" = proto.Field( + categorical: ( + "AutoMlForecastingInputs.Transformation.CategoricalTransformation" + ) = proto.Field( proto.MESSAGE, number=3, oneof="transformation_detail", message="AutoMlForecastingInputs.Transformation.CategoricalTransformation", ) - timestamp: "AutoMlForecastingInputs.Transformation.TimestampTransformation" = proto.Field( - proto.MESSAGE, - number=4, - oneof="transformation_detail", - message="AutoMlForecastingInputs.Transformation.TimestampTransformation", + timestamp: "AutoMlForecastingInputs.Transformation.TimestampTransformation" = ( + proto.Field( + proto.MESSAGE, + number=4, + oneof="transformation_detail", + message="AutoMlForecastingInputs.Transformation.TimestampTransformation", + ) ) text: "AutoMlForecastingInputs.Transformation.TextTransformation" = proto.Field( proto.MESSAGE, @@ -464,7 +468,9 @@ class Granularity(proto.Message): proto.INT64, number=24, ) - export_evaluated_data_items_config: gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig = proto.Field( + export_evaluated_data_items_config: ( + gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig + ) = proto.Field( proto.MESSAGE, number=15, message=gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig, diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py index 
11b9c164be..807ec63644 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py @@ -81,6 +81,7 @@ class ModelType(proto.Enum): TensorFlow Lite model and used on a Coral device afterwards. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_VERSATILE_1 = 2 diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py index 4a5eb0fefe..3010649dcc 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py @@ -75,6 +75,7 @@ class ModelType(proto.Enum): ModelService.ExportModel) to a Jetson device afterwards. """ + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_VERSATILE_1 = 2 diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py index 693935051d..936069d754 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py @@ -88,6 +88,7 @@ class ModelType(proto.Enum): ModelService.ExportModel) and used on an NVIDIA Jetson device. 
""" + MODEL_TYPE_UNSPECIFIED = 0 CLOUD = 1 MOBILE_VERSATILE_1 = 2 diff --git a/google/cloud/aiplatform/version.py b/google/cloud/aiplatform/version.py index bc1d44b052..05b37ab64f 100644 --- a/google/cloud/aiplatform/version.py +++ b/google/cloud/aiplatform/version.py @@ -15,4 +15,4 @@ # limitations under the License. # -__version__ = "1.111.0" +__version__ = "1.112.0" diff --git a/google/cloud/aiplatform/vertex_ray/cluster_init.py b/google/cloud/aiplatform/vertex_ray/cluster_init.py index 3470ca1b81..357a6d0f4e 100644 --- a/google/cloud/aiplatform/vertex_ray/cluster_init.py +++ b/google/cloud/aiplatform/vertex_ray/cluster_init.py @@ -540,9 +540,9 @@ def update_ray_cluster( not_merged = 0 else: # No duplication w/ head node, write the 2nd worker node to the 2nd resource pool. - current_persistent_resource.resource_pools[ - i + not_merged - ].replica_count = worker_node_types[i].node_count + current_persistent_resource.resource_pools[i + not_merged].replica_count = ( + worker_node_types[i].node_count + ) # New worker_node_type.node_count should be >=1 unless the worker_node_type # and head_node_type are merged due to the same machine specs. if worker_node_types[i].node_count == 0: diff --git a/google/cloud/aiplatform/vertex_ray/predict/torch/register.py b/google/cloud/aiplatform/vertex_ray/predict/torch/register.py index 0f20f1abd0..07b25ed72c 100644 --- a/google/cloud/aiplatform/vertex_ray/predict/torch/register.py +++ b/google/cloud/aiplatform/vertex_ray/predict/torch/register.py @@ -1,4 +1,5 @@ """Regsiter Torch for Ray on Vertex AI.""" + # -*- coding: utf-8 -*- # Copyright 2023 Google LLC diff --git a/google/cloud/aiplatform/vertex_ray/predict/util/predict_utils.py b/google/cloud/aiplatform/vertex_ray/predict/util/predict_utils.py index a18fe4cabd..e105dbd212 100644 --- a/google/cloud/aiplatform/vertex_ray/predict/util/predict_utils.py +++ b/google/cloud/aiplatform/vertex_ray/predict/util/predict_utils.py @@ -15,8 +15,7 @@ # limitations under the License. 
# -"""Predict Utils. -""" +"""Predict Utils.""" def validate_artifact_uri(artifact_uri: str) -> None: diff --git a/google/cloud/aiplatform/vizier/pyvizier/__init__.py b/google/cloud/aiplatform/vizier/pyvizier/__init__.py index fa2fab13c7..458da46301 100644 --- a/google/cloud/aiplatform/vizier/pyvizier/__init__.py +++ b/google/cloud/aiplatform/vizier/pyvizier/__init__.py @@ -36,7 +36,9 @@ 'Please install the SDK using "pip install google-vizier"' ) -from google.cloud.aiplatform.vizier.pyvizier.proto_converters import TrialConverter +from google.cloud.aiplatform.vizier.pyvizier.proto_converters import ( + TrialConverter, +) from google.cloud.aiplatform.vizier.pyvizier.proto_converters import ( ParameterConfigConverter, ) diff --git a/google/cloud/aiplatform/vizier/pyvizier/automated_stopping.py b/google/cloud/aiplatform/vizier/pyvizier/automated_stopping.py index b46f2b12ec..25a8302e93 100644 --- a/google/cloud/aiplatform/vizier/pyvizier/automated_stopping.py +++ b/google/cloud/aiplatform/vizier/pyvizier/automated_stopping.py @@ -1,4 +1,5 @@ """Convenience classes for configuring Vizier Early-Stopping Configs.""" + import copy from typing import Union diff --git a/google/cloud/aiplatform/vizier/pyvizier/proto_converters.py b/google/cloud/aiplatform/vizier/pyvizier/proto_converters.py index eb5a9292b6..688c007247 100644 --- a/google/cloud/aiplatform/vizier/pyvizier/proto_converters.py +++ b/google/cloud/aiplatform/vizier/pyvizier/proto_converters.py @@ -1,4 +1,5 @@ """Converters for OSS Vizier's protos from/to PyVizier's classes.""" + import logging from datetime import timezone from typing import List, Optional, Sequence, Tuple, Union @@ -234,13 +235,13 @@ def _set_child_parameter_configs( ) if "discrete_value_spec" in parent_proto: - conditional_parameter_spec.parent_discrete_values.values[ - : - ] = parent_values + conditional_parameter_spec.parent_discrete_values.values[:] = ( + parent_values + ) elif "categorical_value_spec" in parent_proto: - 
conditional_parameter_spec.parent_categorical_values.values[ - : - ] = parent_values + conditional_parameter_spec.parent_categorical_values.values[:] = ( + parent_values + ) elif "integer_value_spec" in parent_proto: conditional_parameter_spec.parent_int_values.values[:] = parent_values else: diff --git a/google/cloud/aiplatform/vizier/pyvizier/study_config.py b/google/cloud/aiplatform/vizier/pyvizier/study_config.py index 0940e56767..73e84ab26d 100644 --- a/google/cloud/aiplatform/vizier/pyvizier/study_config.py +++ b/google/cloud/aiplatform/vizier/pyvizier/study_config.py @@ -12,6 +12,7 @@ * `SearchSpace` and `SearchSpaceSelector` classes deals with Vizier search spaces. Both flat spaces and conditional parameters are supported. """ + import collections import copy import enum @@ -385,9 +386,9 @@ def _pytrial_parameters(self, pytrial: Trial) -> Dict[str, ParameterValueSequenc ValueError: If the trial parameters do not exist in this search space. ValueError: If the trial contains duplicate parameters. 
""" - trial_external_values: Dict[ - str, Union[float, int, str, bool] - ] = self._trial_to_external_values(pytrial) + trial_external_values: Dict[str, Union[float, int, str, bool]] = ( + self._trial_to_external_values(pytrial) + ) if len(trial_external_values) != len(pytrial.parameters): raise ValueError( "Invalid trial for this search space: failed to convert " diff --git a/google/cloud/aiplatform/vizier/trial.py b/google/cloud/aiplatform/vizier/trial.py index 310f08e36b..1b240f9f69 100644 --- a/google/cloud/aiplatform/vizier/trial.py +++ b/google/cloud/aiplatform/vizier/trial.py @@ -84,11 +84,13 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=trial_name, - parent_resource_name_fields={ - study.Study._resource_noun: study_id, - } - if study_id - else study_id, + parent_resource_name_fields=( + { + study.Study._resource_noun: study_id, + } + if study_id + else study_id + ), ) @property @@ -136,9 +138,9 @@ def complete( complete_trial_request["infeasible_reason"] = infeasible_reason complete_trial_request["trial_infeasible"] = True if measurement is not None: - complete_trial_request[ - "final_measurement" - ] = vz.MeasurementConverter.to_proto(measurement) + complete_trial_request["final_measurement"] = ( + vz.MeasurementConverter.to_proto(measurement) + ) trial = self.api_client.complete_trial(request=complete_trial_request) return ( vz.MeasurementConverter.from_proto(trial.final_measurement) diff --git a/google/cloud/aiplatform_v1/gapic_version.py b/google/cloud/aiplatform_v1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform_v1/gapic_version.py +++ b/google/cloud/aiplatform_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform_v1/services/data_foundry_service/async_client.py b/google/cloud/aiplatform_v1/services/data_foundry_service/async_client.py index 6c7270590f..f300d40ada 100644 --- a/google/cloud/aiplatform_v1/services/data_foundry_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/data_foundry_service/async_client.py @@ -277,21 +277,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.DataFoundryServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", + "credentialsType": None, + } + ), ) async def generate_synthetic_data( diff --git a/google/cloud/aiplatform_v1/services/data_foundry_service/client.py b/google/cloud/aiplatform_v1/services/data_foundry_service/client.py index f7415a9381..1e43dcf4d6 100644 --- 
a/google/cloud/aiplatform_v1/services/data_foundry_service/client.py +++ b/google/cloud/aiplatform_v1/services/data_foundry_service/client.py @@ -719,21 +719,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.DataFoundryServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.DataFoundryService", + "credentialsType": None, + } + ), ) def generate_synthetic_data( diff --git a/google/cloud/aiplatform_v1/services/data_foundry_service/transports/base.py b/google/cloud/aiplatform_v1/services/data_foundry_service/transports/base.py index f8fe2216d1..9158616b36 100644 --- a/google/cloud/aiplatform_v1/services/data_foundry_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/data_foundry_service/transports/base.py @@ -239,13 +239,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: 
raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest.py b/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest.py index a9b9adf1dc..a4626d5926 100644 --- a/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest.py @@ -695,7 +695,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -838,7 +837,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -981,7 +979,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1125,7 +1122,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1273,7 +1269,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1420,7 +1415,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -1537,7 +1531,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1654,7 +1647,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1797,7 +1789,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -1940,7 +1931,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest_asyncio.py index 062ac8268f..064d809bf6 100644 --- a/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/data_foundry_service/transports/rest_asyncio.py @@ -760,7 +760,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -913,7 +912,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1066,7 +1064,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1220,7 +1217,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1378,7 +1374,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1529,7 +1524,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1652,7 +1646,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1775,7 +1768,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1928,7 +1920,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2079,7 +2070,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1/services/dataset_service/async_client.py index 1936ad7bb1..539ca531b3 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/async_client.py @@ -55,7 +55,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import saved_query @@ -306,21 +308,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.DatasetServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.DatasetService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.DatasetService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.DatasetService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + 
self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.DatasetService", + "credentialsType": None, + } + ), ) async def create_dataset( diff --git a/google/cloud/aiplatform_v1/services/dataset_service/client.py b/google/cloud/aiplatform_v1/services/dataset_service/client.py index 7c1a769195..28ce3a76dc 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/client.py @@ -71,7 +71,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import saved_query @@ -875,21 +877,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.DatasetServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.DatasetService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.DatasetService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.DatasetService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + 
"credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.DatasetService", + "credentialsType": None, + } + ), ) def create_dataset( diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/base.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/base.py index 204ea73b9a..7bba498257 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/base.py @@ -33,7 +33,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -524,13 +526,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py index 4e22a22dc9..5090565b00 100644 --- 
a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py @@ -36,7 +36,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py index a31a290d0b..4146c0d9a0 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py @@ -39,7 +39,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py index 4a025fa997..c96dd29a92 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py @@ -42,7 
+42,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.longrunning import operations_pb2 # type: ignore @@ -6574,7 +6576,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6716,7 +6717,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6858,7 +6858,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7001,7 +7000,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7148,7 +7146,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7292,7 +7289,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7408,7 +7404,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -7524,7 +7519,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7666,7 +7660,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7808,7 +7801,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py index 398bc42edc..8ab79ff345 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py @@ -55,7 +55,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.longrunning import operations_pb2 # type: ignore @@ -1505,9 +1507,9 @@ def __init__( self._interceptor = interceptor or AsyncDatasetServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -6867,7 +6869,6 @@ 
async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7019,7 +7020,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7171,7 +7171,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7324,7 +7323,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7482,7 +7480,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7633,7 +7630,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7758,7 +7754,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7882,7 +7877,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8035,7 +8029,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -8187,7 +8180,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_base.py index f04b69be2a..a1cd4c487f 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_base.py @@ -32,7 +32,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/async_client.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/async_client.py index 225719e128..28a92f5415 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import pagers +from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import deployment_resource_pool from google.cloud.aiplatform_v1.types import ( deployment_resource_pool as gca_deployment_resource_pool, @@ -317,21 +319,23 @@ def __init__( ): # pragma: NO 
COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.DeploymentResourcePoolServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", + "credentialsType": None, + } + ), ) async def create_deployment_resource_pool( diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/client.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/client.py index 995243d8ff..4ac6c47083 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/client.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import pagers +from 
google.cloud.aiplatform_v1.services.deployment_resource_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import deployment_resource_pool from google.cloud.aiplatform_v1.types import ( deployment_resource_pool as gca_deployment_resource_pool, @@ -106,14 +108,14 @@ class DeploymentResourcePoolServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[DeploymentResourcePoolServiceTransport]] _transport_registry["grpc"] = DeploymentResourcePoolServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = DeploymentResourcePoolServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + DeploymentResourcePoolServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = DeploymentResourcePoolServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncDeploymentResourcePoolServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncDeploymentResourcePoolServiceRestTransport + ) def get_transport_class( cls, @@ -835,21 +837,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.DeploymentResourcePoolServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.DeploymentResourcePoolService", + "credentialsType": None, + } + ), ) def create_deployment_resource_pool( diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/__init__.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/__init__.py index 324e745f88..dad07c12b9 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/__init__.py @@ -45,9 +45,9 @@ _transport_registry["grpc_asyncio"] = DeploymentResourcePoolServiceGrpcAsyncIOTransport _transport_registry["rest"] = DeploymentResourcePoolServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncDeploymentResourcePoolServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncDeploymentResourcePoolServiceRestTransport + ) __all__ = ( "DeploymentResourcePoolServiceTransport", diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/base.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/base.py index cdad35c19e..72e4ee7460 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/base.py @@ -324,13 +324,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( 
self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc.py index 0776e3c0e3..eae85a72a7 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc.py @@ -368,12 +368,12 @@ def create_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_deployment_resource_pool" not in self._stubs: - self._stubs[ - "create_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/CreateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/CreateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_deployment_resource_pool"] @@ -399,12 +399,12 @@ def get_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_deployment_resource_pool" not in self._stubs: - self._stubs[ - "get_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/GetDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, - response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + self._stubs["get_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/GetDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, + response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + ) ) return self._stubs["get_deployment_resource_pool"] @@ -430,12 +430,12 @@ def list_deployment_resource_pools( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_deployment_resource_pools" not in self._stubs: - self._stubs[ - "list_deployment_resource_pools" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/ListDeploymentResourcePools", - request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, - response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + self._stubs["list_deployment_resource_pools"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/ListDeploymentResourcePools", + request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, + response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + ) ) return self._stubs["list_deployment_resource_pools"] @@ -462,12 +462,12 @@ def update_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just 
need # to pass in the functions for each. if "update_deployment_resource_pool" not in self._stubs: - self._stubs[ - "update_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_deployment_resource_pool"] @@ -494,12 +494,12 @@ def delete_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_deployment_resource_pool" not in self._stubs: - self._stubs[ - "delete_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_deployment_resource_pool"] diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc_asyncio.py index 00386dd3bc..8477d09663 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/grpc_asyncio.py @@ -376,12 +376,12 @@ def create_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_deployment_resource_pool" not in self._stubs: - self._stubs[ - "create_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/CreateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/CreateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_deployment_resource_pool"] @@ -407,12 +407,12 @@ def get_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_deployment_resource_pool" not in self._stubs: - self._stubs[ - "get_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/GetDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, - response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + self._stubs["get_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/GetDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, + response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + ) ) return self._stubs["get_deployment_resource_pool"] @@ -438,12 +438,12 @@ def list_deployment_resource_pools( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_deployment_resource_pools" not in self._stubs: - self._stubs[ - "list_deployment_resource_pools" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/ListDeploymentResourcePools", - request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, - response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + self._stubs["list_deployment_resource_pools"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/ListDeploymentResourcePools", + request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, + response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + ) ) return self._stubs["list_deployment_resource_pools"] @@ -470,12 +470,12 @@ def update_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_deployment_resource_pool" not in self._stubs: - self._stubs[ - "update_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_deployment_resource_pool"] @@ -502,12 +502,12 @@ def delete_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_deployment_resource_pool" not in self._stubs: - self._stubs[ - "delete_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_deployment_resource_pool"] diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest.py index 55d86c89bd..d9e341bbd6 100644 
--- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest.py @@ -3801,7 +3801,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3946,7 +3945,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4091,7 +4089,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4237,7 +4234,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4387,7 +4383,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4532,7 +4527,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4649,7 +4643,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4766,7 +4759,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -4911,7 +4903,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5054,7 +5045,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest_asyncio.py index 9238dffdb4..ce2986330b 100644 --- a/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/deployment_resource_pool_service/transports/rest_asyncio.py @@ -771,9 +771,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3936,7 +3936,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4087,7 +4086,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4238,7 +4236,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -4390,7 +4387,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4548,7 +4544,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4701,7 +4696,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4826,7 +4820,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4949,7 +4942,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5102,7 +5094,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5253,7 +5244,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py index 7841ada5b0..8313b60861 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/async_client.py @@ -304,21 +304,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.EndpointServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.EndpointService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.EndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.EndpointService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.EndpointService", + "credentialsType": None, + } + ), ) async def create_endpoint( diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1/services/endpoint_service/client.py index 687f55e2f5..741fab91fd 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/client.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/client.py @@ -854,21 +854,25 
@@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.EndpointServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.EndpointService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.EndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.EndpointService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.EndpointService", + "credentialsType": None, + } + ), ) def create_endpoint( diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/transports/base.py b/google/cloud/aiplatform_v1/services/endpoint_service/transports/base.py index b6950fb40f..d428e9a210 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/transports/base.py @@ -359,13 +359,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, 
+ ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc.py index bda1059494..b49b755df1 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc.py @@ -471,12 +471,12 @@ def update_endpoint_long_running( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_endpoint_long_running" not in self._stubs: - self._stubs[ - "update_endpoint_long_running" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.EndpointService/UpdateEndpointLongRunning", - request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_endpoint_long_running"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.EndpointService/UpdateEndpointLongRunning", + request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_endpoint_long_running"] diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc_asyncio.py index d0e292e983..622aebecda 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/transports/grpc_asyncio.py @@ -485,12 +485,12 @@ def update_endpoint_long_running( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_endpoint_long_running" not in self._stubs: - self._stubs[ - "update_endpoint_long_running" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.EndpointService/UpdateEndpointLongRunning", - request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_endpoint_long_running"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.EndpointService/UpdateEndpointLongRunning", + request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_endpoint_long_running"] diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest.py b/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest.py index a8795d4e65..6222d8a6d3 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest.py @@ -4376,7 +4376,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4518,7 +4517,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4660,7 +4658,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4803,7 +4800,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4951,7 +4947,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5095,7 +5090,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5211,7 +5205,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5327,7 +5320,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5469,7 +5461,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5611,7 +5602,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest_asyncio.py index f6c7f14362..29488aedb4 100644 --- a/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/endpoint_service/transports/rest_asyncio.py @@ -922,9 +922,9 @@ def __init__( self._interceptor = interceptor or AsyncEndpointServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4563,7 +4563,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4716,7 +4715,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4869,7 +4867,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5023,7 +5020,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5181,7 +5177,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -5332,7 +5327,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5457,7 +5451,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5582,7 +5575,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5735,7 +5727,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5888,7 +5879,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/evaluation_service/async_client.py b/google/cloud/aiplatform_v1/services/evaluation_service/async_client.py index c4f36af7a3..c7449e2df5 100644 --- a/google/cloud/aiplatform_v1/services/evaluation_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/evaluation_service/async_client.py @@ -275,21 +275,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.EvaluationServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.EvaluationService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.EvaluationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.EvaluationService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.EvaluationService", + "credentialsType": None, + } + ), ) async def evaluate_instances( diff --git a/google/cloud/aiplatform_v1/services/evaluation_service/client.py b/google/cloud/aiplatform_v1/services/evaluation_service/client.py index 1f61d9748d..736d44826e 100644 --- a/google/cloud/aiplatform_v1/services/evaluation_service/client.py +++ 
b/google/cloud/aiplatform_v1/services/evaluation_service/client.py @@ -717,21 +717,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.EvaluationServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.EvaluationService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.EvaluationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.EvaluationService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.EvaluationService", + "credentialsType": None, + } + ), ) def evaluate_instances( diff --git a/google/cloud/aiplatform_v1/services/evaluation_service/transports/base.py b/google/cloud/aiplatform_v1/services/evaluation_service/transports/base.py index b1da31aabb..a7f2f22d09 100644 --- a/google/cloud/aiplatform_v1/services/evaluation_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/evaluation_service/transports/base.py @@ -239,13 +239,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest.py b/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest.py index 88b18c46e0..8809e2d4f4 100644 --- a/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest.py @@ -689,7 +689,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -832,7 +831,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -974,7 +972,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1117,7 +1114,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1265,7 +1261,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1410,7 +1405,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1527,7 +1521,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -1643,7 +1636,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1786,7 +1778,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -1929,7 +1920,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest_asyncio.py index 3eee2278dd..5d2d4dbafa 100644 --- a/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/evaluation_service/transports/rest_asyncio.py @@ -754,7 +754,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -907,7 +906,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1060,7 +1058,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1214,7 +1211,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -1372,7 +1368,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1523,7 +1518,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1646,7 +1640,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1769,7 +1762,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1922,7 +1914,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2075,7 +2066,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/async_client.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/async_client.py index 281e58473e..cdb11cd2b7 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/async_client.py @@ -55,7 +55,9 @@ from google.cloud.aiplatform_v1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view as gca_feature_view from google.cloud.aiplatform_v1.types import feature_view_sync @@ -325,21 +327,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeatureOnlineStoreAdminServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + 
"credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", + "credentialsType": None, + } + ), ) async def create_feature_online_store( diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/client.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/client.py index 27630c6717..eca9390451 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/client.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/client.py @@ -71,7 +71,9 @@ from google.cloud.aiplatform_v1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view as gca_feature_view from google.cloud.aiplatform_v1.types import feature_view_sync @@ -116,14 +118,14 @@ class FeatureOnlineStoreAdminServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[FeatureOnlineStoreAdminServiceTransport]] _transport_registry["grpc"] = FeatureOnlineStoreAdminServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = FeatureOnlineStoreAdminServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + FeatureOnlineStoreAdminServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = FeatureOnlineStoreAdminServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeatureOnlineStoreAdminServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeatureOnlineStoreAdminServiceRestTransport + ) def get_transport_class( cls, @@ -829,21 +831,25 @@ def 
__init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeatureOnlineStoreAdminServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService", + "credentialsType": None, + } + ), ) def create_feature_online_store( diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/pagers.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/pagers.py index 2a34fd35e0..24ff412733 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/pagers.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/pagers.py @@ -38,7 +38,9 @@ OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + 
feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/__init__.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/__init__.py index 903ebeb944..31e263b75e 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/__init__.py @@ -45,9 +45,9 @@ _transport_registry["grpc_asyncio"] = FeatureOnlineStoreAdminServiceGrpcAsyncIOTransport _transport_registry["rest"] = FeatureOnlineStoreAdminServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeatureOnlineStoreAdminServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeatureOnlineStoreAdminServiceRestTransport + ) __all__ = ( "FeatureOnlineStoreAdminServiceTransport", diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/base.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/base.py index b8ca734571..092e84db82 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync from google.cloud.location import locations_pb2 # type: ignore @@ -433,13 +435,19 @@ def get_operation( @property 
def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc.py index 5877da9d12..2be8585d01 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync from google.cloud.location import locations_pb2 # type: ignore @@ -371,12 +373,12 @@ def create_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_feature_online_store" not in self._stubs: - self._stubs[ - "create_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_feature_online_store"] @@ -432,12 +434,12 @@ def list_feature_online_stores( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_feature_online_stores" not in self._stubs: - self._stubs[ - "list_feature_online_stores" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", - request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, - response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + self._stubs["list_feature_online_stores"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", + request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, + response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + ) ) return self._stubs["list_feature_online_stores"] @@ -464,12 +466,12 @@ def update_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_feature_online_store" not in self._stubs: - self._stubs[ - "update_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_feature_online_store"] @@ -496,12 +498,12 @@ def delete_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_feature_online_store" not in self._stubs: - self._stubs[ - "delete_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_feature_online_store"] diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc_asyncio.py index ad9f0b911e..e0a08c05e7 100644 --- 
a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync from google.cloud.location import locations_pb2 # type: ignore @@ -379,12 +381,12 @@ def create_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_feature_online_store" not in self._stubs: - self._stubs[ - "create_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_feature_online_store"] @@ -440,12 +442,12 @@ def list_feature_online_stores( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_feature_online_stores" not in self._stubs: - self._stubs[ - "list_feature_online_stores" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", - request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, - response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + self._stubs["list_feature_online_stores"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", + request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, + response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + ) ) return self._stubs["list_feature_online_stores"] @@ -472,12 +474,12 @@ def update_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_feature_online_store" not in self._stubs: - self._stubs[ - "update_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_feature_online_store"] @@ -504,12 +506,12 @@ def delete_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_feature_online_store" not in self._stubs: - self._stubs[ - "delete_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_feature_online_store"] diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest.py index 8e5a79ed90..5897c7748c 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync from google.longrunning import operations_pb2 # type: ignore @@ -5358,7 +5360,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -5503,7 +5504,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5646,7 +5646,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5792,7 +5791,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5944,7 +5942,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6089,7 +6086,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6206,7 +6202,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6323,7 +6318,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6468,7 +6462,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6611,7 +6604,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_asyncio.py index bf705bccfe..e7a0e49242 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync from google.longrunning import operations_pb2 # type: ignore @@ -1182,9 +1184,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5576,7 +5578,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5729,7 +5730,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5880,7 +5880,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -6032,7 +6031,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6190,7 +6188,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6343,7 +6340,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6468,7 +6464,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6591,7 +6586,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6744,7 +6738,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6897,7 +6890,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_base.py index 135afb37c8..9f36737609 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_admin_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1.types import feature_online_store -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view_sync from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/async_client.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/async_client.py index c2afee5444..0c81a329e7 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_service/async_client.py @@ -292,21 +292,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeatureOnlineStoreServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", - 
"credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", + "credentialsType": None, + } + ), ) async def fetch_feature_values( diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/client.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/client.py index dcfd2cd10b..6b5be6debb 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/client.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_service/client.py @@ -99,9 +99,9 @@ class FeatureOnlineStoreServiceClientMeta(type): _transport_registry["grpc_asyncio"] = FeatureOnlineStoreServiceGrpcAsyncIOTransport _transport_registry["rest"] = FeatureOnlineStoreServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeatureOnlineStoreServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeatureOnlineStoreServiceRestTransport + ) def get_transport_class( cls, @@ -751,21 +751,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeatureOnlineStoreServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - 
self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeatureOnlineStoreService", + "credentialsType": None, + } + ), ) def fetch_feature_values( diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/base.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/base.py index f988549423..ebb447bcd7 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/base.py @@ -273,13 +273,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc.py index 3f78e087eb..15fb9a17f8 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc.py +++ 
b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc.py @@ -410,12 +410,12 @@ def feature_view_direct_write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "feature_view_direct_write" not in self._stubs: - self._stubs[ - "feature_view_direct_write" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreService/FeatureViewDirectWrite", - request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, - response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + self._stubs["feature_view_direct_write"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreService/FeatureViewDirectWrite", + request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, + response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + ) ) return self._stubs["feature_view_direct_write"] diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc_asyncio.py index cf00a43b3b..71c8f92aac 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/grpc_asyncio.py @@ -418,12 +418,12 @@ def feature_view_direct_write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "feature_view_direct_write" not in self._stubs: - self._stubs[ - "feature_view_direct_write" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1.FeatureOnlineStoreService/FeatureViewDirectWrite", - request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, - response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + self._stubs["feature_view_direct_write"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1.FeatureOnlineStoreService/FeatureViewDirectWrite", + request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, + response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + ) ) return self._stubs["feature_view_direct_write"] diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest.py index aed52824f2..d7d9573d49 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest.py @@ -958,7 +958,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1103,7 +1102,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1248,7 +1246,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1394,7 +1391,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1544,7 +1540,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1689,7 +1684,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1808,7 +1802,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1927,7 +1920,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2072,7 +2064,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2217,7 +2208,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest_asyncio.py index e9df507e65..9685cc1419 100644 --- a/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/feature_online_store_service/transports/rest_asyncio.py @@ -1045,7 +1045,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1196,7 +1195,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1347,7 +1345,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1499,7 +1496,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1657,7 +1653,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1808,7 +1803,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1931,7 +1925,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -2054,7 +2047,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2205,7 +2197,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2356,7 +2347,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/feature_registry_service/async_client.py b/google/cloud/aiplatform_v1/services/feature_registry_service/async_client.py index 3b32223db7..df42e9943a 100644 --- a/google/cloud/aiplatform_v1/services/feature_registry_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/feature_registry_service/async_client.py @@ -47,11 +47,15 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.feature_registry_service import pagers +from google.cloud.aiplatform_v1.services.feature_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import feature from google.cloud.aiplatform_v1.types import feature as gca_feature from google.cloud.aiplatform_v1.types import feature_group -from google.cloud.aiplatform_v1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1.types import ( + feature_group as gca_feature_group, +) from google.cloud.aiplatform_v1.types import feature_registry_service from google.cloud.aiplatform_v1.types import featurestore_service from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -297,21 +301,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created 
client `google.cloud.aiplatform_v1.FeatureRegistryServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", + "credentialsType": None, + } + ), ) async def create_feature_group( diff --git a/google/cloud/aiplatform_v1/services/feature_registry_service/client.py b/google/cloud/aiplatform_v1/services/feature_registry_service/client.py index 4d9e07f66e..fa50cbc647 100644 --- a/google/cloud/aiplatform_v1/services/feature_registry_service/client.py +++ b/google/cloud/aiplatform_v1/services/feature_registry_service/client.py @@ -63,11 +63,15 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.feature_registry_service import pagers +from google.cloud.aiplatform_v1.services.feature_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import feature 
from google.cloud.aiplatform_v1.types import feature as gca_feature from google.cloud.aiplatform_v1.types import feature_group -from google.cloud.aiplatform_v1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1.types import ( + feature_group as gca_feature_group, +) from google.cloud.aiplatform_v1.types import feature_registry_service from google.cloud.aiplatform_v1.types import featurestore_service from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -781,21 +785,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeatureRegistryServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeatureRegistryService", + "credentialsType": None, + } + ), ) def create_feature_group( diff --git a/google/cloud/aiplatform_v1/services/feature_registry_service/transports/base.py b/google/cloud/aiplatform_v1/services/feature_registry_service/transports/base.py index 7d2bd7515b..b394270fde 
100644 --- a/google/cloud/aiplatform_v1/services/feature_registry_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/feature_registry_service/transports/base.py @@ -391,13 +391,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest.py b/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest.py index 2c94e79c3c..07c68a6d65 100644 --- a/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest.py @@ -4865,7 +4865,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5008,7 +5007,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5151,7 +5149,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5295,7 +5292,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -5443,7 +5439,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5590,7 +5585,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5709,7 +5703,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5828,7 +5821,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5971,7 +5963,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6116,7 +6107,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest_asyncio.py index 24c42c548c..3f0bdc8e3d 100644 --- a/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/feature_registry_service/transports/rest_asyncio.py @@ -1050,9 +1050,9 @@ def __init__( self._interceptor = interceptor or AsyncFeatureRegistryServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5035,7 +5035,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5186,7 +5185,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5337,7 +5335,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5489,7 +5486,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -5645,7 +5641,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5796,7 +5791,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5919,7 +5913,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6042,7 +6035,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6193,7 +6185,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6344,7 +6335,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/async_client.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/async_client.py index def3286c7e..06211d9ea6 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/async_client.py @@ -296,21 +296,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeaturestoreOnlineServingServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", + "credentialsType": None, + } + ), ) async def read_feature_values( diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/client.py 
b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/client.py index 86e0e5d02b..c211d6b7ae 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/client.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/client.py @@ -100,14 +100,14 @@ class FeaturestoreOnlineServingServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[FeaturestoreOnlineServingServiceTransport]] _transport_registry["grpc"] = FeaturestoreOnlineServingServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = FeaturestoreOnlineServingServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + FeaturestoreOnlineServingServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = FeaturestoreOnlineServingServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeaturestoreOnlineServingServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeaturestoreOnlineServingServiceRestTransport + ) def get_transport_class( cls, @@ -763,21 +763,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeaturestoreOnlineServingServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + 
"credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreOnlineServingService", + "credentialsType": None, + } + ), ) def read_feature_values( diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/__init__.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/__init__.py index 21db02b7e4..c2b0c85800 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/__init__.py @@ -42,14 +42,14 @@ OrderedDict() ) # type: Dict[str, Type[FeaturestoreOnlineServingServiceTransport]] _transport_registry["grpc"] = FeaturestoreOnlineServingServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = FeaturestoreOnlineServingServiceGrpcAsyncIOTransport +_transport_registry["grpc_asyncio"] = ( + FeaturestoreOnlineServingServiceGrpcAsyncIOTransport +) _transport_registry["rest"] = FeaturestoreOnlineServingServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeaturestoreOnlineServingServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeaturestoreOnlineServingServiceRestTransport + ) __all__ = ( "FeaturestoreOnlineServingServiceTransport", diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/base.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/base.py index d2e0c4027e..5e587ba4ae 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/base.py +++ 
b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/base.py @@ -273,13 +273,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc.py index 218236d434..180f08983e 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc.py @@ -382,12 +382,12 @@ def streaming_read_feature_values( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "streaming_read_feature_values" not in self._stubs: - self._stubs[ - "streaming_read_feature_values" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", - request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, - response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + self._stubs["streaming_read_feature_values"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", + request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, + response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + ) ) return self._stubs["streaming_read_feature_values"] diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc_asyncio.py index 3ad1e18e41..d7865fc527 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/grpc_asyncio.py @@ -390,12 +390,12 @@ def streaming_read_feature_values( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "streaming_read_feature_values" not in self._stubs: - self._stubs[ - "streaming_read_feature_values" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", - request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, - response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + self._stubs["streaming_read_feature_values"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", + request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, + response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + ) ) return self._stubs["streaming_read_feature_values"] diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest.py index c58012e003..e425991fd2 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest.py @@ -1144,7 +1144,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1287,7 +1286,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1430,7 +1428,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1574,7 +1571,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1724,7 +1720,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1869,7 +1864,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1986,7 +1980,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2103,7 +2096,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2246,7 +2238,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2389,7 +2380,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest_asyncio.py index 78d26056ca..cd2a1c7f6f 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/featurestore_online_serving_service/transports/rest_asyncio.py @@ -1232,7 +1232,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1385,7 +1384,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1538,7 +1536,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1692,7 +1689,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1850,7 +1846,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -2003,7 +1998,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2128,7 +2122,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -2253,7 +2246,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2406,7 +2398,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2559,7 +2550,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py b/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py index 58e2c70ba9..5033ced735 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/async_client.py @@ -302,21 +302,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeaturestoreServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", + "credentialsType": None, + } + ), ) async def create_featurestore( diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/client.py b/google/cloud/aiplatform_v1/services/featurestore_service/client.py index 9c3a803e4c..3cbbe2a5df 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/client.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/client.py @@ -806,21 +806,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.FeaturestoreServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.FeaturestoreService", + "credentialsType": None, + } + ), ) 
def create_featurestore( diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/transports/base.py b/google/cloud/aiplatform_v1/services/featurestore_service/transports/base.py index 59d1af740c..39629c3e4b 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/transports/base.py @@ -539,13 +539,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest.py b/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest.py index 810b1fb602..9d00ff59bf 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest.py @@ -7098,7 +7098,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7241,7 +7240,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7384,7 +7382,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -7528,7 +7525,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7676,7 +7672,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7823,7 +7818,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7940,7 +7934,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8057,7 +8050,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8200,7 +8192,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8343,7 +8334,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest_asyncio.py index dfc4ee417d..5098c678d7 100644 --- a/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/featurestore_service/transports/rest_asyncio.py @@ -1626,9 +1626,9 @@ def __init__( self._interceptor = interceptor or AsyncFeaturestoreServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -7373,7 +7373,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7526,7 +7525,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7677,7 +7675,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7831,7 +7828,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7989,7 +7985,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -8140,7 +8135,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8263,7 +8257,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8386,7 +8379,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8539,7 +8531,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8690,7 +8681,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/async_client.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/async_client.py index 18aa280623..f104a6f3c7 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/async_client.py @@ -47,7 +47,9 @@ from google.cloud.aiplatform_v1.services.gen_ai_cache_service import pagers from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import content from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import gen_ai_cache_service @@ -290,21 +292,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.GenAiCacheServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + 
)(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", + "credentialsType": None, + } + ), ) async def create_cached_content( diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/client.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/client.py index 7ad3fcc9cf..1b86298d1b 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/client.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/client.py @@ -63,7 +63,9 @@ from google.cloud.aiplatform_v1.services.gen_ai_cache_service import pagers from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import content from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import gen_ai_cache_service @@ -770,21 +772,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.GenAiCacheServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.GenAiCacheService", + "credentialsType": None, + } + ), ) def create_cached_content( diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/base.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/base.py index d8fdc9c91a..a042ce3aff 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/base.py @@ -28,7 +28,9 @@ import google.protobuf from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import gen_ai_cache_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -304,13 +306,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc.py index 3d355fc3d1..154386373b 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc.py +++ 
b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc.py @@ -31,7 +31,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import gen_ai_cache_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc_asyncio.py index 5e7972915d..bead2bdaa6 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/grpc_asyncio.py @@ -34,7 +34,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import gen_ai_cache_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest.py index 3f051a5fe1..cbb60ebb02 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest.py @@ -37,7 +37,9 @@ from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types 
import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import gen_ai_cache_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -1511,7 +1513,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1654,7 +1655,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1796,7 +1796,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1939,7 +1938,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -2087,7 +2085,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -2232,7 +2229,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2349,7 +2345,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2465,7 +2460,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -2608,7 +2602,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2751,7 +2744,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_asyncio.py index c573b06d2c..67343c7b9c 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_asyncio.py @@ -49,7 +49,9 @@ from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import gen_ai_cache_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -1606,7 +1608,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1759,7 +1760,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1912,7 +1912,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -2066,7 +2065,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -2224,7 +2222,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -2375,7 +2372,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2498,7 +2494,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2621,7 +2616,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2774,7 +2768,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2927,7 +2920,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_base.py index 723bd93d46..7c3d761912 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_cache_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import gen_ai_cache_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/async_client.py b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/async_client.py index 80f2433192..9b5f7149bf 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/async_client.py @@ -296,21 +296,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.GenAiTuningServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", + 
"universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", + "credentialsType": None, + } + ), ) async def create_tuning_job( diff --git a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/client.py b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/client.py index 8f95eda67d..a4ea54b000 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/client.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/client.py @@ -838,21 +838,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.GenAiTuningServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + 
else { + "serviceName": "google.cloud.aiplatform.v1.GenAiTuningService", + "credentialsType": None, + } + ), ) def create_tuning_job( diff --git a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/base.py b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/base.py index 15e29559d0..46be524254 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/base.py @@ -304,13 +304,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest.py b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest.py index ab7d2b7bb4..93a67c9f4b 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest.py @@ -3448,7 +3448,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3591,7 +3590,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3734,7 +3732,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -3878,7 +3875,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4026,7 +4022,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4173,7 +4168,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4290,7 +4284,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4407,7 +4400,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4550,7 +4542,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4693,7 +4684,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest_asyncio.py index 77e42de99c..0a02725785 100644 --- a/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/gen_ai_tuning_service/transports/rest_asyncio.py @@ -667,9 +667,9 @@ def __init__( self._interceptor = interceptor or AsyncGenAiTuningServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3556,7 +3556,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3709,7 +3708,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3862,7 +3860,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4016,7 +4013,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4174,7 +4170,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4325,7 +4320,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4448,7 +4442,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4571,7 +4564,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4724,7 +4716,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4875,7 +4866,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py index 59699b1af4..8f9a542686 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/async_client.py @@ -50,7 +50,9 @@ from google.cloud.aiplatform_v1.services.index_endpoint_service import pagers from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import service_networking @@ -296,21 +298,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.IndexEndpointServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", + "credentialsType": None, + } + ), ) async def create_index_endpoint( diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py index 1171e404de..b82bafabba 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/client.py @@ -66,7 +66,9 @@ from google.cloud.aiplatform_v1.services.index_endpoint_service import pagers from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import service_networking @@ -794,21 +796,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.IndexEndpointServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": 
"google.cloud.aiplatform.v1.IndexEndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.IndexEndpointService", + "credentialsType": None, + } + ), ) def create_index_endpoint( diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/base.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/base.py index 180bf3a9a5..3d98fd4348 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -348,13 +350,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git 
a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc.py index bdd6e1dac5..06ea19c147 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc_asyncio.py index 26b1ccd58d..576d2bc035 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest.py index 99792711d7..68f496bccd 100644 --- 
a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.longrunning import operations_pb2 # type: ignore @@ -4210,7 +4212,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4353,7 +4354,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4496,7 +4496,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4640,7 +4639,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4788,7 +4786,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4935,7 +4932,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5052,7 +5048,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -5169,7 +5164,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5312,7 +5306,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5455,7 +5448,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_asyncio.py index 0c2412919e..f79d24be7a 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.longrunning import operations_pb2 # type: ignore @@ -877,9 +879,9 @@ def __init__( self._interceptor = interceptor or AsyncIndexEndpointServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4343,7 +4345,6 @@ async def __call__( timeout: 
Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4496,7 +4497,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4647,7 +4647,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4799,7 +4798,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4955,7 +4953,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5106,7 +5103,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5229,7 +5225,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5352,7 +5347,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5503,7 +5497,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -5654,7 +5647,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_base.py index 5a8c949ec6..055945e912 100644 --- a/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/index_endpoint_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/index_service/async_client.py b/google/cloud/aiplatform_v1/services/index_service/async_client.py index 1a6aff220c..16ac0bd21f 100644 --- a/google/cloud/aiplatform_v1/services/index_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/index_service/async_client.py @@ -287,21 +287,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.IndexServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.IndexService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.IndexService", 
- "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.IndexService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.IndexService", + "credentialsType": None, + } + ), ) async def create_index( diff --git a/google/cloud/aiplatform_v1/services/index_service/client.py b/google/cloud/aiplatform_v1/services/index_service/client.py index d0183ab8a1..773814a3d5 100644 --- a/google/cloud/aiplatform_v1/services/index_service/client.py +++ b/google/cloud/aiplatform_v1/services/index_service/client.py @@ -765,21 +765,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.IndexServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.IndexService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.IndexService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.IndexService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if 
hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.IndexService", + "credentialsType": None, + } + ), ) def create_index( diff --git a/google/cloud/aiplatform_v1/services/index_service/transports/base.py b/google/cloud/aiplatform_v1/services/index_service/transports/base.py index e41c1c1be7..8009b13149 100644 --- a/google/cloud/aiplatform_v1/services/index_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/index_service/transports/base.py @@ -335,13 +335,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/index_service/transports/rest.py b/google/cloud/aiplatform_v1/services/index_service/transports/rest.py index a66bfce697..6312672769 100644 --- a/google/cloud/aiplatform_v1/services/index_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/index_service/transports/rest.py @@ -3960,7 +3960,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4106,7 +4105,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4248,7 +4246,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -4393,7 +4390,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4544,7 +4540,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4688,7 +4683,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4804,7 +4798,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4920,7 +4913,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5064,7 +5056,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5206,7 +5197,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/index_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/index_service/transports/rest_asyncio.py index 2d2bc696d4..79eff3f107 100644 --- a/google/cloud/aiplatform_v1/services/index_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/index_service/transports/rest_asyncio.py @@ -808,9 +808,9 @@ def __init__( self._interceptor = interceptor or AsyncIndexServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4100,7 +4100,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4254,7 +4253,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4406,7 +4404,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4559,7 +4556,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4719,7 +4715,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -4871,7 +4866,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4995,7 +4989,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5119,7 +5112,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5271,7 +5263,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5423,7 +5414,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/job_service/async_client.py b/google/cloud/aiplatform_v1/services/job_service/async_client.py index e3e7c1ad41..5b7e22bcef 100644 --- a/google/cloud/aiplatform_v1/services/job_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/job_service/async_client.py @@ -56,7 +56,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import explanation from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job @@ -360,21 +362,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.JobServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.JobService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.JobService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.JobService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", 
lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.JobService", + "credentialsType": None, + } + ), ) async def create_custom_job( diff --git a/google/cloud/aiplatform_v1/services/job_service/client.py b/google/cloud/aiplatform_v1/services/job_service/client.py index 16de2f506c..c8c4a39ef0 100644 --- a/google/cloud/aiplatform_v1/services/job_service/client.py +++ b/google/cloud/aiplatform_v1/services/job_service/client.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import explanation from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job @@ -1145,21 +1147,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.JobServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.JobService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.JobService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.JobService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.JobService", + "credentialsType": None, + } + ), ) def create_custom_job( diff --git a/google/cloud/aiplatform_v1/services/job_service/transports/base.py b/google/cloud/aiplatform_v1/services/job_service/transports/base.py index fd2cf9cf9d..767b9da265 100644 --- a/google/cloud/aiplatform_v1/services/job_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/job_service/transports/base.py @@ -35,7 +35,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, @@ -788,13 +790,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/job_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/job_service/transports/grpc.py index 8ed92c3857..9e1fea3932 100644 --- 
a/google/cloud/aiplatform_v1/services/job_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/job_service/transports/grpc.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, @@ -668,12 +670,12 @@ def create_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "create_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CreateHyperparameterTuningJob", - request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, - response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["create_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CreateHyperparameterTuningJob", + request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, + response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["create_hyperparameter_tuning_job"] @@ -699,12 +701,12 @@ def get_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "get_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/GetHyperparameterTuningJob", - request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, - response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["get_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/GetHyperparameterTuningJob", + request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, + response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["get_hyperparameter_tuning_job"] @@ -731,12 +733,12 @@ def list_hyperparameter_tuning_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_hyperparameter_tuning_jobs" not in self._stubs: - self._stubs[ - "list_hyperparameter_tuning_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ListHyperparameterTuningJobs", - request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, - response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + self._stubs["list_hyperparameter_tuning_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ListHyperparameterTuningJobs", + request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, + response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + ) ) return self._stubs["list_hyperparameter_tuning_jobs"] @@ -762,12 +764,12 @@ def delete_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "delete_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/DeleteHyperparameterTuningJob", - request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/DeleteHyperparameterTuningJob", + request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_hyperparameter_tuning_job"] @@ -804,12 +806,12 @@ def cancel_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "cancel_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CancelHyperparameterTuningJob", - request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CancelHyperparameterTuningJob", + request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_hyperparameter_tuning_job"] @@ -1031,12 +1033,12 @@ def create_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_batch_prediction_job" not in self._stubs: - self._stubs[ - "create_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CreateBatchPredictionJob", - request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, - response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + self._stubs["create_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CreateBatchPredictionJob", + request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, + response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + ) ) return self._stubs["create_batch_prediction_job"] @@ -1091,12 +1093,12 @@ def list_batch_prediction_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_batch_prediction_jobs" not in self._stubs: - self._stubs[ - "list_batch_prediction_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ListBatchPredictionJobs", - request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, - response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + self._stubs["list_batch_prediction_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ListBatchPredictionJobs", + request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, + response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + ) ) return self._stubs["list_batch_prediction_jobs"] @@ -1122,12 +1124,12 @@ def delete_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_batch_prediction_job" not in self._stubs: - self._stubs[ - "delete_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/DeleteBatchPredictionJob", - request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/DeleteBatchPredictionJob", + request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_batch_prediction_job"] @@ -1161,12 +1163,12 @@ def cancel_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_batch_prediction_job" not in self._stubs: - self._stubs[ - "cancel_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CancelBatchPredictionJob", - request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CancelBatchPredictionJob", + request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_batch_prediction_job"] @@ -1194,12 +1196,12 @@ def create_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "create_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CreateModelDeploymentMonitoringJob", - request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["create_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CreateModelDeploymentMonitoringJob", + request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["create_model_deployment_monitoring_job"] @@ -1227,12 +1229,12 @@ def search_model_deployment_monitoring_stats_anomalies( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_deployment_monitoring_stats_anomalies" not in self._stubs: - self._stubs[ - "search_model_deployment_monitoring_stats_anomalies" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", - request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, - response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + self._stubs["search_model_deployment_monitoring_stats_anomalies"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", + request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, + response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + ) ) return self._stubs["search_model_deployment_monitoring_stats_anomalies"] @@ -1259,12 +1261,12 @@ def get_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "get_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/GetModelDeploymentMonitoringJob", - request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["get_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/GetModelDeploymentMonitoringJob", + request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["get_model_deployment_monitoring_job"] @@ -1291,12 +1293,12 @@ def list_model_deployment_monitoring_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_deployment_monitoring_jobs" not in self._stubs: - self._stubs[ - "list_model_deployment_monitoring_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ListModelDeploymentMonitoringJobs", - request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, - response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + self._stubs["list_model_deployment_monitoring_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ListModelDeploymentMonitoringJobs", + request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, + response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + ) ) return self._stubs["list_model_deployment_monitoring_jobs"] @@ -1323,12 +1325,12 @@ def update_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "update_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/UpdateModelDeploymentMonitoringJob", - request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/UpdateModelDeploymentMonitoringJob", + request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_model_deployment_monitoring_job"] @@ -1355,12 +1357,12 @@ def delete_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "delete_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/DeleteModelDeploymentMonitoringJob", - request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/DeleteModelDeploymentMonitoringJob", + request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_model_deployment_monitoring_job"] @@ -1389,12 +1391,12 @@ def pause_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "pause_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "pause_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/PauseModelDeploymentMonitoringJob", - request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["pause_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/PauseModelDeploymentMonitoringJob", + request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["pause_model_deployment_monitoring_job"] @@ -1422,12 +1424,12 @@ def resume_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "resume_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "resume_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ResumeModelDeploymentMonitoringJob", - request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["resume_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ResumeModelDeploymentMonitoringJob", + request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["resume_model_deployment_monitoring_job"] diff --git a/google/cloud/aiplatform_v1/services/job_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/job_service/transports/grpc_asyncio.py index 9dd833209e..55b67f9597 100644 --- a/google/cloud/aiplatform_v1/services/job_service/transports/grpc_asyncio.py +++ 
b/google/cloud/aiplatform_v1/services/job_service/transports/grpc_asyncio.py @@ -41,7 +41,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, @@ -686,12 +688,12 @@ def create_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "create_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CreateHyperparameterTuningJob", - request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, - response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["create_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CreateHyperparameterTuningJob", + request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, + response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["create_hyperparameter_tuning_job"] @@ -717,12 +719,12 @@ def get_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "get_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/GetHyperparameterTuningJob", - request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, - response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["get_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/GetHyperparameterTuningJob", + request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, + response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["get_hyperparameter_tuning_job"] @@ -749,12 +751,12 @@ def list_hyperparameter_tuning_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_hyperparameter_tuning_jobs" not in self._stubs: - self._stubs[ - "list_hyperparameter_tuning_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ListHyperparameterTuningJobs", - request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, - response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + self._stubs["list_hyperparameter_tuning_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ListHyperparameterTuningJobs", + request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, + response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + ) ) return self._stubs["list_hyperparameter_tuning_jobs"] @@ -781,12 +783,12 @@ def delete_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "delete_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/DeleteHyperparameterTuningJob", - request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/DeleteHyperparameterTuningJob", + request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_hyperparameter_tuning_job"] @@ -825,12 +827,12 @@ def cancel_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "cancel_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CancelHyperparameterTuningJob", - request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CancelHyperparameterTuningJob", + request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_hyperparameter_tuning_job"] @@ -1060,12 +1062,12 @@ def create_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_batch_prediction_job" not in self._stubs: - self._stubs[ - "create_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CreateBatchPredictionJob", - request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, - response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + self._stubs["create_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CreateBatchPredictionJob", + request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, + response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + ) ) return self._stubs["create_batch_prediction_job"] @@ -1120,12 +1122,12 @@ def list_batch_prediction_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_batch_prediction_jobs" not in self._stubs: - self._stubs[ - "list_batch_prediction_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ListBatchPredictionJobs", - request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, - response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + self._stubs["list_batch_prediction_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ListBatchPredictionJobs", + request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, + response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + ) ) return self._stubs["list_batch_prediction_jobs"] @@ -1152,12 +1154,12 @@ def delete_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_batch_prediction_job" not in self._stubs: - self._stubs[ - "delete_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/DeleteBatchPredictionJob", - request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/DeleteBatchPredictionJob", + request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_batch_prediction_job"] @@ -1193,12 +1195,12 @@ def cancel_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_batch_prediction_job" not in self._stubs: - self._stubs[ - "cancel_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CancelBatchPredictionJob", - request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CancelBatchPredictionJob", + request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_batch_prediction_job"] @@ -1226,12 +1228,12 @@ def create_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "create_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/CreateModelDeploymentMonitoringJob", - request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["create_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/CreateModelDeploymentMonitoringJob", + request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["create_model_deployment_monitoring_job"] @@ -1259,12 +1261,12 @@ def search_model_deployment_monitoring_stats_anomalies( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_deployment_monitoring_stats_anomalies" not in self._stubs: - self._stubs[ - "search_model_deployment_monitoring_stats_anomalies" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", - request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, - response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + self._stubs["search_model_deployment_monitoring_stats_anomalies"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", + request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, + response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + ) ) return self._stubs["search_model_deployment_monitoring_stats_anomalies"] @@ -1291,12 +1293,12 @@ def get_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "get_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/GetModelDeploymentMonitoringJob", - request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["get_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/GetModelDeploymentMonitoringJob", + request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["get_model_deployment_monitoring_job"] @@ -1323,12 +1325,12 @@ def list_model_deployment_monitoring_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_deployment_monitoring_jobs" not in self._stubs: - self._stubs[ - "list_model_deployment_monitoring_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ListModelDeploymentMonitoringJobs", - request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, - response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + self._stubs["list_model_deployment_monitoring_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ListModelDeploymentMonitoringJobs", + request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, + response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + ) ) return self._stubs["list_model_deployment_monitoring_jobs"] @@ -1355,12 +1357,12 @@ def update_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "update_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/UpdateModelDeploymentMonitoringJob", - request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/UpdateModelDeploymentMonitoringJob", + request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_model_deployment_monitoring_job"] @@ -1387,12 +1389,12 @@ def delete_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "delete_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/DeleteModelDeploymentMonitoringJob", - request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/DeleteModelDeploymentMonitoringJob", + request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_model_deployment_monitoring_job"] @@ -1422,12 +1424,12 @@ def pause_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "pause_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "pause_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/PauseModelDeploymentMonitoringJob", - request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["pause_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/PauseModelDeploymentMonitoringJob", + request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["pause_model_deployment_monitoring_job"] @@ -1456,12 +1458,12 @@ def resume_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "resume_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "resume_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.JobService/ResumeModelDeploymentMonitoringJob", - request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["resume_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.JobService/ResumeModelDeploymentMonitoringJob", + request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["resume_model_deployment_monitoring_job"] diff --git a/google/cloud/aiplatform_v1/services/job_service/transports/rest.py b/google/cloud/aiplatform_v1/services/job_service/transports/rest.py index 64c6f77f41..3b2bd4a76c 100644 --- a/google/cloud/aiplatform_v1/services/job_service/transports/rest.py +++ 
b/google/cloud/aiplatform_v1/services/job_service/transports/rest.py @@ -44,7 +44,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, @@ -9771,7 +9773,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -9917,7 +9918,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -10063,7 +10063,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10210,7 +10209,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10363,7 +10361,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10507,7 +10504,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -10623,7 +10619,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10739,7 +10734,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -10885,7 +10879,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -11029,7 +11022,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/job_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/job_service/transports/rest_asyncio.py index 04cda12a57..297fc26c4e 100644 --- a/google/cloud/aiplatform_v1/services/job_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/job_service/transports/rest_asyncio.py @@ -57,7 +57,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, @@ -2185,9 +2187,9 @@ def __init__( self._interceptor = interceptor or AsyncJobServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - 
operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -10246,7 +10248,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -10400,7 +10401,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -10554,7 +10554,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10709,7 +10708,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10870,7 +10868,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -11022,7 +11019,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -11146,7 +11142,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -11270,7 +11265,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -11424,7 +11418,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -11576,7 +11569,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/job_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/job_service/transports/rest_base.py index fe67301f48..a83ac9c96c 100644 --- a/google/cloud/aiplatform_v1/services/job_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/job_service/transports/rest_base.py @@ -34,7 +34,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, diff --git a/google/cloud/aiplatform_v1/services/llm_utility_service/async_client.py b/google/cloud/aiplatform_v1/services/llm_utility_service/async_client.py index 734041a3e0..73fe6e03eb 100644 --- a/google/cloud/aiplatform_v1/services/llm_utility_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/llm_utility_service/async_client.py @@ -282,21 +282,23 @@ def __init__( ): # pragma: 
NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.LlmUtilityServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", + "credentialsType": None, + } + ), ) async def count_tokens( diff --git a/google/cloud/aiplatform_v1/services/llm_utility_service/client.py b/google/cloud/aiplatform_v1/services/llm_utility_service/client.py index 51c21c7101..86ad43747b 100644 --- a/google/cloud/aiplatform_v1/services/llm_utility_service/client.py +++ b/google/cloud/aiplatform_v1/services/llm_utility_service/client.py @@ -764,21 +764,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.LlmUtilityServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.LlmUtilityService", + "credentialsType": None, + } + ), ) def count_tokens( diff --git a/google/cloud/aiplatform_v1/services/llm_utility_service/transports/base.py b/google/cloud/aiplatform_v1/services/llm_utility_service/transports/base.py index 2089960a7e..9857c46f81 100644 --- a/google/cloud/aiplatform_v1/services/llm_utility_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/llm_utility_service/transports/base.py @@ -257,13 +257,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest.py b/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest.py index e18808fb1e..63641e656c 100644 --- 
a/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest.py @@ -909,7 +909,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1052,7 +1051,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1194,7 +1192,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1337,7 +1334,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1485,7 +1481,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1630,7 +1625,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1747,7 +1741,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1863,7 +1856,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -2006,7 +1998,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2149,7 +2140,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest_asyncio.py index a1a2703f19..6525fe12af 100644 --- a/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/llm_utility_service/transports/rest_asyncio.py @@ -992,7 +992,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1145,7 +1144,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1298,7 +1296,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1452,7 +1449,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1610,7 +1606,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -1761,7 +1756,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1884,7 +1878,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2007,7 +2000,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2160,7 +2152,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2313,7 +2304,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/match_service/async_client.py b/google/cloud/aiplatform_v1/services/match_service/async_client.py index eacda917a3..090175ebc5 100644 --- a/google/cloud/aiplatform_v1/services/match_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/match_service/async_client.py @@ -274,21 +274,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.MatchServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.MatchService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.MatchService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.MatchService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.MatchService", + "credentialsType": None, + } + ), ) async def find_neighbors( diff --git a/google/cloud/aiplatform_v1/services/match_service/client.py b/google/cloud/aiplatform_v1/services/match_service/client.py index 34c5e18081..ed45e283d5 100644 --- a/google/cloud/aiplatform_v1/services/match_service/client.py +++ b/google/cloud/aiplatform_v1/services/match_service/client.py @@ -732,21 +732,25 @@ def __init__( ): # pragma: NO COVER 
_LOGGER.debug( "Created client `google.cloud.aiplatform_v1.MatchServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.MatchService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.MatchService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.MatchService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.MatchService", + "credentialsType": None, + } + ), ) def find_neighbors( diff --git a/google/cloud/aiplatform_v1/services/match_service/transports/base.py b/google/cloud/aiplatform_v1/services/match_service/transports/base.py index 02fd1e08fc..f0e819520c 100644 --- a/google/cloud/aiplatform_v1/services/match_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/match_service/transports/base.py @@ -256,13 +256,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git 
a/google/cloud/aiplatform_v1/services/match_service/transports/rest.py b/google/cloud/aiplatform_v1/services/match_service/transports/rest.py index d60fdfbdc1..3efde86168 100644 --- a/google/cloud/aiplatform_v1/services/match_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/match_service/transports/rest.py @@ -913,7 +913,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1059,7 +1058,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1201,7 +1199,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1346,7 +1343,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1497,7 +1493,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1641,7 +1636,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1757,7 +1751,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1873,7 +1866,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -2017,7 +2009,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2159,7 +2150,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/match_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/match_service/transports/rest_asyncio.py index 55c5143013..f1612a6a0a 100644 --- a/google/cloud/aiplatform_v1/services/match_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/match_service/transports/rest_asyncio.py @@ -993,7 +993,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1147,7 +1146,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1299,7 +1297,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1452,7 +1449,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1612,7 +1608,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -1764,7 +1759,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1888,7 +1882,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2012,7 +2005,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2164,7 +2156,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2316,7 +2307,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/metadata_service/async_client.py b/google/cloud/aiplatform_v1/services/metadata_service/async_client.py index 8d1389b1eb..5993b4a989 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/async_client.py @@ -58,10 +58,14 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store -from google.cloud.aiplatform_v1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -308,21 +312,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.MetadataServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.MetadataService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.MetadataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": 
"google.cloud.aiplatform.v1.MetadataService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.MetadataService", + "credentialsType": None, + } + ), ) async def create_metadata_store( diff --git a/google/cloud/aiplatform_v1/services/metadata_service/client.py b/google/cloud/aiplatform_v1/services/metadata_service/client.py index a5e0bf3d0e..28b77fe7df 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/client.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/client.py @@ -74,10 +74,14 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store -from google.cloud.aiplatform_v1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -853,21 +857,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.MetadataServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.MetadataService", - 
"universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.MetadataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.MetadataService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.MetadataService", + "credentialsType": None, + } + ), ) def create_metadata_store( diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py index ab59fa7212..885c9b36ae 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py @@ -36,7 +36,9 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store from google.cloud.location import locations_pb2 # type: ignore @@ -727,13 +729,19 @@ def 
get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py index b466a808ef..897765ec9b 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py @@ -39,7 +39,9 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store from google.cloud.location import locations_pb2 # type: ignore @@ -808,12 +810,12 @@ def add_context_artifacts_and_executions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "add_context_artifacts_and_executions" not in self._stubs: - self._stubs[ - "add_context_artifacts_and_executions" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/AddContextArtifactsAndExecutions", - request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, - response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + self._stubs["add_context_artifacts_and_executions"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/AddContextArtifactsAndExecutions", + request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, + response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + ) ) return self._stubs["add_context_artifacts_and_executions"] @@ -905,12 +907,12 @@ def query_context_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "query_context_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_context_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/QueryContextLineageSubgraph", - request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_context_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/QueryContextLineageSubgraph", + request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_context_lineage_subgraph"] @@ -1131,12 +1133,12 @@ def query_execution_inputs_and_outputs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "query_execution_inputs_and_outputs" not in self._stubs: - self._stubs[ - "query_execution_inputs_and_outputs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/QueryExecutionInputsAndOutputs", - request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_execution_inputs_and_outputs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/QueryExecutionInputsAndOutputs", + request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_execution_inputs_and_outputs"] @@ -1251,12 +1253,12 @@ def query_artifact_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "query_artifact_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_artifact_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/QueryArtifactLineageSubgraph", - request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_artifact_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/QueryArtifactLineageSubgraph", + request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_artifact_lineage_subgraph"] diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py index 7ab1b9bafc..9929403b3a 100644 --- 
a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py @@ -42,7 +42,9 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store from google.cloud.location import locations_pb2 # type: ignore @@ -837,12 +839,12 @@ def add_context_artifacts_and_executions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "add_context_artifacts_and_executions" not in self._stubs: - self._stubs[ - "add_context_artifacts_and_executions" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/AddContextArtifactsAndExecutions", - request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, - response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + self._stubs["add_context_artifacts_and_executions"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/AddContextArtifactsAndExecutions", + request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, + response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + ) ) return self._stubs["add_context_artifacts_and_executions"] @@ -934,12 +936,12 @@ def query_context_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "query_context_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_context_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/QueryContextLineageSubgraph", - request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_context_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/QueryContextLineageSubgraph", + request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_context_lineage_subgraph"] @@ -1170,12 +1172,12 @@ def query_execution_inputs_and_outputs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "query_execution_inputs_and_outputs" not in self._stubs: - self._stubs[ - "query_execution_inputs_and_outputs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/QueryExecutionInputsAndOutputs", - request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_execution_inputs_and_outputs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/QueryExecutionInputsAndOutputs", + request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_execution_inputs_and_outputs"] @@ -1291,12 +1293,12 @@ def query_artifact_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "query_artifact_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_artifact_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MetadataService/QueryArtifactLineageSubgraph", - request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_artifact_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/QueryArtifactLineageSubgraph", + request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_artifact_lineage_subgraph"] diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/rest.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/rest.py index ff70c8bba1..9b4fff920b 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/rest.py @@ -45,7 +45,9 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store from google.longrunning import operations_pb2 # type: ignore @@ -9455,7 +9457,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -9597,7 +9598,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -9739,7 +9739,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -9882,7 +9881,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10030,7 +10028,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10174,7 +10171,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10290,7 +10286,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10406,7 +10401,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -10548,7 +10542,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -10690,7 +10683,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_asyncio.py index 1016160658..dab211831b 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_asyncio.py @@ -58,7 +58,9 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store from google.longrunning import operations_pb2 # type: ignore @@ -2258,9 +2260,9 @@ def __init__( self._interceptor = interceptor or AsyncMetadataServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -9918,7 +9920,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -10071,7 +10072,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -10224,7 +10224,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10378,7 +10377,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10536,7 +10534,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10687,7 +10684,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10812,7 +10808,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10937,7 +10932,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -11090,7 +11084,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -11243,7 +11236,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_base.py index c796c585c5..58a0d06380 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/rest_base.py @@ -35,7 +35,9 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/migration_service/async_client.py b/google/cloud/aiplatform_v1/services/migration_service/async_client.py index bff8c95594..c65632e548 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/migration_service/async_client.py @@ -295,21 +295,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.MigrationServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.MigrationService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.MigrationService", - 
"credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.MigrationService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.MigrationService", + "credentialsType": None, + } + ), ) async def search_migratable_resources( diff --git a/google/cloud/aiplatform_v1/services/migration_service/client.py b/google/cloud/aiplatform_v1/services/migration_service/client.py index bdf0d9ec72..1fe8e002d9 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1/services/migration_service/client.py @@ -870,21 +870,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.MigrationServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.MigrationService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.MigrationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.MigrationService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, 
+ "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.MigrationService", + "credentialsType": None, + } + ), ) def search_migratable_resources( diff --git a/google/cloud/aiplatform_v1/services/migration_service/transports/base.py b/google/cloud/aiplatform_v1/services/migration_service/transports/base.py index f6f6280c2c..c11f71c336 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/migration_service/transports/base.py @@ -259,13 +259,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/migration_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/migration_service/transports/grpc.py index ed18837e31..a6a687f809 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/migration_service/transports/grpc.py @@ -368,12 +368,12 @@ def search_migratable_resources( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_migratable_resources" not in self._stubs: - self._stubs[ - "search_migratable_resources" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MigrationService/SearchMigratableResources", - request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, - response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + self._stubs["search_migratable_resources"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MigrationService/SearchMigratableResources", + request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, + response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + ) ) return self._stubs["search_migratable_resources"] diff --git a/google/cloud/aiplatform_v1/services/migration_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/migration_service/transports/grpc_asyncio.py index 2f8e0378f5..e215164984 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/migration_service/transports/grpc_asyncio.py @@ -376,12 +376,12 @@ def search_migratable_resources( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_migratable_resources" not in self._stubs: - self._stubs[ - "search_migratable_resources" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.MigrationService/SearchMigratableResources", - request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, - response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + self._stubs["search_migratable_resources"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.MigrationService/SearchMigratableResources", + request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, + response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + ) ) return self._stubs["search_migratable_resources"] diff --git a/google/cloud/aiplatform_v1/services/migration_service/transports/rest.py b/google/cloud/aiplatform_v1/services/migration_service/transports/rest.py index 86e3b7c09f..e2f45c33eb 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/migration_service/transports/rest.py @@ -2876,7 +2876,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3018,7 +3017,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3160,7 +3158,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3303,7 +3300,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -3451,7 +3447,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3596,7 +3591,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3713,7 +3707,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -3829,7 +3822,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -3971,7 +3963,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4113,7 +4104,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/migration_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/migration_service/transports/rest_asyncio.py index 106cea9274..8e75ad9035 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/migration_service/transports/rest_asyncio.py @@ -532,9 +532,9 @@ def __init__( self._interceptor = interceptor or AsyncMigrationServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -2956,7 +2956,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3109,7 +3108,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3262,7 +3260,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3416,7 +3413,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3574,7 +3570,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -3725,7 +3720,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3850,7 +3844,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -3975,7 +3968,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4128,7 +4120,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4281,7 +4272,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/model_garden_service/async_client.py b/google/cloud/aiplatform_v1/services/model_garden_service/async_client.py index eadac02971..bdee217361 100644 --- a/google/cloud/aiplatform_v1/services/model_garden_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/model_garden_service/async_client.py @@ -291,21 +291,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ModelGardenServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", + "credentialsType": None, + } + ), ) async def get_publisher_model( diff --git a/google/cloud/aiplatform_v1/services/model_garden_service/client.py b/google/cloud/aiplatform_v1/services/model_garden_service/client.py index 4f3374af15..40a77d4418 100644 --- a/google/cloud/aiplatform_v1/services/model_garden_service/client.py +++ 
b/google/cloud/aiplatform_v1/services/model_garden_service/client.py @@ -804,21 +804,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ModelGardenServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ModelGardenService", + "credentialsType": None, + } + ), ) def get_publisher_model( diff --git a/google/cloud/aiplatform_v1/services/model_garden_service/transports/base.py b/google/cloud/aiplatform_v1/services/model_garden_service/transports/base.py index c73a3524a8..9cbb7afcca 100644 --- a/google/cloud/aiplatform_v1/services/model_garden_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/model_garden_service/transports/base.py @@ -259,13 +259,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest.py b/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest.py index c79f847c50..6a75e97087 100644 --- a/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest.py @@ -2854,7 +2854,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -2997,7 +2996,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3140,7 +3138,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3284,7 +3281,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3432,7 +3428,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3579,7 +3574,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3696,7 +3690,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -3813,7 +3806,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -3956,7 +3948,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4099,7 +4090,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest_asyncio.py index b4a256c15a..cf2802589e 100644 --- a/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/model_garden_service/transports/rest_asyncio.py @@ -528,9 +528,9 @@ def __init__( self._interceptor = interceptor or AsyncModelGardenServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -2932,7 +2932,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3085,7 +3084,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -3238,7 +3236,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3392,7 +3389,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3550,7 +3546,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3701,7 +3696,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3824,7 +3818,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -3947,7 +3940,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4100,7 +4092,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4251,7 +4242,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/model_service/async_client.py b/google/cloud/aiplatform_v1/services/model_service/async_client.py index d74f8b06f2..69d611ec0e 100644 --- a/google/cloud/aiplatform_v1/services/model_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/model_service/async_client.py @@ -55,7 +55,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -304,21 +306,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ModelServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ModelService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ModelService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ModelService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, 
"get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ModelService", + "credentialsType": None, + } + ), ) async def upload_model( diff --git a/google/cloud/aiplatform_v1/services/model_service/client.py b/google/cloud/aiplatform_v1/services/model_service/client.py index e989800a46..8cd2a5ca5b 100644 --- a/google/cloud/aiplatform_v1/services/model_service/client.py +++ b/google/cloud/aiplatform_v1/services/model_service/client.py @@ -71,7 +71,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -862,21 +864,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ModelServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ModelService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ModelService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ModelService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ModelService", + "credentialsType": None, + } + ), ) def upload_model( diff --git a/google/cloud/aiplatform_v1/services/model_service/transports/base.py b/google/cloud/aiplatform_v1/services/model_service/transports/base.py index 8a86ad11ef..77fd300f0a 100644 --- a/google/cloud/aiplatform_v1/services/model_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/model_service/transports/base.py @@ -31,7 +31,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.cloud.location import locations_pb2 # type: ignore @@ -528,13 +530,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/model_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/model_service/transports/grpc.py index ad32b09356..6d64cfa1bc 100644 --- 
a/google/cloud/aiplatform_v1/services/model_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/model_service/transports/grpc.py @@ -34,7 +34,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.cloud.location import locations_pb2 # type: ignore @@ -474,12 +476,12 @@ def list_model_version_checkpoints( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_version_checkpoints" not in self._stubs: - self._stubs[ - "list_model_version_checkpoints" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/ListModelVersionCheckpoints", - request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, - response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + self._stubs["list_model_version_checkpoints"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/ListModelVersionCheckpoints", + request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, + response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + ) ) return self._stubs["list_model_version_checkpoints"] @@ -531,12 +533,12 @@ def update_explanation_dataset( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_explanation_dataset" not in self._stubs: - self._stubs[ - "update_explanation_dataset" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/UpdateExplanationDataset", - request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_explanation_dataset"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/UpdateExplanationDataset", + request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_explanation_dataset"] @@ -745,12 +747,12 @@ def batch_import_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_import_model_evaluation_slices" not in self._stubs: - self._stubs[ - "batch_import_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/BatchImportModelEvaluationSlices", - request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + self._stubs["batch_import_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/BatchImportModelEvaluationSlices", + request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["batch_import_model_evaluation_slices"] @@ -778,12 +780,12 @@ def batch_import_evaluated_annotations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_import_evaluated_annotations" not in self._stubs: - self._stubs[ - "batch_import_evaluated_annotations" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/BatchImportEvaluatedAnnotations", - request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, - response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + self._stubs["batch_import_evaluated_annotations"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/BatchImportEvaluatedAnnotations", + request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, + response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + ) ) return self._stubs["batch_import_evaluated_annotations"] @@ -866,12 +868,12 @@ def get_model_evaluation_slice( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_model_evaluation_slice" not in self._stubs: - self._stubs[ - "get_model_evaluation_slice" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/GetModelEvaluationSlice", - request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, - response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + self._stubs["get_model_evaluation_slice"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/GetModelEvaluationSlice", + request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, + response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + ) ) return self._stubs["get_model_evaluation_slice"] @@ -897,12 +899,12 @@ def list_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_evaluation_slices" not in self._stubs: - self._stubs[ - "list_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/ListModelEvaluationSlices", - request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + self._stubs["list_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/ListModelEvaluationSlices", + request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["list_model_evaluation_slices"] diff --git a/google/cloud/aiplatform_v1/services/model_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/model_service/transports/grpc_asyncio.py index f70e23bb16..b9415e9bff 100644 --- a/google/cloud/aiplatform_v1/services/model_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/model_service/transports/grpc_asyncio.py @@ -37,7 +37,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.cloud.location import locations_pb2 # type: ignore @@ -488,12 +490,12 @@ def list_model_version_checkpoints( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_version_checkpoints" not in self._stubs: - self._stubs[ - "list_model_version_checkpoints" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/ListModelVersionCheckpoints", - request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, - response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + self._stubs["list_model_version_checkpoints"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/ListModelVersionCheckpoints", + request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, + response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + ) ) return self._stubs["list_model_version_checkpoints"] @@ -546,12 +548,12 @@ def update_explanation_dataset( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_explanation_dataset" not in self._stubs: - self._stubs[ - "update_explanation_dataset" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/UpdateExplanationDataset", - request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_explanation_dataset"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/UpdateExplanationDataset", + request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_explanation_dataset"] @@ -768,12 +770,12 @@ def batch_import_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_import_model_evaluation_slices" not in self._stubs: - self._stubs[ - "batch_import_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/BatchImportModelEvaluationSlices", - request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + self._stubs["batch_import_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/BatchImportModelEvaluationSlices", + request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["batch_import_model_evaluation_slices"] @@ -801,12 +803,12 @@ def batch_import_evaluated_annotations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_import_evaluated_annotations" not in self._stubs: - self._stubs[ - "batch_import_evaluated_annotations" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/BatchImportEvaluatedAnnotations", - request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, - response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + self._stubs["batch_import_evaluated_annotations"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/BatchImportEvaluatedAnnotations", + request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, + response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + ) ) return self._stubs["batch_import_evaluated_annotations"] @@ -890,12 +892,12 @@ def get_model_evaluation_slice( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_model_evaluation_slice" not in self._stubs: - self._stubs[ - "get_model_evaluation_slice" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/GetModelEvaluationSlice", - request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, - response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + self._stubs["get_model_evaluation_slice"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/GetModelEvaluationSlice", + request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, + response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + ) ) return self._stubs["get_model_evaluation_slice"] @@ -921,12 +923,12 @@ def list_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_evaluation_slices" not in self._stubs: - self._stubs[ - "list_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.ModelService/ListModelEvaluationSlices", - request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + self._stubs["list_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.ModelService/ListModelEvaluationSlices", + request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["list_model_evaluation_slices"] diff --git a/google/cloud/aiplatform_v1/services/model_service/transports/rest.py b/google/cloud/aiplatform_v1/services/model_service/transports/rest.py index 617a4ea09f..839dd31ac3 100644 --- a/google/cloud/aiplatform_v1/services/model_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/model_service/transports/rest.py @@ -40,7 
+40,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.longrunning import operations_pb2 # type: ignore @@ -6660,7 +6662,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6806,7 +6807,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6948,7 +6948,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7093,7 +7092,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7244,7 +7242,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7388,7 +7385,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7504,7 +7500,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -7620,7 +7615,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7764,7 +7758,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7906,7 +7899,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/model_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/model_service/transports/rest_asyncio.py index 2549d7f1e1..31cec31efc 100644 --- a/google/cloud/aiplatform_v1/services/model_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/model_service/transports/rest_asyncio.py @@ -53,7 +53,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.longrunning import operations_pb2 # type: ignore @@ -1504,9 +1506,9 @@ def __init__( self._interceptor = interceptor or AsyncModelServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): 
"""Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -6945,7 +6947,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7099,7 +7100,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7251,7 +7251,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7404,7 +7403,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7564,7 +7562,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7716,7 +7713,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7840,7 +7836,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7964,7 +7959,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -8116,7 +8110,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8268,7 +8261,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/model_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/model_service/transports/rest_base.py index e6afd8dc1a..ad72576deb 100644 --- a/google/cloud/aiplatform_v1/services/model_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/model_service/transports/rest_base.py @@ -30,7 +30,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/notebook_service/async_client.py b/google/cloud/aiplatform_v1/services/notebook_service/async_client.py index 70e766c1fd..fd5bf24485 100644 --- a/google/cloud/aiplatform_v1/services/notebook_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/async_client.py @@ -59,7 +59,9 @@ ) from google.cloud.aiplatform_v1.types import notebook_idle_shutdown_config from google.cloud.aiplatform_v1.types import notebook_runtime -from google.cloud.aiplatform_v1.types import notebook_runtime as gca_notebook_runtime +from google.cloud.aiplatform_v1.types 
import ( + notebook_runtime as gca_notebook_runtime, +) from google.cloud.aiplatform_v1.types import notebook_runtime_template_ref from google.cloud.aiplatform_v1.types import notebook_service from google.cloud.aiplatform_v1.types import notebook_software_config @@ -322,21 +324,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.NotebookServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.NotebookService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.NotebookService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.NotebookService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.NotebookService", + "credentialsType": None, + } + ), ) async def create_notebook_runtime_template( diff --git a/google/cloud/aiplatform_v1/services/notebook_service/client.py b/google/cloud/aiplatform_v1/services/notebook_service/client.py index 58fa5b5dec..873b6d193a 100644 --- a/google/cloud/aiplatform_v1/services/notebook_service/client.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/client.py @@ -75,7 +75,9 @@ ) from google.cloud.aiplatform_v1.types import 
notebook_idle_shutdown_config from google.cloud.aiplatform_v1.types import notebook_runtime -from google.cloud.aiplatform_v1.types import notebook_runtime as gca_notebook_runtime +from google.cloud.aiplatform_v1.types import ( + notebook_runtime as gca_notebook_runtime, +) from google.cloud.aiplatform_v1.types import notebook_runtime_template_ref from google.cloud.aiplatform_v1.types import notebook_service from google.cloud.aiplatform_v1.types import notebook_software_config @@ -890,21 +892,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.NotebookServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.NotebookService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.NotebookService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.NotebookService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.NotebookService", + "credentialsType": None, + } + ), ) def create_notebook_runtime_template( diff --git a/google/cloud/aiplatform_v1/services/notebook_service/transports/base.py b/google/cloud/aiplatform_v1/services/notebook_service/transports/base.py index 50cef4f5c2..b3f870d0b2 100644 --- 
a/google/cloud/aiplatform_v1/services/notebook_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/transports/base.py @@ -475,13 +475,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc.py index e12b9ce58a..ddae9890f5 100644 --- a/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc.py @@ -368,12 +368,12 @@ def create_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_notebook_runtime_template" not in self._stubs: - self._stubs[ - "create_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookRuntimeTemplate", - request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookRuntimeTemplate", + request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_runtime_template"] @@ -399,12 +399,12 @@ def get_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_notebook_runtime_template" not in self._stubs: - self._stubs[ - "get_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/GetNotebookRuntimeTemplate", - request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["get_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/GetNotebookRuntimeTemplate", + request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["get_notebook_runtime_template"] @@ -431,12 +431,12 @@ def list_notebook_runtime_templates( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_notebook_runtime_templates" not in self._stubs: - self._stubs[ - "list_notebook_runtime_templates" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/ListNotebookRuntimeTemplates", - request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, - response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + self._stubs["list_notebook_runtime_templates"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/ListNotebookRuntimeTemplates", + request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, + response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + ) ) return self._stubs["list_notebook_runtime_templates"] @@ -463,12 +463,12 @@ def delete_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_notebook_runtime_template" not in self._stubs: - self._stubs[ - "delete_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookRuntimeTemplate", - request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookRuntimeTemplate", + request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_runtime_template"] @@ -495,12 +495,12 @@ def update_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_notebook_runtime_template" not in self._stubs: - self._stubs[ - "update_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/UpdateNotebookRuntimeTemplate", - request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["update_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/UpdateNotebookRuntimeTemplate", + request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["update_notebook_runtime_template"] @@ -724,12 +724,12 @@ def create_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_notebook_execution_job" not in self._stubs: - self._stubs[ - "create_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookExecutionJob", - request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookExecutionJob", + request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_execution_job"] @@ -755,12 +755,12 @@ def get_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_notebook_execution_job" not in self._stubs: - self._stubs[ - "get_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/GetNotebookExecutionJob", - request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, - response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + self._stubs["get_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/GetNotebookExecutionJob", + request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, + response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + ) ) return self._stubs["get_notebook_execution_job"] @@ -786,12 +786,12 @@ def list_notebook_execution_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_notebook_execution_jobs" not in self._stubs: - self._stubs[ - "list_notebook_execution_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/ListNotebookExecutionJobs", - request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, - response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + self._stubs["list_notebook_execution_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/ListNotebookExecutionJobs", + request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, + response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + ) ) return self._stubs["list_notebook_execution_jobs"] @@ -816,12 +816,12 @@ def delete_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_notebook_execution_job" not in self._stubs: - self._stubs[ - "delete_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookExecutionJob", - request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookExecutionJob", + request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_execution_job"] diff --git a/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc_asyncio.py index 768ab1d057..9568b6a8b0 100644 --- a/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/transports/grpc_asyncio.py @@ -376,12 +376,12 @@ def create_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_notebook_runtime_template" not in self._stubs: - self._stubs[ - "create_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookRuntimeTemplate", - request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookRuntimeTemplate", + request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_runtime_template"] @@ -407,12 +407,12 @@ def get_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_notebook_runtime_template" not in self._stubs: - self._stubs[ - "get_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/GetNotebookRuntimeTemplate", - request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["get_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/GetNotebookRuntimeTemplate", + request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["get_notebook_runtime_template"] @@ -439,12 +439,12 @@ def list_notebook_runtime_templates( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_notebook_runtime_templates" not in self._stubs: - self._stubs[ - "list_notebook_runtime_templates" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/ListNotebookRuntimeTemplates", - request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, - response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + self._stubs["list_notebook_runtime_templates"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/ListNotebookRuntimeTemplates", + request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, + response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + ) ) return self._stubs["list_notebook_runtime_templates"] @@ -471,12 +471,12 @@ def delete_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_notebook_runtime_template" not in self._stubs: - self._stubs[ - "delete_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookRuntimeTemplate", - request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookRuntimeTemplate", + request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_runtime_template"] @@ -503,12 +503,12 @@ def update_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_notebook_runtime_template" not in self._stubs: - self._stubs[ - "update_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/UpdateNotebookRuntimeTemplate", - request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["update_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/UpdateNotebookRuntimeTemplate", + request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["update_notebook_runtime_template"] @@ -739,12 +739,12 @@ def create_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_notebook_execution_job" not in self._stubs: - self._stubs[ - "create_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookExecutionJob", - request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/CreateNotebookExecutionJob", + request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_execution_job"] @@ -770,12 +770,12 @@ def get_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_notebook_execution_job" not in self._stubs: - self._stubs[ - "get_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/GetNotebookExecutionJob", - request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, - response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + self._stubs["get_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/GetNotebookExecutionJob", + request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, + response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + ) ) return self._stubs["get_notebook_execution_job"] @@ -801,12 +801,12 @@ def list_notebook_execution_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_notebook_execution_jobs" not in self._stubs: - self._stubs[ - "list_notebook_execution_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/ListNotebookExecutionJobs", - request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, - response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + self._stubs["list_notebook_execution_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/ListNotebookExecutionJobs", + request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, + response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + ) ) return self._stubs["list_notebook_execution_jobs"] @@ -832,12 +832,12 @@ def delete_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_notebook_execution_job" not in self._stubs: - self._stubs[ - "delete_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookExecutionJob", - request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.NotebookService/DeleteNotebookExecutionJob", + request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_execution_job"] diff --git a/google/cloud/aiplatform_v1/services/notebook_service/transports/rest.py b/google/cloud/aiplatform_v1/services/notebook_service/transports/rest.py index d98728c225..510172ece9 100644 --- a/google/cloud/aiplatform_v1/services/notebook_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/transports/rest.py @@ -6042,7 +6042,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6184,7 +6183,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6326,7 +6324,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6469,7 +6466,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -6617,7 +6613,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6761,7 +6756,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6877,7 +6871,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6993,7 +6986,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7135,7 +7127,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7277,7 +7268,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/notebook_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/notebook_service/transports/rest_asyncio.py index 4e51bfd17c..1020e8ab06 100644 --- a/google/cloud/aiplatform_v1/services/notebook_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/notebook_service/transports/rest_asyncio.py @@ -1349,9 +1349,9 @@ def __init__( self._interceptor = interceptor or AsyncNotebookServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -6292,7 +6292,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6445,7 +6444,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6598,7 +6596,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6752,7 +6749,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6910,7 +6906,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -7061,7 +7056,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7186,7 +7180,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7311,7 +7304,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7464,7 +7456,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7617,7 +7608,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/async_client.py b/google/cloud/aiplatform_v1/services/persistent_resource_service/async_client.py index 48c97f5039..2e4f86bc01 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/persistent_resource_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.persistent_resource_service import pagers +from google.cloud.aiplatform_v1.services.persistent_resource_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import persistent_resource @@ -319,21 +321,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.PersistentResourceServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": 
getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", + "credentialsType": None, + } + ), ) async def create_persistent_resource( diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/client.py b/google/cloud/aiplatform_v1/services/persistent_resource_service/client.py index 773b3253eb..e39f369669 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/client.py +++ b/google/cloud/aiplatform_v1/services/persistent_resource_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.persistent_resource_service import pagers +from google.cloud.aiplatform_v1.services.persistent_resource_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import persistent_resource @@ -109,9 +111,9 @@ class PersistentResourceServiceClientMeta(type): _transport_registry["grpc_asyncio"] = PersistentResourceServiceGrpcAsyncIOTransport _transport_registry["rest"] = PersistentResourceServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncPersistentResourceServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncPersistentResourceServiceRestTransport + ) def get_transport_class( cls, @@ -824,21 +826,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.PersistentResourceServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - 
"credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.PersistentResourceService", + "credentialsType": None, + } + ), ) def create_persistent_resource( diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/base.py b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/base.py index 65fc7bef28..2f237c7465 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/base.py @@ -319,13 +319,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc.py 
b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc.py index 6e2f7c825a..e1dc557c1f 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc.py @@ -366,12 +366,12 @@ def create_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_persistent_resource" not in self._stubs: - self._stubs[ - "create_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/CreatePersistentResource", - request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/CreatePersistentResource", + request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_persistent_resource"] @@ -455,12 +455,12 @@ def delete_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_persistent_resource" not in self._stubs: - self._stubs[ - "delete_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/DeletePersistentResource", - request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/DeletePersistentResource", + request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_persistent_resource"] @@ -486,12 +486,12 @@ def update_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_persistent_resource" not in self._stubs: - self._stubs[ - "update_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/UpdatePersistentResource", - request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/UpdatePersistentResource", + request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_persistent_resource"] @@ -517,12 +517,12 @@ def reboot_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "reboot_persistent_resource" not in self._stubs: - self._stubs[ - "reboot_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/RebootPersistentResource", - request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["reboot_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/RebootPersistentResource", + request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["reboot_persistent_resource"] diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc_asyncio.py index 815e00a9b3..5985f2b85e 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/grpc_asyncio.py @@ -374,12 +374,12 @@ def create_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_persistent_resource" not in self._stubs: - self._stubs[ - "create_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/CreatePersistentResource", - request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/CreatePersistentResource", + request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_persistent_resource"] @@ -463,12 +463,12 @@ def delete_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_persistent_resource" not in self._stubs: - self._stubs[ - "delete_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/DeletePersistentResource", - request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/DeletePersistentResource", + request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_persistent_resource"] @@ -494,12 +494,12 @@ def update_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_persistent_resource" not in self._stubs: - self._stubs[ - "update_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/UpdatePersistentResource", - request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/UpdatePersistentResource", + request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_persistent_resource"] @@ -525,12 +525,12 @@ def reboot_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "reboot_persistent_resource" not in self._stubs: - self._stubs[ - "reboot_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PersistentResourceService/RebootPersistentResource", - request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["reboot_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PersistentResourceService/RebootPersistentResource", + request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["reboot_persistent_resource"] diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest.py b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest.py index 2e7ec1cb89..caf405b87b 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest.py +++ 
b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest.py @@ -3776,7 +3776,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3921,7 +3920,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4066,7 +4064,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4212,7 +4209,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4362,7 +4358,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4507,7 +4502,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4626,7 +4620,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4745,7 +4738,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4890,7 +4882,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -5035,7 +5026,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest_asyncio.py index fd90354cf2..e303e2be91 100644 --- a/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/persistent_resource_service/transports/rest_asyncio.py @@ -767,9 +767,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3917,7 +3917,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4068,7 +4067,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4219,7 +4217,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4371,7 +4368,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4529,7 +4525,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4680,7 +4675,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4803,7 +4797,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4926,7 +4919,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5077,7 +5069,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5228,7 +5219,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py index 16052d5557..fd930acb7f 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/async_client.py @@ -57,7 +57,9 @@ from google.cloud.aiplatform_v1.types import pipeline_state from google.cloud.aiplatform_v1.types import service_networking from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -321,21 +323,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.PipelineServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.PipelineService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.PipelineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.PipelineService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, 
"get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.PipelineService", + "credentialsType": None, + } + ), ) async def create_training_pipeline( diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1/services/pipeline_service/client.py index 25bd981250..a7525eb937 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/client.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/client.py @@ -73,7 +73,9 @@ from google.cloud.aiplatform_v1.types import pipeline_state from google.cloud.aiplatform_v1.types import service_networking from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -957,21 +959,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.PipelineServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.PipelineService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.PipelineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.PipelineService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.PipelineService", + "credentialsType": None, + } + ), ) def create_training_pipeline( diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/transports/base.py b/google/cloud/aiplatform_v1/services/pipeline_service/transports/base.py index 86e637105f..5c9e73e6e1 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/transports/base.py @@ -32,7 +32,9 @@ from google.cloud.aiplatform_v1.types import pipeline_job as gca_pipeline_job from google.cloud.aiplatform_v1.types import pipeline_service from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -413,13 +415,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc.py index 38e5913b7d..9f13d254dc 100644 --- 
a/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc.py @@ -35,7 +35,9 @@ from google.cloud.aiplatform_v1.types import pipeline_job as gca_pipeline_job from google.cloud.aiplatform_v1.types import pipeline_service from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -640,12 +642,12 @@ def batch_delete_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_delete_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_delete_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PipelineService/BatchDeletePipelineJobs", - request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_delete_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PipelineService/BatchDeletePipelineJobs", + request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_delete_pipeline_jobs"] @@ -716,12 +718,12 @@ def batch_cancel_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_cancel_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_cancel_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PipelineService/BatchCancelPipelineJobs", - request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_cancel_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PipelineService/BatchCancelPipelineJobs", + request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_cancel_pipeline_jobs"] diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc_asyncio.py index a4a357bf21..ac1b45abc5 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc_asyncio.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1.types import pipeline_job as gca_pipeline_job from google.cloud.aiplatform_v1.types import pipeline_service from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -655,12 +657,12 @@ def batch_delete_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_delete_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_delete_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PipelineService/BatchDeletePipelineJobs", - request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_delete_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PipelineService/BatchDeletePipelineJobs", + request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_delete_pipeline_jobs"] @@ -734,12 +736,12 @@ def batch_cancel_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_cancel_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_cancel_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.PipelineService/BatchCancelPipelineJobs", - request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_cancel_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.PipelineService/BatchCancelPipelineJobs", + request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_cancel_pipeline_jobs"] diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest.py b/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest.py index 0ed7f26c3a..dd8158adb1 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest.py @@ -41,7 +41,9 @@ from google.cloud.aiplatform_v1.types import pipeline_job as 
gca_pipeline_job from google.cloud.aiplatform_v1.types import pipeline_service from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -4950,7 +4952,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5092,7 +5093,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5234,7 +5234,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5377,7 +5376,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5525,7 +5523,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5669,7 +5666,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5785,7 +5781,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -5901,7 +5896,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6043,7 +6037,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6185,7 +6178,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_asyncio.py index 0f08148505..cc86368505 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_asyncio.py @@ -54,7 +54,9 @@ from google.cloud.aiplatform_v1.types import pipeline_job as gca_pipeline_job from google.cloud.aiplatform_v1.types import pipeline_service from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -1039,9 +1041,9 @@ def __init__( self._interceptor = interceptor or AsyncPipelineServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, 
overriding the base class method to use async wrappers.""" @@ -5132,7 +5134,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5285,7 +5286,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5438,7 +5438,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5592,7 +5591,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5750,7 +5748,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5901,7 +5898,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6026,7 +6022,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6151,7 +6146,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6304,7 +6298,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -6457,7 +6450,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_base.py index 5c9d04114e..a8fdc60846 100644 --- a/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/pipeline_service/transports/rest_base.py @@ -31,7 +31,9 @@ from google.cloud.aiplatform_v1.types import pipeline_job as gca_pipeline_job from google.cloud.aiplatform_v1.types import pipeline_service from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/prediction_service/async_client.py b/google/cloud/aiplatform_v1/services/prediction_service/async_client.py index c073417a1a..297dcc3fa2 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/async_client.py @@ -297,21 +297,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.PredictionServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.PredictionService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", 
lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.PredictionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.PredictionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.PredictionService", + "credentialsType": None, + } + ), ) async def predict( diff --git a/google/cloud/aiplatform_v1/services/prediction_service/client.py b/google/cloud/aiplatform_v1/services/prediction_service/client.py index 79955703ee..22e1023edb 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/client.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/client.py @@ -836,21 +836,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.PredictionServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.PredictionService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.PredictionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.PredictionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.PredictionService", + "credentialsType": None, + } + ), ) def predict( diff --git a/google/cloud/aiplatform_v1/services/prediction_service/transports/base.py b/google/cloud/aiplatform_v1/services/prediction_service/transports/base.py index 2d5f14ea91..2706c8ac1c 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/transports/base.py @@ -441,13 +441,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc.py index efd1cf155c..6ceddb1124 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc.py @@ -531,12 +531,12 @@ def stream_direct_raw_predict( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "stream_direct_raw_predict" not in self._stubs: - self._stubs[ - "stream_direct_raw_predict" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1.PredictionService/StreamDirectRawPredict", - request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, - response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + self._stubs["stream_direct_raw_predict"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1.PredictionService/StreamDirectRawPredict", + request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, + response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + ) ) return self._stubs["stream_direct_raw_predict"] diff --git a/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc_asyncio.py index 767efdfdb6..7ff11e8f8b 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/transports/grpc_asyncio.py @@ -544,12 +544,12 @@ def stream_direct_raw_predict( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "stream_direct_raw_predict" not in self._stubs: - self._stubs[ - "stream_direct_raw_predict" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1.PredictionService/StreamDirectRawPredict", - request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, - response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + self._stubs["stream_direct_raw_predict"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1.PredictionService/StreamDirectRawPredict", + request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, + response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + ) ) return self._stubs["stream_direct_raw_predict"] diff --git a/google/cloud/aiplatform_v1/services/prediction_service/transports/rest.py b/google/cloud/aiplatform_v1/services/prediction_service/transports/rest.py index a32a27c137..0a5588e7bf 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/transports/rest.py @@ -2655,7 +2655,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -2798,7 +2797,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -2940,7 +2938,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3083,7 +3080,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -3231,7 +3227,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3376,7 +3371,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3493,7 +3487,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -3609,7 +3602,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -3752,7 +3744,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -3895,7 +3886,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/prediction_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/prediction_service/transports/rest_asyncio.py index 273ad8c366..3429eeaaf6 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/transports/rest_asyncio.py @@ -2814,7 +2814,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -2967,7 +2966,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3120,7 +3118,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3274,7 +3271,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3432,7 +3428,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3583,7 +3578,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3706,7 +3700,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -3829,7 +3822,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -3982,7 +3974,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4135,7 +4126,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/async_client.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/async_client.py index 3859aa5805..c729b13871 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/async_client.py @@ -48,7 +48,9 @@ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -297,21 +299,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ReasoningEngineExecutionServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + 
"credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", + "credentialsType": None, + } + ), ) async def query_reasoning_engine( diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/client.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/client.py index 69bebac790..aef703bc91 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/client.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/client.py @@ -63,7 +63,9 @@ _LOGGER = std_logging.getLogger(__name__) from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -101,14 +103,14 @@ class ReasoningEngineExecutionServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[ReasoningEngineExecutionServiceTransport]] _transport_registry["grpc"] = ReasoningEngineExecutionServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = ReasoningEngineExecutionServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + ReasoningEngineExecutionServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = ReasoningEngineExecutionServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncReasoningEngineExecutionServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncReasoningEngineExecutionServiceRestTransport + ) def get_transport_class( cls, @@ -764,21 +766,25 @@ def __init__( ): # pragma: NO COVER 
_LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ReasoningEngineExecutionServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineExecutionService", + "credentialsType": None, + } + ), ) def query_reasoning_engine( diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/__init__.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/__init__.py index 06c5aa0858..8d2c6f6d5a 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/__init__.py @@ -42,14 +42,14 @@ OrderedDict() ) # type: Dict[str, Type[ReasoningEngineExecutionServiceTransport]] _transport_registry["grpc"] = ReasoningEngineExecutionServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = ReasoningEngineExecutionServiceGrpcAsyncIOTransport 
+_transport_registry["grpc_asyncio"] = ( + ReasoningEngineExecutionServiceGrpcAsyncIOTransport +) _transport_registry["rest"] = ReasoningEngineExecutionServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncReasoningEngineExecutionServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncReasoningEngineExecutionServiceRestTransport + ) __all__ = ( "ReasoningEngineExecutionServiceTransport", diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/base.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/base.py index e607332203..578b21c99d 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/base.py @@ -28,7 +28,9 @@ import google.protobuf from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -254,13 +256,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc.py index ac6e581aa8..f0d02124a8 
100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc.py @@ -31,7 +31,9 @@ import proto # type: ignore from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -378,12 +380,12 @@ def stream_query_reasoning_engine( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "stream_query_reasoning_engine" not in self._stubs: - self._stubs[ - "stream_query_reasoning_engine" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", - request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, - response_deserializer=httpbody_pb2.HttpBody.FromString, + self._stubs["stream_query_reasoning_engine"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", + request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, + response_deserializer=httpbody_pb2.HttpBody.FromString, + ) ) return self._stubs["stream_query_reasoning_engine"] diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py index 5a1026eef2..4c57fe4818 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py +++ 
b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py @@ -34,7 +34,9 @@ from grpc.experimental import aio # type: ignore from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -386,12 +388,12 @@ def stream_query_reasoning_engine( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "stream_query_reasoning_engine" not in self._stubs: - self._stubs[ - "stream_query_reasoning_engine" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", - request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, - response_deserializer=httpbody_pb2.HttpBody.FromString, + self._stubs["stream_query_reasoning_engine"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", + request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, + response_deserializer=httpbody_pb2.HttpBody.FromString, + ) ) return self._stubs["stream_query_reasoning_engine"] diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest.py index d7fcf7d79b..21c05eaa2e 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest.py @@ -37,7 +37,9 @@ from google.api import httpbody_pb2 # type: 
ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -956,7 +958,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1101,7 +1102,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1244,7 +1244,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1388,7 +1387,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1538,7 +1536,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1683,7 +1680,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1800,7 +1796,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1917,7 +1912,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -2060,7 +2054,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2203,7 +2196,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_asyncio.py index 470ed2711b..7df31e2706 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_asyncio.py @@ -49,7 +49,9 @@ from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -1035,7 +1037,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1188,7 +1189,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1341,7 +1341,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1495,7 +1494,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1653,7 +1651,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1806,7 +1803,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1931,7 +1927,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2056,7 +2051,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2209,7 +2203,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2362,7 +2355,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_base.py index 135bdf8a2b..b543d26f2a 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_execution_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_service/async_client.py b/google/cloud/aiplatform_v1/services/reasoning_engine_service/async_client.py index 96316ee627..f507e5d3fc 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_service/async_client.py @@ -47,11 +47,15 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.reasoning_engine_service import pagers +from google.cloud.aiplatform_v1.services.reasoning_engine_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import reasoning_engine -from google.cloud.aiplatform_v1.types import reasoning_engine as gca_reasoning_engine +from google.cloud.aiplatform_v1.types import ( + reasoning_engine as gca_reasoning_engine, +) from google.cloud.aiplatform_v1.types import reasoning_engine_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: 
ignore @@ -299,21 +303,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ReasoningEngineServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", + "credentialsType": None, + } + ), ) async def create_reasoning_engine( diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_service/client.py b/google/cloud/aiplatform_v1/services/reasoning_engine_service/client.py index cce8cec275..e07d4df54b 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_service/client.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_service/client.py @@ -63,11 +63,15 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.reasoning_engine_service import pagers +from 
google.cloud.aiplatform_v1.services.reasoning_engine_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import reasoning_engine -from google.cloud.aiplatform_v1.types import reasoning_engine as gca_reasoning_engine +from google.cloud.aiplatform_v1.types import ( + reasoning_engine as gca_reasoning_engine, +) from google.cloud.aiplatform_v1.types import reasoning_engine_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -773,21 +777,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ReasoningEngineServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ReasoningEngineService", + "credentialsType": None, + } + ), ) def create_reasoning_engine( diff --git 
a/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/base.py b/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/base.py index 1bc99cfc4e..b7963e2ecd 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/base.py @@ -305,13 +305,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest.py b/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest.py index 0523c46d5e..e5c0fbf027 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest.py @@ -3538,7 +3538,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3681,7 +3680,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3824,7 +3822,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -3968,7 +3965,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4116,7 +4112,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4263,7 +4258,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4382,7 +4376,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4501,7 +4494,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4644,7 +4636,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4789,7 +4780,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest_asyncio.py index e9f342560b..510da1382d 100644 --- a/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/reasoning_engine_service/transports/rest_asyncio.py @@ -707,9 +707,9 @@ def __init__( self._interceptor = interceptor or AsyncReasoningEngineServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3659,7 +3659,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3810,7 +3809,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3961,7 +3959,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4113,7 +4110,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4269,7 +4265,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4420,7 +4415,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4543,7 +4537,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4666,7 +4659,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4817,7 +4809,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4968,7 +4959,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/schedule_service/async_client.py b/google/cloud/aiplatform_v1/services/schedule_service/async_client.py index 37cd66ec2b..76326d2404 100644 --- a/google/cloud/aiplatform_v1/services/schedule_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/schedule_service/async_client.py @@ -325,21 +325,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ScheduleServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ScheduleService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ScheduleService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ScheduleService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ScheduleService", + "credentialsType": None, + } + ), ) async def create_schedule( diff --git a/google/cloud/aiplatform_v1/services/schedule_service/client.py b/google/cloud/aiplatform_v1/services/schedule_service/client.py index ebd753a5aa..f0d3d0a2f6 100644 --- a/google/cloud/aiplatform_v1/services/schedule_service/client.py +++ b/google/cloud/aiplatform_v1/services/schedule_service/client.py @@ -995,21 +995,25 
@@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.ScheduleServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.ScheduleService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.ScheduleService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.ScheduleService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.ScheduleService", + "credentialsType": None, + } + ), ) def create_schedule( diff --git a/google/cloud/aiplatform_v1/services/schedule_service/transports/base.py b/google/cloud/aiplatform_v1/services/schedule_service/transports/base.py index d31b8d4da0..c1215a43bf 100644 --- a/google/cloud/aiplatform_v1/services/schedule_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/schedule_service/transports/base.py @@ -332,13 +332,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, 
+ ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/schedule_service/transports/rest.py b/google/cloud/aiplatform_v1/services/schedule_service/transports/rest.py index 100ae3eb95..37a8b45bed 100644 --- a/google/cloud/aiplatform_v1/services/schedule_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/schedule_service/transports/rest.py @@ -3781,7 +3781,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3923,7 +3922,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4065,7 +4063,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4208,7 +4205,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4356,7 +4352,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4500,7 +4495,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4616,7 +4610,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -4732,7 +4725,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4874,7 +4866,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5016,7 +5007,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/schedule_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/schedule_service/transports/rest_asyncio.py index fb09a7b684..81614a3fdb 100644 --- a/google/cloud/aiplatform_v1/services/schedule_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/schedule_service/transports/rest_asyncio.py @@ -735,9 +735,9 @@ def __init__( self._interceptor = interceptor or AsyncScheduleServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3937,7 +3937,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4090,7 +4089,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -4243,7 +4241,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4397,7 +4394,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4555,7 +4551,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4706,7 +4701,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4831,7 +4825,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4956,7 +4949,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5109,7 +5101,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5262,7 +5253,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py index 5d91ace63c..13112bd91b 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/async_client.py @@ -47,10 +47,14 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.specialist_pool_service import pagers +from google.cloud.aiplatform_v1.services.specialist_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import specialist_pool -from google.cloud.aiplatform_v1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1.types import ( + specialist_pool as gca_specialist_pool, +) from google.cloud.aiplatform_v1.types import specialist_pool_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -297,21 +301,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.SpecialistPoolServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": 
"google.cloud.aiplatform.v1.SpecialistPoolService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", + "credentialsType": None, + } + ), ) async def create_specialist_pool( diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py index 1a48bbd576..130848079e 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/client.py @@ -63,10 +63,14 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.specialist_pool_service import pagers +from google.cloud.aiplatform_v1.services.specialist_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import specialist_pool -from google.cloud.aiplatform_v1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1.types import ( + specialist_pool as gca_specialist_pool, +) from google.cloud.aiplatform_v1.types import specialist_pool_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -755,21 +759,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.SpecialistPoolServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", - "universeDomain": 
getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.SpecialistPoolService", + "credentialsType": None, + } + ), ) def create_specialist_pool( diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/base.py b/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/base.py index c9da6ae9e3..2db92c433d 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/base.py @@ -304,13 +304,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest.py 
b/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest.py index 1f47ba6678..3f2b86f9d3 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest.py @@ -3550,7 +3550,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3693,7 +3692,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3836,7 +3834,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3980,7 +3977,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4128,7 +4124,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4275,7 +4270,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4394,7 +4388,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4513,7 +4506,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -4656,7 +4648,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4799,7 +4790,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest_asyncio.py index bec7e074c2..83ded27d45 100644 --- a/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/specialist_pool_service/transports/rest_asyncio.py @@ -708,9 +708,9 @@ def __init__( self._interceptor = interceptor or AsyncSpecialistPoolServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3648,7 +3648,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3799,7 +3798,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3950,7 +3948,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -4102,7 +4099,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4258,7 +4254,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4409,7 +4404,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4532,7 +4526,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4655,7 +4648,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4806,7 +4798,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4957,7 +4948,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py b/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py index d836fed8f9..31df45ad1c 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/async_client.py @@ -60,7 +60,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -318,21 +320,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.TensorboardServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.TensorboardService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.TensorboardService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.TensorboardService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if 
hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.TensorboardService", + "credentialsType": None, + } + ), ) async def create_tensorboard( diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/client.py b/google/cloud/aiplatform_v1/services/tensorboard_service/client.py index b1c6261086..4a906246cd 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/client.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/client.py @@ -75,7 +75,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -841,21 +843,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.TensorboardServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.TensorboardService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.TensorboardService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.TensorboardService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + 
"credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.TensorboardService", + "credentialsType": None, + } + ), ) def create_tensorboard( diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/base.py b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/base.py index 640d84ee0b..6824a099cf 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/base.py @@ -34,7 +34,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -730,13 +732,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc.py index 84564d8242..72fb9b300c 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc.py @@ -37,7 +37,9 @@ 
tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -574,12 +576,12 @@ def create_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_tensorboard_experiment" not in self._stubs: - self._stubs[ - "create_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardExperiment", - request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["create_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardExperiment", + request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["create_tensorboard_experiment"] @@ -605,12 +607,12 @@ def get_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_tensorboard_experiment" not in self._stubs: - self._stubs[ - "get_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardExperiment", - request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, - response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["get_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardExperiment", + request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, + response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["get_tensorboard_experiment"] @@ -636,12 +638,12 @@ def update_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_tensorboard_experiment" not in self._stubs: - self._stubs[ - "update_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardExperiment", - request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["update_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardExperiment", + request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["update_tensorboard_experiment"] @@ -667,12 +669,12 @@ def list_tensorboard_experiments( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_tensorboard_experiments" not in self._stubs: - self._stubs[ - "list_tensorboard_experiments" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardExperiments", - request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + self._stubs["list_tensorboard_experiments"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardExperiments", + request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + ) ) return self._stubs["list_tensorboard_experiments"] @@ -698,12 +700,12 @@ def delete_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_tensorboard_experiment" not in self._stubs: - self._stubs[ - "delete_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardExperiment", - request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardExperiment", + request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_experiment"] @@ -758,12 +760,12 @@ def batch_create_tensorboard_runs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_create_tensorboard_runs" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_runs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardRuns", - request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + self._stubs["batch_create_tensorboard_runs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardRuns", + request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_runs"] @@ -905,12 +907,12 @@ def batch_create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardTimeSeries", - request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + self._stubs["batch_create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardTimeSeries", + request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_time_series"] @@ -936,12 +938,12 @@ def create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardTimeSeries", - request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardTimeSeries", + request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["create_tensorboard_time_series"] @@ -967,12 +969,12 @@ def get_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_tensorboard_time_series" not in self._stubs: - self._stubs[ - "get_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardTimeSeries", - request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["get_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardTimeSeries", + request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["get_tensorboard_time_series"] @@ -998,12 +1000,12 @@ def update_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_tensorboard_time_series" not in self._stubs: - self._stubs[ - "update_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardTimeSeries", - request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["update_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardTimeSeries", + request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["update_tensorboard_time_series"] @@ -1029,12 +1031,12 @@ def list_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tensorboard_time_series" not in self._stubs: - self._stubs[ - "list_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardTimeSeries", - request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + self._stubs["list_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardTimeSeries", + request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["list_tensorboard_time_series"] @@ -1060,12 +1062,12 @@ def delete_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_tensorboard_time_series" not in self._stubs: - self._stubs[ - "delete_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardTimeSeries", - request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardTimeSeries", + request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_time_series"] @@ -1097,12 +1099,12 @@ def batch_read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "batch_read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/BatchReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["batch_read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/BatchReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["batch_read_tensorboard_time_series_data"] @@ -1133,12 +1135,12 @@ def read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_time_series_data"] @@ -1167,12 +1169,12 @@ def read_tensorboard_blob_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "read_tensorboard_blob_data" not in self._stubs: - self._stubs[ - "read_tensorboard_blob_data" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardBlobData", - request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + self._stubs["read_tensorboard_blob_data"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardBlobData", + request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_blob_data"] @@ -1201,12 +1203,12 @@ def write_tensorboard_experiment_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "write_tensorboard_experiment_data" not in self._stubs: - self._stubs[ - "write_tensorboard_experiment_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardExperimentData", - request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + self._stubs["write_tensorboard_experiment_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardExperimentData", + request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_experiment_data"] @@ -1234,12 +1236,12 @@ def write_tensorboard_run_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_tensorboard_run_data" not in self._stubs: - self._stubs[ - "write_tensorboard_run_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardRunData", - request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + self._stubs["write_tensorboard_run_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardRunData", + request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_run_data"] @@ -1267,12 +1269,12 @@ def export_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "export_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "export_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ExportTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["export_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ExportTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["export_tensorboard_time_series_data"] diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc_asyncio.py index 0f0536e8e8..8d4282dd6f 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/grpc_asyncio.py @@ -40,7 +40,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -587,12 +589,12 @@ def create_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_tensorboard_experiment" not in self._stubs: - self._stubs[ - "create_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardExperiment", - request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["create_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardExperiment", + request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["create_tensorboard_experiment"] @@ -618,12 +620,12 @@ def get_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_tensorboard_experiment" not in self._stubs: - self._stubs[ - "get_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardExperiment", - request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, - response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["get_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardExperiment", + request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, + response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["get_tensorboard_experiment"] @@ -649,12 +651,12 @@ def update_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_tensorboard_experiment" not in self._stubs: - self._stubs[ - "update_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardExperiment", - request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["update_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardExperiment", + request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["update_tensorboard_experiment"] @@ -680,12 +682,12 @@ def list_tensorboard_experiments( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tensorboard_experiments" not in self._stubs: - self._stubs[ - "list_tensorboard_experiments" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardExperiments", - request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + self._stubs["list_tensorboard_experiments"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardExperiments", + request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + ) ) return self._stubs["list_tensorboard_experiments"] @@ -711,12 +713,12 @@ def delete_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_tensorboard_experiment" not in self._stubs: - self._stubs[ - "delete_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardExperiment", - request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardExperiment", + request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_experiment"] @@ -771,12 +773,12 @@ def batch_create_tensorboard_runs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_create_tensorboard_runs" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_runs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardRuns", - request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + self._stubs["batch_create_tensorboard_runs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardRuns", + request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_runs"] @@ -920,12 +922,12 @@ def batch_create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardTimeSeries", - request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + self._stubs["batch_create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardTimeSeries", + request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_time_series"] @@ -951,12 +953,12 @@ def create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardTimeSeries", - request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardTimeSeries", + request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["create_tensorboard_time_series"] @@ -982,12 +984,12 @@ def get_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_tensorboard_time_series" not in self._stubs: - self._stubs[ - "get_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardTimeSeries", - request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["get_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardTimeSeries", + request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["get_tensorboard_time_series"] @@ -1013,12 +1015,12 @@ def update_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_tensorboard_time_series" not in self._stubs: - self._stubs[ - "update_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardTimeSeries", - request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["update_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardTimeSeries", + request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["update_tensorboard_time_series"] @@ -1044,12 +1046,12 @@ def list_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_tensorboard_time_series" not in self._stubs: - self._stubs[ - "list_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardTimeSeries", - request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + self._stubs["list_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardTimeSeries", + request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["list_tensorboard_time_series"] @@ -1075,12 +1077,12 @@ def delete_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_tensorboard_time_series" not in self._stubs: - self._stubs[ - "delete_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardTimeSeries", - request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardTimeSeries", + request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_time_series"] @@ -1112,12 +1114,12 @@ def batch_read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "batch_read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/BatchReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["batch_read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/BatchReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["batch_read_tensorboard_time_series_data"] @@ -1148,12 +1150,12 @@ def read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_time_series_data"] @@ -1182,12 +1184,12 @@ def read_tensorboard_blob_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "read_tensorboard_blob_data" not in self._stubs: - self._stubs[ - "read_tensorboard_blob_data" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardBlobData", - request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + self._stubs["read_tensorboard_blob_data"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardBlobData", + request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_blob_data"] @@ -1216,12 +1218,12 @@ def write_tensorboard_experiment_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "write_tensorboard_experiment_data" not in self._stubs: - self._stubs[ - "write_tensorboard_experiment_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardExperimentData", - request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + self._stubs["write_tensorboard_experiment_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardExperimentData", + request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_experiment_data"] @@ -1249,12 +1251,12 @@ def write_tensorboard_run_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_tensorboard_run_data" not in self._stubs: - self._stubs[ - "write_tensorboard_run_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardRunData", - request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + self._stubs["write_tensorboard_run_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardRunData", + request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_run_data"] @@ -1282,12 +1284,12 @@ def export_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "export_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "export_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.TensorboardService/ExportTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["export_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.TensorboardService/ExportTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["export_tensorboard_time_series_data"] diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest.py b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest.py index 02a7c96b3b..29438f5244 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest.py @@ -43,7 +43,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -9280,7 +9282,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -9423,7 +9424,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -9566,7 +9566,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -9710,7 +9709,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -9858,7 +9856,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10005,7 +10002,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10122,7 +10118,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10239,7 +10234,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -10382,7 +10376,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -10525,7 +10518,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_asyncio.py index 59fcba2499..8c281b3f85 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_asyncio.py @@ -56,7 +56,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -2199,9 +2201,9 @@ def __init__( self._interceptor = interceptor or AsyncTensorboardServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -9677,7 +9679,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -9830,7 +9831,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -9983,7 +9983,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10137,7 +10136,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10295,7 +10293,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10446,7 +10443,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10569,7 +10565,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10692,7 +10687,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -10845,7 +10839,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -10996,7 +10989,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_base.py b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_base.py index 842c2edfcb..7d44637071 100644 --- a/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1/services/tensorboard_service/transports/rest_base.py @@ -33,7 +33,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/async_client.py b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/async_client.py index 73869171f7..c9a47a3d27 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.vertex_rag_data_service import pagers +from google.cloud.aiplatform_v1.services.vertex_rag_data_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -305,21 +307,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.VertexRagDataServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", - "universeDomain": 
getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", + "credentialsType": None, + } + ), ) async def create_rag_corpus( diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/client.py b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/client.py index c12844014f..9dedde29e3 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/client.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1.services.vertex_rag_data_service import pagers +from google.cloud.aiplatform_v1.services.vertex_rag_data_service import ( + pagers, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -859,21 +861,25 @@ def __init__( ): # pragma: NO COVER 
_LOGGER.debug( "Created client `google.cloud.aiplatform_v1.VertexRagDataServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.VertexRagDataService", + "credentialsType": None, + } + ), ) def create_rag_corpus( diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/base.py b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/base.py index 4739d84b7b..c3b684b757 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/base.py @@ -408,13 +408,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + 
[operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest.py b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest.py index c22350780c..a64f544ad8 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest.py @@ -5069,7 +5069,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5212,7 +5211,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5355,7 +5353,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5499,7 +5496,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5647,7 +5643,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5794,7 +5789,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5911,7 +5905,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -6028,7 +6021,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6171,7 +6163,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6314,7 +6305,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest_asyncio.py index db18cfef3d..9eb78116dc 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_data_service/transports/rest_asyncio.py @@ -1110,9 +1110,9 @@ def __init__( self._interceptor = interceptor or AsyncVertexRagDataServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5257,7 +5257,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5410,7 +5409,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -5561,7 +5559,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5713,7 +5710,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5869,7 +5865,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6020,7 +6015,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6143,7 +6137,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6266,7 +6259,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6417,7 +6409,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6568,7 +6559,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_service/async_client.py b/google/cloud/aiplatform_v1/services/vertex_rag_service/async_client.py index 0ef0470e40..6379b7a254 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_service/async_client.py @@ -278,21 +278,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.VertexRagServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.VertexRagService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.VertexRagService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.VertexRagService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.VertexRagService", + "credentialsType": None, + } + ), ) async def retrieve_contexts( diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_service/client.py b/google/cloud/aiplatform_v1/services/vertex_rag_service/client.py index 0468caa577..a1f96c96db 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_service/client.py +++ 
b/google/cloud/aiplatform_v1/services/vertex_rag_service/client.py @@ -740,21 +740,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.VertexRagServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.VertexRagService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.VertexRagService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.VertexRagService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.VertexRagService", + "credentialsType": None, + } + ), ) def retrieve_contexts( diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/base.py b/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/base.py index 965a98f6e3..04ff8d0c3c 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/base.py @@ -273,13 +273,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest.py b/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest.py index 61b53fb120..074b5b881f 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest.py @@ -1142,7 +1142,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1284,7 +1283,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1426,7 +1424,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1569,7 +1566,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1717,7 +1713,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1862,7 +1857,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1979,7 +1973,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -2095,7 +2088,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2237,7 +2229,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2379,7 +2370,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest_asyncio.py index abd760fba3..4f135c108c 100644 --- a/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/vertex_rag_service/transports/rest_asyncio.py @@ -1230,7 +1230,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1383,7 +1382,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1536,7 +1534,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1690,7 +1687,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -1848,7 +1844,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1999,7 +1994,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2124,7 +2118,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2249,7 +2242,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2402,7 +2394,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2555,7 +2546,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/vizier_service/async_client.py b/google/cloud/aiplatform_v1/services/vizier_service/async_client.py index 35d3ddcc9b..ff6b8c5b96 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/async_client.py @@ -288,21 +288,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.VizierServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.VizierService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.VizierService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.VizierService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.VizierService", + "credentialsType": None, + } + ), ) async def create_study( diff --git a/google/cloud/aiplatform_v1/services/vizier_service/client.py b/google/cloud/aiplatform_v1/services/vizier_service/client.py index c1a0afd750..bc6cdc19db 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/client.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/client.py @@ -789,21 +789,25 @@ def __init__( ): # 
pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1.VizierServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1.VizierService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1.VizierService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1.VizierService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1.VizierService", + "credentialsType": None, + } + ), ) def create_study( diff --git a/google/cloud/aiplatform_v1/services/vizier_service/transports/base.py b/google/cloud/aiplatform_v1/services/vizier_service/transports/base.py index 39fe50de29..2144e3f306 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/transports/base.py @@ -445,13 +445,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property 
diff --git a/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc.py index fce1213478..5fd4ebb120 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc.py @@ -695,12 +695,12 @@ def check_trial_early_stopping_state( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "check_trial_early_stopping_state" not in self._stubs: - self._stubs[ - "check_trial_early_stopping_state" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.VizierService/CheckTrialEarlyStoppingState", - request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["check_trial_early_stopping_state"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.VizierService/CheckTrialEarlyStoppingState", + request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["check_trial_early_stopping_state"] diff --git a/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc_asyncio.py index ec5e4830b5..10c35a137e 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/transports/grpc_asyncio.py @@ -711,12 +711,12 @@ def check_trial_early_stopping_state( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "check_trial_early_stopping_state" not in self._stubs: - self._stubs[ - "check_trial_early_stopping_state" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1.VizierService/CheckTrialEarlyStoppingState", - request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["check_trial_early_stopping_state"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1.VizierService/CheckTrialEarlyStoppingState", + request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["check_trial_early_stopping_state"] diff --git a/google/cloud/aiplatform_v1/services/vizier_service/transports/rest.py b/google/cloud/aiplatform_v1/services/vizier_service/transports/rest.py index 499f29c7d1..5b426a81b5 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/transports/rest.py @@ -5540,7 +5540,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5684,7 +5683,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5826,7 +5824,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5969,7 +5966,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -6116,7 +6112,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6260,7 +6255,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6376,7 +6370,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6492,7 +6485,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6634,7 +6626,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6776,7 +6767,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1/services/vizier_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/vizier_service/transports/rest_asyncio.py index 4cd085895f..21cc2f80b8 100644 --- a/google/cloud/aiplatform_v1/services/vizier_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/vizier_service/transports/rest_asyncio.py @@ -1160,9 +1160,9 @@ def __init__( self._interceptor = interceptor or AsyncVizierServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5765,7 +5765,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5917,7 +5916,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6069,7 +6067,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6222,7 +6219,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6380,7 +6376,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -6532,7 +6527,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6656,7 +6650,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6780,7 +6773,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6932,7 +6924,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7084,7 +7075,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1/types/accelerator_type.py b/google/cloud/aiplatform_v1/types/accelerator_type.py index 7774213c96..54db499797 100644 --- a/google/cloud/aiplatform_v1/types/accelerator_type.py +++ b/google/cloud/aiplatform_v1/types/accelerator_type.py @@ -72,6 +72,7 @@ class AcceleratorType(proto.Enum): TPU_V5_LITEPOD (12): TPU v5. """ + ACCELERATOR_TYPE_UNSPECIFIED = 0 NVIDIA_TESLA_K80 = 1 NVIDIA_TESLA_P100 = 2 diff --git a/google/cloud/aiplatform_v1/types/artifact.py b/google/cloud/aiplatform_v1/types/artifact.py index 2886b89f86..f8de1d6c6a 100644 --- a/google/cloud/aiplatform_v1/types/artifact.py +++ b/google/cloud/aiplatform_v1/types/artifact.py @@ -112,6 +112,7 @@ class State(proto.Enum): exist, unless something external to the system deletes it. 
""" + STATE_UNSPECIFIED = 0 PENDING = 1 LIVE = 2 diff --git a/google/cloud/aiplatform_v1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1/types/batch_prediction_job.py index 7efe86f8f2..4a5e92a48a 100644 --- a/google/cloud/aiplatform_v1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1/types/batch_prediction_job.py @@ -19,8 +19,12 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import completion_stats as gca_completion_stats -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + completion_stats as gca_completion_stats, +) +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import explanation from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import job_state @@ -613,7 +617,9 @@ class OutputInfo(proto.Message): proto.STRING, number=29, ) - manual_batch_tuning_parameters: gca_manual_batch_tuning_parameters.ManualBatchTuningParameters = proto.Field( + manual_batch_tuning_parameters: ( + gca_manual_batch_tuning_parameters.ManualBatchTuningParameters + ) = proto.Field( proto.MESSAGE, number=8, message=gca_manual_batch_tuning_parameters.ManualBatchTuningParameters, diff --git a/google/cloud/aiplatform_v1/types/cached_content.py b/google/cloud/aiplatform_v1/types/cached_content.py index f3fb112609..bbd6fd1294 100644 --- a/google/cloud/aiplatform_v1/types/cached_content.py +++ b/google/cloud/aiplatform_v1/types/cached_content.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import content -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import tool from google.protobuf import duration_pb2 # type: ignore from google.protobuf import 
timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/content.py b/google/cloud/aiplatform_v1/types/content.py index eb9c346603..bf80aaa5c5 100644 --- a/google/cloud/aiplatform_v1/types/content.py +++ b/google/cloud/aiplatform_v1/types/content.py @@ -77,6 +77,7 @@ class HarmCategory(proto.Enum): Deprecated: Election filter is not longer supported. The harm category is civic integrity. """ + HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_HATE_SPEECH = 1 HARM_CATEGORY_DANGEROUS_CONTENT = 2 @@ -102,6 +103,7 @@ class Modality(proto.Enum): DOCUMENT (5): Document, e.g. PDF. """ + MODALITY_UNSPECIFIED = 0 TEXT = 1 IMAGE = 2 @@ -511,12 +513,15 @@ class ModelRoutingPreference(proto.Enum): PRIORITIZE_COST (3): Prefer lower cost over higher quality. """ + UNKNOWN = 0 PRIORITIZE_QUALITY = 1 BALANCED = 2 PRIORITIZE_COST = 3 - model_routing_preference: "GenerationConfig.RoutingConfig.AutoRoutingMode.ModelRoutingPreference" = proto.Field( + model_routing_preference: ( + "GenerationConfig.RoutingConfig.AutoRoutingMode.ModelRoutingPreference" + ) = proto.Field( proto.ENUM, number=1, optional=True, @@ -703,6 +708,7 @@ class HarmBlockThreshold(proto.Enum): OFF (5): Turn off the safety filter. """ + HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 @@ -723,6 +729,7 @@ class HarmBlockMethod(proto.Enum): The harm block method uses the probability score. """ + HARM_BLOCK_METHOD_UNSPECIFIED = 0 SEVERITY = 1 PROBABILITY = 2 @@ -780,6 +787,7 @@ class HarmProbability(proto.Enum): HIGH (4): High level of harm. """ + HARM_PROBABILITY_UNSPECIFIED = 0 NEGLIGIBLE = 1 LOW = 2 @@ -801,6 +809,7 @@ class HarmSeverity(proto.Enum): HARM_SEVERITY_HIGH (4): High level of harm severity. """ + HARM_SEVERITY_UNSPECIFIED = 0 HARM_SEVERITY_NEGLIGIBLE = 1 HARM_SEVERITY_LOW = 2 @@ -983,6 +992,7 @@ class FinishReason(proto.Enum): The model response was blocked by Model Armor. 
""" + FINISH_REASON_UNSPECIFIED = 0 STOP = 1 MAX_TOKENS = 2 @@ -1085,6 +1095,7 @@ class UrlRetrievalStatus(proto.Enum): URL_RETRIEVAL_STATUS_ERROR (2): Url retrieval is failed due to error. """ + URL_RETRIEVAL_STATUS_UNSPECIFIED = 0 URL_RETRIEVAL_STATUS_SUCCESS = 1 URL_RETRIEVAL_STATUS_ERROR = 2 diff --git a/google/cloud/aiplatform_v1/types/custom_job.py b/google/cloud/aiplatform_v1/types/custom_job.py index 08f0b12077..caed31b232 100644 --- a/google/cloud/aiplatform_v1/types/custom_job.py +++ b/google/cloud/aiplatform_v1/types/custom_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import env_var from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import job_state @@ -601,6 +603,7 @@ class Strategy(proto.Enum): Flex Start strategy uses DWS to queue for resources. """ + STRATEGY_UNSPECIFIED = 0 ON_DEMAND = 1 LOW_COST = 2 diff --git a/google/cloud/aiplatform_v1/types/data_foundry_service.py b/google/cloud/aiplatform_v1/types/data_foundry_service.py index efc2c78969..8d1c9bb36a 100644 --- a/google/cloud/aiplatform_v1/types/data_foundry_service.py +++ b/google/cloud/aiplatform_v1/types/data_foundry_service.py @@ -160,6 +160,7 @@ class FieldType(proto.Enum): AUDIO (4): Audio field type. 
""" + FIELD_TYPE_UNSPECIFIED = 0 CONTENT = 1 TEXT = 2 diff --git a/google/cloud/aiplatform_v1/types/data_labeling_job.py b/google/cloud/aiplatform_v1/types/data_labeling_job.py index 1b05a14cae..beca34ba35 100644 --- a/google/cloud/aiplatform_v1/types/data_labeling_job.py +++ b/google/cloud/aiplatform_v1/types/data_labeling_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import job_state from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -320,6 +322,7 @@ class SampleStrategy(proto.Enum): UNCERTAINTY (1): Sample the most uncertain data to label. """ + SAMPLE_STRATEGY_UNSPECIFIED = 0 UNCERTAINTY = 1 diff --git a/google/cloud/aiplatform_v1/types/dataset.py b/google/cloud/aiplatform_v1/types/dataset.py index 233e52682f..186c9106f4 100644 --- a/google/cloud/aiplatform_v1/types/dataset.py +++ b/google/cloud/aiplatform_v1/types/dataset.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import saved_query from google.protobuf import struct_pb2 # type: ignore @@ -372,6 +374,7 @@ class ExportUse(proto.Enum): CUSTOM_CODE_TRAINING (6): Export for custom code training. 
""" + EXPORT_USE_UNSPECIFIED = 0 CUSTOM_CODE_TRAINING = 6 diff --git a/google/cloud/aiplatform_v1/types/dataset_service.py b/google/cloud/aiplatform_v1/types/dataset_service.py index c6a13a20cb..05086b8737 100644 --- a/google/cloud/aiplatform_v1/types/dataset_service.py +++ b/google/cloud/aiplatform_v1/types/dataset_service.py @@ -22,7 +22,9 @@ from google.cloud.aiplatform_v1.types import annotation from google.cloud.aiplatform_v1.types import data_item as gca_data_item from google.cloud.aiplatform_v1.types import dataset as gca_dataset -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import operation from google.cloud.aiplatform_v1.types import saved_query as gca_saved_query @@ -578,12 +580,12 @@ class ListDatasetVersionsResponse(proto.Message): def raw_page(self): return self - dataset_versions: MutableSequence[ - gca_dataset_version.DatasetVersion - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_dataset_version.DatasetVersion, + dataset_versions: MutableSequence[gca_dataset_version.DatasetVersion] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_dataset_version.DatasetVersion, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/deployment_resource_pool.py b/google/cloud/aiplatform_v1/types/deployment_resource_pool.py index 991c9a6a6d..df3352c995 100644 --- a/google/cloud/aiplatform_v1/types/deployment_resource_pool.py +++ b/google/cloud/aiplatform_v1/types/deployment_resource_pool.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import 
machine_resources from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/deployment_resource_pool_service.py b/google/cloud/aiplatform_v1/types/deployment_resource_pool_service.py index 59a11a62bc..1325565ba4 100644 --- a/google/cloud/aiplatform_v1/types/deployment_resource_pool_service.py +++ b/google/cloud/aiplatform_v1/types/deployment_resource_pool_service.py @@ -310,12 +310,12 @@ def raw_page(self): proto.STRING, number=2, ) - deployed_model_refs: MutableSequence[ - deployed_model_ref.DeployedModelRef - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=deployed_model_ref.DeployedModelRef, + deployed_model_refs: MutableSequence[deployed_model_ref.DeployedModelRef] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=deployed_model_ref.DeployedModelRef, + ) ) total_deployed_model_count: int = proto.Field( proto.INT32, diff --git a/google/cloud/aiplatform_v1/types/deployment_stage.py b/google/cloud/aiplatform_v1/types/deployment_stage.py index 79426c5b1a..dfb2c2a1ce 100644 --- a/google/cloud/aiplatform_v1/types/deployment_stage.py +++ b/google/cloud/aiplatform_v1/types/deployment_stage.py @@ -56,6 +56,7 @@ class DeploymentStage(proto.Enum): DEPLOYMENT_TERMINATED (10): The deployment has terminated. 
""" + DEPLOYMENT_STAGE_UNSPECIFIED = 0 STARTING_DEPLOYMENT = 5 PREPARING_MODEL = 6 diff --git a/google/cloud/aiplatform_v1/types/endpoint.py b/google/cloud/aiplatform_v1/types/endpoint.py index 345966c5f7..9a0679d18b 100644 --- a/google/cloud/aiplatform_v1/types/endpoint.py +++ b/google/cloud/aiplatform_v1/types/endpoint.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import explanation from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import machine_resources diff --git a/google/cloud/aiplatform_v1/types/endpoint_service.py b/google/cloud/aiplatform_v1/types/endpoint_service.py index 25d7f25d98..5be21fa24b 100644 --- a/google/cloud/aiplatform_v1/types/endpoint_service.py +++ b/google/cloud/aiplatform_v1/types/endpoint_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import deployment_stage as gca_deployment_stage +from google.cloud.aiplatform_v1.types import ( + deployment_stage as gca_deployment_stage, +) from google.cloud.aiplatform_v1.types import endpoint as gca_endpoint from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/evaluated_annotation.py b/google/cloud/aiplatform_v1/types/evaluated_annotation.py index 5efa58841a..c5f9f66f35 100644 --- a/google/cloud/aiplatform_v1/types/evaluated_annotation.py +++ b/google/cloud/aiplatform_v1/types/evaluated_annotation.py @@ -126,6 +126,7 @@ class EvaluatedAnnotationType(proto.Enum): has a ground truth annotation which is not matched by any of the model created predictions. 
""" + EVALUATED_ANNOTATION_TYPE_UNSPECIFIED = 0 TRUE_POSITIVE = 1 FALSE_POSITIVE = 2 @@ -155,19 +156,19 @@ class EvaluatedAnnotationType(proto.Enum): proto.STRING, number=6, ) - explanations: MutableSequence[ - "EvaluatedAnnotationExplanation" - ] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message="EvaluatedAnnotationExplanation", + explanations: MutableSequence["EvaluatedAnnotationExplanation"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=8, + message="EvaluatedAnnotationExplanation", + ) ) - error_analysis_annotations: MutableSequence[ - "ErrorAnalysisAnnotation" - ] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message="ErrorAnalysisAnnotation", + error_analysis_annotations: MutableSequence["ErrorAnalysisAnnotation"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=9, + message="ErrorAnalysisAnnotation", + ) ) @@ -234,6 +235,7 @@ class QueryType(proto.Enum): Query dissimilar samples from the same class of the input sample. """ + QUERY_TYPE_UNSPECIFIED = 0 ALL_SIMILAR = 1 SAME_CLASS_SIMILAR = 2 diff --git a/google/cloud/aiplatform_v1/types/evaluation_service.py b/google/cloud/aiplatform_v1/types/evaluation_service.py index a3e90ffea1..4518f716e4 100644 --- a/google/cloud/aiplatform_v1/types/evaluation_service.py +++ b/google/cloud/aiplatform_v1/types/evaluation_service.py @@ -150,6 +150,7 @@ class PairwiseChoice(proto.Enum): TIE (3): Winner cannot be determined """ + PAIRWISE_CHOICE_UNSPECIFIED = 0 BASELINE = 1 CANDIDATE = 2 @@ -365,7 +366,9 @@ class EvaluateInstancesRequest(proto.Message): oneof="metric_inputs", message="QuestionAnsweringQualityInput", ) - pairwise_question_answering_quality_input: "PairwiseQuestionAnsweringQualityInput" = proto.Field( + pairwise_question_answering_quality_input: ( + "PairwiseQuestionAnsweringQualityInput" + ) = proto.Field( proto.MESSAGE, number=24, oneof="metric_inputs", @@ -654,7 +657,9 @@ class EvaluateInstancesResponse(proto.Message): oneof="evaluation_results", 
message="QuestionAnsweringQualityResult", ) - pairwise_question_answering_quality_result: "PairwiseQuestionAnsweringQualityResult" = proto.Field( + pairwise_question_answering_quality_result: ( + "PairwiseQuestionAnsweringQualityResult" + ) = proto.Field( proto.MESSAGE, number=23, oneof="evaluation_results", @@ -800,12 +805,12 @@ class ExactMatchResults(proto.Message): Output only. Exact match metric values. """ - exact_match_metric_values: MutableSequence[ - "ExactMatchMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="ExactMatchMetricValue", + exact_match_metric_values: MutableSequence["ExactMatchMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ExactMatchMetricValue", + ) ) @@ -2945,12 +2950,12 @@ class ToolCallValidResults(proto.Message): Output only. Tool call valid metric values. """ - tool_call_valid_metric_values: MutableSequence[ - "ToolCallValidMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="ToolCallValidMetricValue", + tool_call_valid_metric_values: MutableSequence["ToolCallValidMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ToolCallValidMetricValue", + ) ) @@ -3036,12 +3041,12 @@ class ToolNameMatchResults(proto.Message): Output only. Tool name match metric values. """ - tool_name_match_metric_values: MutableSequence[ - "ToolNameMatchMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="ToolNameMatchMetricValue", + tool_name_match_metric_values: MutableSequence["ToolNameMatchMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ToolNameMatchMetricValue", + ) ) @@ -3314,6 +3319,7 @@ class CometVersion(proto.Enum): Comet 22 for translation + source + reference (source-reference-combined). 
""" + COMET_VERSION_UNSPECIFIED = 0 COMET_22_SRC_REF = 2 @@ -3451,6 +3457,7 @@ class MetricxVersion(proto.Enum): MetricX 2024 (2.6) for translation + source + reference (source-reference-combined). """ + METRICX_VERSION_UNSPECIFIED = 0 METRICX_24_REF = 1 METRICX_24_SRC = 2 diff --git a/google/cloud/aiplatform_v1/types/event.py b/google/cloud/aiplatform_v1/types/event.py index 8f80fe9fea..2f4081629b 100644 --- a/google/cloud/aiplatform_v1/types/event.py +++ b/google/cloud/aiplatform_v1/types/event.py @@ -75,6 +75,7 @@ class Type(proto.Enum): OUTPUT (2): An output of the Execution. """ + TYPE_UNSPECIFIED = 0 INPUT = 1 OUTPUT = 2 diff --git a/google/cloud/aiplatform_v1/types/execution.py b/google/cloud/aiplatform_v1/types/execution.py index ae5f42ad0c..390e185410 100644 --- a/google/cloud/aiplatform_v1/types/execution.py +++ b/google/cloud/aiplatform_v1/types/execution.py @@ -111,6 +111,7 @@ class State(proto.Enum): CANCELLED (6): The Execution was cancelled. """ + STATE_UNSPECIFIED = 0 NEW = 1 RUNNING = 2 diff --git a/google/cloud/aiplatform_v1/types/explanation.py b/google/cloud/aiplatform_v1/types/explanation.py index 85d4287365..712c9017cb 100644 --- a/google/cloud/aiplatform_v1/types/explanation.py +++ b/google/cloud/aiplatform_v1/types/explanation.py @@ -774,6 +774,7 @@ class DataFormat(proto.Enum): JSONL (1): Examples are stored in JSONL files. """ + DATA_FORMAT_UNSPECIFIED = 0 JSONL = 1 @@ -845,6 +846,7 @@ class Query(proto.Enum): Faster response as a trade-off against less precise neighbors. """ + PRECISE = 0 FAST = 1 @@ -862,6 +864,7 @@ class Modality(proto.Enum): TABULAR (3): TABULAR modality """ + MODALITY_UNSPECIFIED = 0 IMAGE = 1 TEXT = 2 @@ -995,6 +998,7 @@ class DataFormat(proto.Enum): EMBEDDINGS (2): Provided data is a set of embeddings. 
""" + DATA_FORMAT_UNSPECIFIED = 0 INSTANCES = 1 EMBEDDINGS = 2 @@ -1007,12 +1011,12 @@ class DataFormat(proto.Enum): proto.INT32, number=2, ) - restrictions: MutableSequence[ - "ExamplesRestrictionsNamespace" - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message="ExamplesRestrictionsNamespace", + restrictions: MutableSequence["ExamplesRestrictionsNamespace"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message="ExamplesRestrictionsNamespace", + ) ) return_embeddings: bool = proto.Field( proto.BOOL, diff --git a/google/cloud/aiplatform_v1/types/explanation_metadata.py b/google/cloud/aiplatform_v1/types/explanation_metadata.py index df8068de31..b4be565e37 100644 --- a/google/cloud/aiplatform_v1/types/explanation_metadata.py +++ b/google/cloud/aiplatform_v1/types/explanation_metadata.py @@ -234,6 +234,7 @@ class Encoding(proto.Enum): [0.5, 0.3, 0.1, 0.2, 0.4], [0.4, 0.3, 0.2, 0.5, 0.1]] """ + ENCODING_UNSPECIFIED = 0 IDENTITY = 1 BAG_OF_FEATURES = 2 @@ -346,6 +347,7 @@ class Type(proto.Enum): Shows which region contributed to the image prediction by outlining the region. """ + TYPE_UNSPECIFIED = 0 PIXELS = 1 OUTLINES = 2 @@ -368,6 +370,7 @@ class Polarity(proto.Enum): Shows both positive and negative attributions. """ + POLARITY_UNSPECIFIED = 0 POSITIVE = 1 NEGATIVE = 2 @@ -396,6 +399,7 @@ class ColorMap(proto.Enum): PINK_WHITE_GREEN (5): PiYG palette. """ + COLOR_MAP_UNSPECIFIED = 0 PINK_GREEN = 1 VIRIDIS = 2 @@ -423,6 +427,7 @@ class OverlayType(proto.Enum): predictive parts of the image and hide the un-predictive parts. 
""" + OVERLAY_TYPE_UNSPECIFIED = 0 NONE = 1 ORIGINAL = 2 @@ -456,7 +461,9 @@ class OverlayType(proto.Enum): proto.FLOAT, number=5, ) - overlay_type: "ExplanationMetadata.InputMetadata.Visualization.OverlayType" = proto.Field( + overlay_type: ( + "ExplanationMetadata.InputMetadata.Visualization.OverlayType" + ) = proto.Field( proto.ENUM, number=6, enum="ExplanationMetadata.InputMetadata.Visualization.OverlayType", diff --git a/google/cloud/aiplatform_v1/types/feature.py b/google/cloud/aiplatform_v1/types/feature.py index d91f1e8c86..4b92f36f79 100644 --- a/google/cloud/aiplatform_v1/types/feature.py +++ b/google/cloud/aiplatform_v1/types/feature.py @@ -130,6 +130,7 @@ class ValueType(proto.Enum): STRUCT (14): Used for Feature that is struct. """ + VALUE_TYPE_UNSPECIFIED = 0 BOOL = 1 BOOL_ARRAY = 2 @@ -175,6 +176,7 @@ class Objective(proto.Enum): SNAPSHOT_ANALYSIS (2): Stats are generated by Snapshot Analysis. """ + OBJECTIVE_UNSPECIFIED = 0 IMPORT_FEATURE_ANALYSIS = 1 SNAPSHOT_ANALYSIS = 2 @@ -228,12 +230,12 @@ class Objective(proto.Enum): proto.BOOL, number=12, ) - monitoring_stats_anomalies: MutableSequence[ - MonitoringStatsAnomaly - ] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=MonitoringStatsAnomaly, + monitoring_stats_anomalies: MutableSequence[MonitoringStatsAnomaly] = ( + proto.RepeatedField( + proto.MESSAGE, + number=11, + message=MonitoringStatsAnomaly, + ) ) version_column_name: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/feature_online_store.py b/google/cloud/aiplatform_v1/types/feature_online_store.py index 68593c9903..c921d6c98a 100644 --- a/google/cloud/aiplatform_v1/types/feature_online_store.py +++ b/google/cloud/aiplatform_v1/types/feature_online_store.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from 
google.cloud.aiplatform_v1.types import service_networking from google.protobuf import timestamp_pb2 # type: ignore @@ -128,6 +130,7 @@ class State(proto.Enum): featureOnlineStore is still usable in this state. """ + STATE_UNSPECIFIED = 0 STABLE = 1 UPDATING = 2 @@ -214,7 +217,9 @@ class DedicatedServingEndpoint(proto.Message): proto.STRING, number=2, ) - private_service_connect_config: service_networking.PrivateServiceConnectConfig = proto.Field( + private_service_connect_config: ( + service_networking.PrivateServiceConnectConfig + ) = proto.Field( proto.MESSAGE, number=3, message=service_networking.PrivateServiceConnectConfig, diff --git a/google/cloud/aiplatform_v1/types/feature_online_store_admin_service.py b/google/cloud/aiplatform_v1/types/feature_online_store_admin_service.py index d8b0385b5d..18f6548a20 100644 --- a/google/cloud/aiplatform_v1/types/feature_online_store_admin_service.py +++ b/google/cloud/aiplatform_v1/types/feature_online_store_admin_service.py @@ -23,7 +23,9 @@ feature_online_store as gca_feature_online_store, ) from google.cloud.aiplatform_v1.types import feature_view as gca_feature_view -from google.cloud.aiplatform_v1.types import feature_view_sync as gca_feature_view_sync +from google.cloud.aiplatform_v1.types import ( + feature_view_sync as gca_feature_view_sync, +) from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -706,12 +708,12 @@ class ListFeatureViewSyncsResponse(proto.Message): def raw_page(self): return self - feature_view_syncs: MutableSequence[ - gca_feature_view_sync.FeatureViewSync - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_feature_view_sync.FeatureViewSync, + feature_view_syncs: MutableSequence[gca_feature_view_sync.FeatureViewSync] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_feature_view_sync.FeatureViewSync, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git 
a/google/cloud/aiplatform_v1/types/feature_online_store_service.py b/google/cloud/aiplatform_v1/types/feature_online_store_service.py index 40838f0720..f97de125bd 100644 --- a/google/cloud/aiplatform_v1/types/feature_online_store_service.py +++ b/google/cloud/aiplatform_v1/types/feature_online_store_service.py @@ -54,6 +54,7 @@ class FeatureViewDataFormat(proto.Enum): PROTO_STRUCT (2): Return response data in proto Struct format. """ + FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED = 0 KEY_VALUE = 1 PROTO_STRUCT = 2 @@ -385,6 +386,7 @@ class Operator(proto.Enum): Entities are eligible if their value is != the query's. """ + OPERATOR_UNSPECIFIED = 0 LESS = 1 LESS_EQUAL = 2 @@ -645,12 +647,12 @@ class Feature(proto.Message): proto.STRING, number=1, ) - data_key_and_feature_values: MutableSequence[ - DataKeyAndFeatureValues - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=DataKeyAndFeatureValues, + data_key_and_feature_values: MutableSequence[DataKeyAndFeatureValues] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=DataKeyAndFeatureValues, + ) ) diff --git a/google/cloud/aiplatform_v1/types/feature_registry_service.py b/google/cloud/aiplatform_v1/types/feature_registry_service.py index 4fd306bc8a..bc1e9f18db 100644 --- a/google/cloud/aiplatform_v1/types/feature_registry_service.py +++ b/google/cloud/aiplatform_v1/types/feature_registry_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1.types import ( + feature_group as gca_feature_group, +) from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -184,12 +186,12 @@ class ListFeatureGroupsResponse(proto.Message): def raw_page(self): return self - feature_groups: MutableSequence[ - gca_feature_group.FeatureGroup - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_feature_group.FeatureGroup, + 
feature_groups: MutableSequence[gca_feature_group.FeatureGroup] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_feature_group.FeatureGroup, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/feature_view.py b/google/cloud/aiplatform_v1/types/feature_view.py index ed8a9da6b3..8a59837bdd 100644 --- a/google/cloud/aiplatform_v1/types/feature_view.py +++ b/google/cloud/aiplatform_v1/types/feature_view.py @@ -142,6 +142,7 @@ class ServiceAgentType(proto.Enum): service account will be used to read from the source BigQuery table during sync. """ + SERVICE_AGENT_TYPE_UNSPECIFIED = 0 SERVICE_AGENT_TYPE_PROJECT = 1 SERVICE_AGENT_TYPE_FEATURE_VIEW = 2 @@ -270,6 +271,7 @@ class DistanceMeasureType(proto.Enum): Dot Product Distance. Defined as a negative of the dot product. """ + DISTANCE_MEASURE_TYPE_UNSPECIFIED = 0 SQUARED_L2_DISTANCE = 1 COSINE_DISTANCE = 2 diff --git a/google/cloud/aiplatform_v1/types/featurestore.py b/google/cloud/aiplatform_v1/types/featurestore.py index e40fd9dd6b..6be53cbe60 100644 --- a/google/cloud/aiplatform_v1/types/featurestore.py +++ b/google/cloud/aiplatform_v1/types/featurestore.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import timestamp_pb2 # type: ignore @@ -120,6 +122,7 @@ class State(proto.Enum): still be the original value of ``fixed_node_count``. The featurestore is still usable in this state. 
""" + STATE_UNSPECIFIED = 0 STABLE = 1 UPDATING = 2 diff --git a/google/cloud/aiplatform_v1/types/featurestore_monitoring.py b/google/cloud/aiplatform_v1/types/featurestore_monitoring.py index fedaf953cf..5800e8631b 100644 --- a/google/cloud/aiplatform_v1/types/featurestore_monitoring.py +++ b/google/cloud/aiplatform_v1/types/featurestore_monitoring.py @@ -139,6 +139,7 @@ class State(proto.Enum): features analysis regardless of the EntityType-level config. """ + STATE_UNSPECIFIED = 0 DEFAULT = 1 ENABLED = 2 @@ -166,6 +167,7 @@ class Baseline(proto.Enum): Use the statistics generated by the previous import features analysis if exists. """ + BASELINE_UNSPECIFIED = 0 LATEST_STATS = 1 MOST_RECENT_SNAPSHOT_STATS = 2 @@ -178,7 +180,9 @@ class Baseline(proto.Enum): enum="FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.State", ) ) - anomaly_detection_baseline: "FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.Baseline" = proto.Field( + anomaly_detection_baseline: ( + "FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.Baseline" + ) = proto.Field( proto.ENUM, number=2, enum="FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.Baseline", diff --git a/google/cloud/aiplatform_v1/types/featurestore_online_service.py b/google/cloud/aiplatform_v1/types/featurestore_online_service.py index 503e03d6df..c9afbc0d6f 100644 --- a/google/cloud/aiplatform_v1/types/featurestore_online_service.py +++ b/google/cloud/aiplatform_v1/types/featurestore_online_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import feature_selector as gca_feature_selector +from google.cloud.aiplatform_v1.types import ( + feature_selector as gca_feature_selector, +) from google.cloud.aiplatform_v1.types import types from google.protobuf import timestamp_pb2 # type: ignore @@ -250,12 +252,12 @@ class Data(proto.Message): proto.STRING, number=1, ) - data: MutableSequence[ - "ReadFeatureValuesResponse.EntityView.Data" - ] = proto.RepeatedField( - proto.MESSAGE, 
- number=2, - message="ReadFeatureValuesResponse.EntityView.Data", + data: MutableSequence["ReadFeatureValuesResponse.EntityView.Data"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ReadFeatureValuesResponse.EntityView.Data", + ) ) header: Header = proto.Field( diff --git a/google/cloud/aiplatform_v1/types/featurestore_service.py b/google/cloud/aiplatform_v1/types/featurestore_service.py index 06ce677166..074b826d1d 100644 --- a/google/cloud/aiplatform_v1/types/featurestore_service.py +++ b/google/cloud/aiplatform_v1/types/featurestore_service.py @@ -21,7 +21,9 @@ from google.cloud.aiplatform_v1.types import entity_type as gca_entity_type from google.cloud.aiplatform_v1.types import feature as gca_feature -from google.cloud.aiplatform_v1.types import feature_selector as gca_feature_selector +from google.cloud.aiplatform_v1.types import ( + feature_selector as gca_feature_selector, +) from google.cloud.aiplatform_v1.types import featurestore as gca_featurestore from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import operation diff --git a/google/cloud/aiplatform_v1/types/gen_ai_cache_service.py b/google/cloud/aiplatform_v1/types/gen_ai_cache_service.py index 4e38aafdc1..ced9f1da16 100644 --- a/google/cloud/aiplatform_v1/types/gen_ai_cache_service.py +++ b/google/cloud/aiplatform_v1/types/gen_ai_cache_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.protobuf import field_mask_pb2 # type: ignore @@ -169,12 +171,12 @@ class ListCachedContentsResponse(proto.Message): def raw_page(self): return self - cached_contents: MutableSequence[ - gca_cached_content.CachedContent - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_cached_content.CachedContent, + cached_contents: 
MutableSequence[gca_cached_content.CachedContent] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_cached_content.CachedContent, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/hyperparameter_tuning_job.py b/google/cloud/aiplatform_v1/types/hyperparameter_tuning_job.py index e5eaee7b18..915f56d6d9 100644 --- a/google/cloud/aiplatform_v1/types/hyperparameter_tuning_job.py +++ b/google/cloud/aiplatform_v1/types/hyperparameter_tuning_job.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import custom_job -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import job_state from google.cloud.aiplatform_v1.types import study from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/index.py b/google/cloud/aiplatform_v1/types/index.py index 1cc08f0fd6..8c0d8c6192 100644 --- a/google/cloud/aiplatform_v1/types/index.py +++ b/google/cloud/aiplatform_v1/types/index.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import deployed_index_ref -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -130,6 +132,7 @@ class IndexUpdateMethod(proto.Enum): corresponding DeployedIndexes in nearly real-time. 
""" + INDEX_UPDATE_METHOD_UNSPECIFIED = 0 BATCH_UPDATE = 1 STREAM_UPDATE = 2 @@ -155,12 +158,12 @@ class IndexUpdateMethod(proto.Enum): number=6, message=struct_pb2.Value, ) - deployed_indexes: MutableSequence[ - deployed_index_ref.DeployedIndexRef - ] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=deployed_index_ref.DeployedIndexRef, + deployed_indexes: MutableSequence[deployed_index_ref.DeployedIndexRef] = ( + proto.RepeatedField( + proto.MESSAGE, + number=7, + message=deployed_index_ref.DeployedIndexRef, + ) ) etag: str = proto.Field( proto.STRING, @@ -355,6 +358,7 @@ class Operator(proto.Enum): Datapoints are eligible iff their value is != the query's. """ + OPERATOR_UNSPECIFIED = 0 LESS = 1 LESS_EQUAL = 2 diff --git a/google/cloud/aiplatform_v1/types/index_endpoint.py b/google/cloud/aiplatform_v1/types/index_endpoint.py index 9a0e9f7fd2..bf0b130cef 100644 --- a/google/cloud/aiplatform_v1/types/index_endpoint.py +++ b/google/cloud/aiplatform_v1/types/index_endpoint.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import machine_resources from google.cloud.aiplatform_v1.types import service_networking from google.protobuf import timestamp_pb2 # type: ignore @@ -407,12 +409,12 @@ class DeployedIndex(proto.Message): proto.STRING, number=11, ) - psc_automation_configs: MutableSequence[ - service_networking.PSCAutomationConfig - ] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message=service_networking.PSCAutomationConfig, + psc_automation_configs: MutableSequence[service_networking.PSCAutomationConfig] = ( + proto.RepeatedField( + proto.MESSAGE, + number=19, + message=service_networking.PSCAutomationConfig, + ) ) diff --git a/google/cloud/aiplatform_v1/types/index_endpoint_service.py 
b/google/cloud/aiplatform_v1/types/index_endpoint_service.py index f574237cbe..c657a1aa00 100644 --- a/google/cloud/aiplatform_v1/types/index_endpoint_service.py +++ b/google/cloud/aiplatform_v1/types/index_endpoint_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -190,12 +192,12 @@ class ListIndexEndpointsResponse(proto.Message): def raw_page(self): return self - index_endpoints: MutableSequence[ - gca_index_endpoint.IndexEndpoint - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_index_endpoint.IndexEndpoint, + index_endpoints: MutableSequence[gca_index_endpoint.IndexEndpoint] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_index_endpoint.IndexEndpoint, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/index_service.py b/google/cloud/aiplatform_v1/types/index_service.py index eac2867fed..15b3774a0b 100644 --- a/google/cloud/aiplatform_v1/types/index_service.py +++ b/google/cloud/aiplatform_v1/types/index_service.py @@ -85,7 +85,9 @@ class CreateIndexOperationMetadata(proto.Message): number=1, message=operation.GenericOperationMetadata, ) - nearest_neighbor_search_operation_metadata: "NearestNeighborSearchOperationMetadata" = proto.Field( + nearest_neighbor_search_operation_metadata: ( + "NearestNeighborSearchOperationMetadata" + ) = proto.Field( proto.MESSAGE, number=2, message="NearestNeighborSearchOperationMetadata", @@ -225,7 +227,9 @@ class UpdateIndexOperationMetadata(proto.Message): number=1, message=operation.GenericOperationMetadata, ) - nearest_neighbor_search_operation_metadata: "NearestNeighborSearchOperationMetadata" = proto.Field( + 
nearest_neighbor_search_operation_metadata: ( + "NearestNeighborSearchOperationMetadata" + ) = proto.Field( proto.MESSAGE, number=2, message="NearestNeighborSearchOperationMetadata", @@ -410,6 +414,7 @@ class RecordErrorType(proto.Enum): INVALID_EMBEDDING (17): Invalid dense embedding. """ + ERROR_TYPE_UNSPECIFIED = 0 EMPTY_LINE = 1 INVALID_JSON_SYNTAX = 2 @@ -429,7 +434,9 @@ class RecordErrorType(proto.Enum): INVALID_SPARSE_EMBEDDING = 16 INVALID_EMBEDDING = 17 - error_type: "NearestNeighborSearchOperationMetadata.RecordError.RecordErrorType" = proto.Field( + error_type: ( + "NearestNeighborSearchOperationMetadata.RecordError.RecordErrorType" + ) = proto.Field( proto.ENUM, number=1, enum="NearestNeighborSearchOperationMetadata.RecordError.RecordErrorType", @@ -505,12 +512,12 @@ class ContentValidationStats(proto.Message): number=6, ) - content_validation_stats: MutableSequence[ - ContentValidationStats - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=ContentValidationStats, + content_validation_stats: MutableSequence[ContentValidationStats] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=ContentValidationStats, + ) ) data_bytes_count: int = proto.Field( proto.INT64, diff --git a/google/cloud/aiplatform_v1/types/io.py b/google/cloud/aiplatform_v1/types/io.py index 0fcc4b8080..d6b4ba2a03 100644 --- a/google/cloud/aiplatform_v1/types/io.py +++ b/google/cloud/aiplatform_v1/types/io.py @@ -238,6 +238,7 @@ class ResourceType(proto.Enum): RESOURCE_TYPE_FOLDER (2): Folder resource type. 
""" + RESOURCE_TYPE_UNSPECIFIED = 0 RESOURCE_TYPE_FILE = 1 RESOURCE_TYPE_FOLDER = 2 @@ -320,12 +321,12 @@ class SlackChannel(proto.Message): message=timestamp_pb2.Timestamp, ) - channels: MutableSequence[ - "SlackSource.SlackChannels.SlackChannel" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="SlackSource.SlackChannels.SlackChannel", + channels: MutableSequence["SlackSource.SlackChannels.SlackChannel"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SlackSource.SlackChannels.SlackChannel", + ) ) api_key_config: api_auth.ApiAuth.ApiKeyConfig = proto.Field( proto.MESSAGE, diff --git a/google/cloud/aiplatform_v1/types/job_service.py b/google/cloud/aiplatform_v1/types/job_service.py index d5d6751e43..056dfdba75 100644 --- a/google/cloud/aiplatform_v1/types/job_service.py +++ b/google/cloud/aiplatform_v1/types/job_service.py @@ -23,7 +23,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) @@ -384,12 +386,12 @@ class ListDataLabelingJobsResponse(proto.Message): def raw_page(self): return self - data_labeling_jobs: MutableSequence[ - gca_data_labeling_job.DataLabelingJob - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_data_labeling_job.DataLabelingJob, + data_labeling_jobs: MutableSequence[gca_data_labeling_job.DataLabelingJob] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_data_labeling_job.DataLabelingJob, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -842,12 +844,12 @@ class ListNasTrialDetailsResponse(proto.Message): def raw_page(self): return self - nas_trial_details: MutableSequence[ - 
gca_nas_job.NasTrialDetail - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_nas_job.NasTrialDetail, + nas_trial_details: MutableSequence[gca_nas_job.NasTrialDetail] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_nas_job.NasTrialDetail, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -1047,7 +1049,9 @@ class CreateModelDeploymentMonitoringJobRequest(proto.Message): proto.STRING, number=1, ) - model_deployment_monitoring_job: gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob = proto.Field( + model_deployment_monitoring_job: ( + gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob + ) = proto.Field( proto.MESSAGE, number=2, message=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob, @@ -1107,7 +1111,9 @@ class StatsAnomaliesObjective(proto.Message): latest monitoring run. """ - type_: gca_model_deployment_monitoring_job.ModelDeploymentMonitoringObjectiveType = proto.Field( + type_: ( + gca_model_deployment_monitoring_job.ModelDeploymentMonitoringObjectiveType + ) = proto.Field( proto.ENUM, number=1, enum=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringObjectiveType, @@ -1332,7 +1338,9 @@ class UpdateModelDeploymentMonitoringJobRequest(proto.Message): - ``model_deployment_monitoring_objective_configs.objective_config.prediction_drift_detection_config`` """ - model_deployment_monitoring_job: gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob = proto.Field( + model_deployment_monitoring_job: ( + gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob + ) = proto.Field( proto.MESSAGE, number=1, message=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob, diff --git a/google/cloud/aiplatform_v1/types/job_state.py b/google/cloud/aiplatform_v1/types/job_state.py index ffcc75603f..ea784a89fd 100644 --- a/google/cloud/aiplatform_v1/types/job_state.py +++ b/google/cloud/aiplatform_v1/types/job_state.py @@ -63,6 +63,7 @@ class 
JobState(proto.Enum): The job is partially succeeded, some results may be missing due to errors. """ + JOB_STATE_UNSPECIFIED = 0 JOB_STATE_QUEUED = 1 JOB_STATE_PENDING = 2 diff --git a/google/cloud/aiplatform_v1/types/machine_resources.py b/google/cloud/aiplatform_v1/types/machine_resources.py index 4291ef3a0d..3b47081441 100644 --- a/google/cloud/aiplatform_v1/types/machine_resources.py +++ b/google/cloud/aiplatform_v1/types/machine_resources.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import accelerator_type as gca_accelerator_type +from google.cloud.aiplatform_v1.types import ( + accelerator_type as gca_accelerator_type, +) from google.cloud.aiplatform_v1.types import ( reservation_affinity as gca_reservation_affinity, ) @@ -199,12 +201,12 @@ class DedicatedResources(proto.Message): proto.INT32, number=9, ) - autoscaling_metric_specs: MutableSequence[ - "AutoscalingMetricSpec" - ] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="AutoscalingMetricSpec", + autoscaling_metric_specs: MutableSequence["AutoscalingMetricSpec"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AutoscalingMetricSpec", + ) ) spot: bool = proto.Field( proto.BOOL, diff --git a/google/cloud/aiplatform_v1/types/match_service.py b/google/cloud/aiplatform_v1/types/match_service.py index 92844b4994..0247f6f142 100644 --- a/google/cloud/aiplatform_v1/types/match_service.py +++ b/google/cloud/aiplatform_v1/types/match_service.py @@ -222,12 +222,12 @@ class NearestNeighbors(proto.Message): proto.STRING, number=1, ) - neighbors: MutableSequence[ - "FindNeighborsResponse.Neighbor" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="FindNeighborsResponse.Neighbor", + neighbors: MutableSequence["FindNeighborsResponse.Neighbor"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="FindNeighborsResponse.Neighbor", + ) ) nearest_neighbors: MutableSequence[NearestNeighbors] = proto.RepeatedField( diff 
--git a/google/cloud/aiplatform_v1/types/metadata_schema.py b/google/cloud/aiplatform_v1/types/metadata_schema.py index 2736fe90e4..a5043d3113 100644 --- a/google/cloud/aiplatform_v1/types/metadata_schema.py +++ b/google/cloud/aiplatform_v1/types/metadata_schema.py @@ -79,6 +79,7 @@ class MetadataSchemaType(proto.Enum): A state indicating that the MetadataSchema will be used by Contexts. """ + METADATA_SCHEMA_TYPE_UNSPECIFIED = 0 ARTIFACT_TYPE = 1 EXECUTION_TYPE = 2 diff --git a/google/cloud/aiplatform_v1/types/metadata_service.py b/google/cloud/aiplatform_v1/types/metadata_service.py index 9ca8751d1b..7fa1bda618 100644 --- a/google/cloud/aiplatform_v1/types/metadata_service.py +++ b/google/cloud/aiplatform_v1/types/metadata_service.py @@ -23,8 +23,12 @@ from google.cloud.aiplatform_v1.types import context as gca_context from google.cloud.aiplatform_v1.types import event from google.cloud.aiplatform_v1.types import execution as gca_execution -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema -from google.cloud.aiplatform_v1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) +from google.cloud.aiplatform_v1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -213,12 +217,12 @@ class ListMetadataStoresResponse(proto.Message): def raw_page(self): return self - metadata_stores: MutableSequence[ - gca_metadata_store.MetadataStore - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_metadata_store.MetadataStore, + metadata_stores: MutableSequence[gca_metadata_store.MetadataStore] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_metadata_store.MetadataStore, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -1490,12 +1494,12 @@ class ListMetadataSchemasResponse(proto.Message): def 
raw_page(self): return self - metadata_schemas: MutableSequence[ - gca_metadata_schema.MetadataSchema - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_metadata_schema.MetadataSchema, + metadata_schemas: MutableSequence[gca_metadata_schema.MetadataSchema] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_metadata_schema.MetadataSchema, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/metadata_store.py b/google/cloud/aiplatform_v1/types/metadata_store.py index eed1cc747c..70adb59de0 100644 --- a/google/cloud/aiplatform_v1/types/metadata_store.py +++ b/google/cloud/aiplatform_v1/types/metadata_store.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/migration_service.py b/google/cloud/aiplatform_v1/types/migration_service.py index f60058adda..3e75344527 100644 --- a/google/cloud/aiplatform_v1/types/migration_service.py +++ b/google/cloud/aiplatform_v1/types/migration_service.py @@ -147,12 +147,12 @@ class BatchMigrateResourcesRequest(proto.Message): proto.STRING, number=1, ) - migrate_resource_requests: MutableSequence[ - "MigrateResourceRequest" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="MigrateResourceRequest", + migrate_resource_requests: MutableSequence["MigrateResourceRequest"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="MigrateResourceRequest", + ) ) @@ -367,12 +367,12 @@ class BatchMigrateResourcesResponse(proto.Message): Successfully migrated resources. 
""" - migrate_resource_responses: MutableSequence[ - "MigrateResourceResponse" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="MigrateResourceResponse", + migrate_resource_responses: MutableSequence["MigrateResourceResponse"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MigrateResourceResponse", + ) ) diff --git a/google/cloud/aiplatform_v1/types/model.py b/google/cloud/aiplatform_v1/types/model.py index 8f305375a5..3c65e62f64 100644 --- a/google/cloud/aiplatform_v1/types/model.py +++ b/google/cloud/aiplatform_v1/types/model.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import deployed_model_ref -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import env_var from google.cloud.aiplatform_v1.types import explanation from google.protobuf import duration_pb2 # type: ignore @@ -349,6 +351,7 @@ class DeploymentResourcesType(proto.Enum): [DeploymentResourcePool][google.cloud.aiplatform.v1.DeploymentResourcePool] is required. """ + DEPLOYMENT_RESOURCES_TYPE_UNSPECIFIED = 0 DEDICATED_RESOURCES = 1 AUTOMATIC_RESOURCES = 2 @@ -404,6 +407,7 @@ class ExportableContent(proto.Enum): [ExportModelRequest.output_config][google.cloud.aiplatform.v1.ExportModelRequest.output_config] object. 
""" + EXPORTABLE_CONTENT_UNSPECIFIED = 0 ARTIFACT = 1 IMAGE = 2 @@ -412,12 +416,12 @@ class ExportableContent(proto.Enum): proto.STRING, number=1, ) - exportable_contents: MutableSequence[ - "Model.ExportFormat.ExportableContent" - ] = proto.RepeatedField( - proto.ENUM, - number=2, - enum="Model.ExportFormat.ExportableContent", + exportable_contents: MutableSequence["Model.ExportFormat.ExportableContent"] = ( + proto.RepeatedField( + proto.ENUM, + number=2, + enum="Model.ExportFormat.ExportableContent", + ) ) class DataStats(proto.Message): @@ -602,12 +606,12 @@ class BaseModelSource(proto.Message): proto.STRING, number=26, ) - supported_deployment_resources_types: MutableSequence[ - DeploymentResourcesType - ] = proto.RepeatedField( - proto.ENUM, - number=10, - enum=DeploymentResourcesType, + supported_deployment_resources_types: MutableSequence[DeploymentResourcesType] = ( + proto.RepeatedField( + proto.ENUM, + number=10, + enum=DeploymentResourcesType, + ) ) supported_input_storage_formats: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -627,12 +631,12 @@ class BaseModelSource(proto.Message): number=14, message=timestamp_pb2.Timestamp, ) - deployed_models: MutableSequence[ - deployed_model_ref.DeployedModelRef - ] = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=deployed_model_ref.DeployedModelRef, + deployed_models: MutableSequence[deployed_model_ref.DeployedModelRef] = ( + proto.RepeatedField( + proto.MESSAGE, + number=15, + message=deployed_model_ref.DeployedModelRef, + ) ) explanation_spec: explanation.ExplanationSpec = proto.Field( proto.MESSAGE, @@ -1223,6 +1227,7 @@ class ModelSourceType(proto.Enum): MARKETPLACE (7): The Model is saved or tuned from Marketplace. 
""" + MODEL_SOURCE_TYPE_UNSPECIFIED = 0 AUTOML = 1 CUSTOM = 2 diff --git a/google/cloud/aiplatform_v1/types/model_deployment_monitoring_job.py b/google/cloud/aiplatform_v1/types/model_deployment_monitoring_job.py index 791ab305fd..3ca2a22825 100644 --- a/google/cloud/aiplatform_v1/types/model_deployment_monitoring_job.py +++ b/google/cloud/aiplatform_v1/types/model_deployment_monitoring_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import feature_monitoring_stats from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import job_state @@ -63,6 +65,7 @@ class ModelDeploymentMonitoringObjectiveType(proto.Enum): between Prediction datasets collected within different time windows. """ + MODEL_DEPLOYMENT_MONITORING_OBJECTIVE_TYPE_UNSPECIFIED = 0 RAW_FEATURE_SKEW = 1 RAW_FEATURE_DRIFT = 2 @@ -218,6 +221,7 @@ class MonitoringScheduleState(proto.Enum): RUNNING (3): The pipeline is running. 
""" + MONITORING_SCHEDULE_STATE_UNSPECIFIED = 0 PENDING = 1 OFFLINE = 2 @@ -280,7 +284,9 @@ class LatestMonitoringPipelineMetadata(proto.Message): number=6, message="ModelDeploymentMonitoringObjectiveConfig", ) - model_deployment_monitoring_schedule_config: "ModelDeploymentMonitoringScheduleConfig" = proto.Field( + model_deployment_monitoring_schedule_config: ( + "ModelDeploymentMonitoringScheduleConfig" + ) = proto.Field( proto.MESSAGE, number=7, message="ModelDeploymentMonitoringScheduleConfig", @@ -310,12 +316,12 @@ class LatestMonitoringPipelineMetadata(proto.Message): proto.STRING, number=16, ) - bigquery_tables: MutableSequence[ - "ModelDeploymentMonitoringBigQueryTable" - ] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="ModelDeploymentMonitoringBigQueryTable", + bigquery_tables: MutableSequence["ModelDeploymentMonitoringBigQueryTable"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=10, + message="ModelDeploymentMonitoringBigQueryTable", + ) ) log_ttl: duration_pb2.Duration = proto.Field( proto.MESSAGE, @@ -402,6 +408,7 @@ class LogSource(proto.Enum): SERVING (2): Logs coming from Serving traffic. """ + LOG_SOURCE_UNSPECIFIED = 0 TRAINING = 1 SERVING = 2 @@ -417,6 +424,7 @@ class LogType(proto.Enum): EXPLAIN (2): Explain logs. 
""" + LOG_TYPE_UNSPECIFIED = 0 PREDICT = 1 EXPLAIN = 2 diff --git a/google/cloud/aiplatform_v1/types/model_evaluation.py b/google/cloud/aiplatform_v1/types/model_evaluation.py index 0e50ada361..89d0fa0898 100644 --- a/google/cloud/aiplatform_v1/types/model_evaluation.py +++ b/google/cloud/aiplatform_v1/types/model_evaluation.py @@ -171,12 +171,12 @@ class ModelEvaluationExplanationSpec(proto.Message): number=8, message=explanation.ModelExplanation, ) - explanation_specs: MutableSequence[ - ModelEvaluationExplanationSpec - ] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=ModelEvaluationExplanationSpec, + explanation_specs: MutableSequence[ModelEvaluationExplanationSpec] = ( + proto.RepeatedField( + proto.MESSAGE, + number=9, + message=ModelEvaluationExplanationSpec, + ) ) metadata: struct_pb2.Value = proto.Field( proto.MESSAGE, diff --git a/google/cloud/aiplatform_v1/types/model_garden_service.py b/google/cloud/aiplatform_v1/types/model_garden_service.py index f50674f082..2e50eed228 100644 --- a/google/cloud/aiplatform_v1/types/model_garden_service.py +++ b/google/cloud/aiplatform_v1/types/model_garden_service.py @@ -52,6 +52,7 @@ class PublisherModelView(proto.Enum): Include: VersionId, ModelVersionExternalName, and SupportedActions. 
""" + PUBLISHER_MODEL_VIEW_UNSPECIFIED = 0 PUBLISHER_MODEL_VIEW_BASIC = 1 PUBLISHER_MODEL_VIEW_FULL = 2 diff --git a/google/cloud/aiplatform_v1/types/model_monitoring.py b/google/cloud/aiplatform_v1/types/model_monitoring.py index b3888bd712..8ded08af30 100644 --- a/google/cloud/aiplatform_v1/types/model_monitoring.py +++ b/google/cloud/aiplatform_v1/types/model_monitoring.py @@ -164,13 +164,13 @@ class TrainingPredictionSkewDetectionConfig(proto.Message): number=1, message="ThresholdConfig", ) - attribution_score_skew_thresholds: MutableMapping[ - str, "ThresholdConfig" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message="ThresholdConfig", + attribution_score_skew_thresholds: MutableMapping[str, "ThresholdConfig"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="ThresholdConfig", + ) ) default_skew_threshold: "ThresholdConfig" = proto.Field( proto.MESSAGE, @@ -207,13 +207,13 @@ class PredictionDriftDetectionConfig(proto.Message): number=1, message="ThresholdConfig", ) - attribution_score_drift_thresholds: MutableMapping[ - str, "ThresholdConfig" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message="ThresholdConfig", + attribution_score_drift_thresholds: MutableMapping[str, "ThresholdConfig"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="ThresholdConfig", + ) ) default_drift_threshold: "ThresholdConfig" = proto.Field( proto.MESSAGE, @@ -277,6 +277,7 @@ class PredictionFormat(proto.Enum): BIGQUERY (3): Predictions are in BigQuery. 
""" + PREDICTION_FORMAT_UNSPECIFIED = 0 JSONL = 2 BIGQUERY = 3 @@ -303,7 +304,9 @@ class PredictionFormat(proto.Enum): proto.BOOL, number=1, ) - explanation_baseline: "ModelMonitoringObjectiveConfig.ExplanationConfig.ExplanationBaseline" = proto.Field( + explanation_baseline: ( + "ModelMonitoringObjectiveConfig.ExplanationConfig.ExplanationBaseline" + ) = proto.Field( proto.MESSAGE, number=2, message="ModelMonitoringObjectiveConfig.ExplanationConfig.ExplanationBaseline", diff --git a/google/cloud/aiplatform_v1/types/model_service.py b/google/cloud/aiplatform_v1/types/model_service.py index 272741e635..92176fd9e7 100644 --- a/google/cloud/aiplatform_v1/types/model_service.py +++ b/google/cloud/aiplatform_v1/types/model_service.py @@ -19,12 +19,16 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import evaluated_annotation from google.cloud.aiplatform_v1.types import explanation from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import model as gca_model -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -993,12 +997,12 @@ class BatchImportEvaluatedAnnotationsRequest(proto.Message): proto.STRING, number=1, ) - evaluated_annotations: MutableSequence[ - evaluated_annotation.EvaluatedAnnotation - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=evaluated_annotation.EvaluatedAnnotation, + evaluated_annotations: MutableSequence[evaluated_annotation.EvaluatedAnnotation] = ( + proto.RepeatedField( + proto.MESSAGE, + 
number=2, + message=evaluated_annotation.EvaluatedAnnotation, + ) ) @@ -1098,12 +1102,12 @@ class ListModelEvaluationsResponse(proto.Message): def raw_page(self): return self - model_evaluations: MutableSequence[ - gca_model_evaluation.ModelEvaluation - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model_evaluation.ModelEvaluation, + model_evaluations: MutableSequence[gca_model_evaluation.ModelEvaluation] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_model_evaluation.ModelEvaluation, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/nas_job.py b/google/cloud/aiplatform_v1/types/nas_job.py index 3224f5feea..07a7a7f7e9 100644 --- a/google/cloud/aiplatform_v1/types/nas_job.py +++ b/google/cloud/aiplatform_v1/types/nas_job.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import custom_job -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import job_state from google.cloud.aiplatform_v1.types import study from google.protobuf import timestamp_pb2 # type: ignore @@ -263,6 +265,7 @@ class MultiTrialAlgorithm(proto.Enum): The Grid Search Algorithm for Multi-trial Neural Architecture Search (NAS). """ + MULTI_TRIAL_ALGORITHM_UNSPECIFIED = 0 REINFORCEMENT_LEARNING = 1 GRID_SEARCH = 2 @@ -290,6 +293,7 @@ class GoalType(proto.Enum): MINIMIZE (2): Minimize the goal metric. 
""" + GOAL_TYPE_UNSPECIFIED = 0 MAXIMIZE = 1 MINIMIZE = 2 @@ -376,7 +380,9 @@ class TrainTrialSpec(proto.Message): number=3, ) - multi_trial_algorithm: "NasJobSpec.MultiTrialAlgorithmSpec.MultiTrialAlgorithm" = proto.Field( + multi_trial_algorithm: ( + "NasJobSpec.MultiTrialAlgorithmSpec.MultiTrialAlgorithm" + ) = proto.Field( proto.ENUM, number=1, enum="NasJobSpec.MultiTrialAlgorithmSpec.MultiTrialAlgorithm", @@ -507,6 +513,7 @@ class State(proto.Enum): The service will set a NasTrial to INFEASIBLE when it's done but missing the final_measurement. """ + STATE_UNSPECIFIED = 0 REQUESTED = 1 ACTIVE = 2 diff --git a/google/cloud/aiplatform_v1/types/notebook_execution_job.py b/google/cloud/aiplatform_v1/types/notebook_execution_job.py index fd39bd3c92..13f5e8b49d 100644 --- a/google/cloud/aiplatform_v1/types/notebook_execution_job.py +++ b/google/cloud/aiplatform_v1/types/notebook_execution_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import job_state as gca_job_state from google.cloud.aiplatform_v1.types import machine_resources from google.cloud.aiplatform_v1.types import network_spec as gca_network_spec diff --git a/google/cloud/aiplatform_v1/types/notebook_runtime.py b/google/cloud/aiplatform_v1/types/notebook_runtime.py index 8746c69eee..a4bb41a181 100644 --- a/google/cloud/aiplatform_v1/types/notebook_runtime.py +++ b/google/cloud/aiplatform_v1/types/notebook_runtime.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import machine_resources from google.cloud.aiplatform_v1.types import network_spec as gca_network_spec from 
google.cloud.aiplatform_v1.types import notebook_euc_config @@ -55,6 +57,7 @@ class NotebookRuntimeType(proto.Enum): runtime or template with system defined configurations. """ + NOTEBOOK_RUNTIME_TYPE_UNSPECIFIED = 0 USER_DEFINED = 1 ONE_CLICK = 2 @@ -387,6 +390,7 @@ class HealthState(proto.Enum): NotebookRuntime is in unhealthy state. Applies to ACTIVE state. """ + HEALTH_STATE_UNSPECIFIED = 0 HEALTHY = 1 UNHEALTHY = 2 @@ -419,6 +423,7 @@ class RuntimeState(proto.Enum): NotebookRuntime is in invalid state. Cannot be recovered. """ + RUNTIME_STATE_UNSPECIFIED = 0 RUNNING = 1 BEING_STARTED = 2 @@ -436,7 +441,9 @@ class RuntimeState(proto.Enum): proto.STRING, number=2, ) - notebook_runtime_template_ref: gca_notebook_runtime_template_ref.NotebookRuntimeTemplateRef = proto.Field( + notebook_runtime_template_ref: ( + gca_notebook_runtime_template_ref.NotebookRuntimeTemplateRef + ) = proto.Field( proto.MESSAGE, number=3, message=gca_notebook_runtime_template_ref.NotebookRuntimeTemplateRef, diff --git a/google/cloud/aiplatform_v1/types/notebook_service.py b/google/cloud/aiplatform_v1/types/notebook_service.py index f519335c23..99c9395d5b 100644 --- a/google/cloud/aiplatform_v1/types/notebook_service.py +++ b/google/cloud/aiplatform_v1/types/notebook_service.py @@ -22,7 +22,9 @@ from google.cloud.aiplatform_v1.types import ( notebook_execution_job as gca_notebook_execution_job, ) -from google.cloud.aiplatform_v1.types import notebook_runtime as gca_notebook_runtime +from google.cloud.aiplatform_v1.types import ( + notebook_runtime as gca_notebook_runtime, +) from google.cloud.aiplatform_v1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -76,6 +78,7 @@ class NotebookExecutionJobView(proto.Enum): NOTEBOOK_EXECUTION_JOB_VIEW_FULL (2): Includes all fields. 
""" + NOTEBOOK_EXECUTION_JOB_VIEW_UNSPECIFIED = 0 NOTEBOOK_EXECUTION_JOB_VIEW_BASIC = 1 NOTEBOOK_EXECUTION_JOB_VIEW_FULL = 2 @@ -527,12 +530,12 @@ class ListNotebookRuntimesResponse(proto.Message): def raw_page(self): return self - notebook_runtimes: MutableSequence[ - gca_notebook_runtime.NotebookRuntime - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_notebook_runtime.NotebookRuntime, + notebook_runtimes: MutableSequence[gca_notebook_runtime.NotebookRuntime] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_notebook_runtime.NotebookRuntime, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/notebook_software_config.py b/google/cloud/aiplatform_v1/types/notebook_software_config.py index a04457cbba..84d0f528e1 100644 --- a/google/cloud/aiplatform_v1/types/notebook_software_config.py +++ b/google/cloud/aiplatform_v1/types/notebook_software_config.py @@ -63,6 +63,7 @@ class PostStartupScriptBehavior(proto.Enum): Download and run post startup script every time runtime is started. 
""" + POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED = 0 RUN_ONCE = 1 RUN_EVERY_START = 2 diff --git a/google/cloud/aiplatform_v1/types/openapi.py b/google/cloud/aiplatform_v1/types/openapi.py index e0ec494dd0..4ea820593b 100644 --- a/google/cloud/aiplatform_v1/types/openapi.py +++ b/google/cloud/aiplatform_v1/types/openapi.py @@ -51,6 +51,7 @@ class Type(proto.Enum): OBJECT (6): OpenAPI object type """ + TYPE_UNSPECIFIED = 0 STRING = 1 NUMBER = 2 diff --git a/google/cloud/aiplatform_v1/types/persistent_resource.py b/google/cloud/aiplatform_v1/types/persistent_resource.py index 91173097e3..f456405a19 100644 --- a/google/cloud/aiplatform_v1/types/persistent_resource.py +++ b/google/cloud/aiplatform_v1/types/persistent_resource.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import machine_resources from google.cloud.aiplatform_v1.types import service_networking from google.protobuf import timestamp_pb2 # type: ignore @@ -155,6 +157,7 @@ class State(proto.Enum): The UPDATING state indicates the persistent resource is being updated. """ + STATE_UNSPECIFIED = 0 PROVISIONING = 1 RUNNING = 3 diff --git a/google/cloud/aiplatform_v1/types/pipeline_failure_policy.py b/google/cloud/aiplatform_v1/types/pipeline_failure_policy.py index d9549d6248..f87c4b0e27 100644 --- a/google/cloud/aiplatform_v1/types/pipeline_failure_policy.py +++ b/google/cloud/aiplatform_v1/types/pipeline_failure_policy.py @@ -49,6 +49,7 @@ class PipelineFailurePolicy(proto.Enum): Indicates that the pipeline should stop scheduling new tasks after a task has failed. 
""" + PIPELINE_FAILURE_POLICY_UNSPECIFIED = 0 PIPELINE_FAILURE_POLICY_FAIL_SLOW = 1 PIPELINE_FAILURE_POLICY_FAIL_FAST = 2 diff --git a/google/cloud/aiplatform_v1/types/pipeline_job.py b/google/cloud/aiplatform_v1/types/pipeline_job.py index 00b573f669..3c539888e2 100644 --- a/google/cloud/aiplatform_v1/types/pipeline_job.py +++ b/google/cloud/aiplatform_v1/types/pipeline_job.py @@ -21,7 +21,9 @@ from google.cloud.aiplatform_v1.types import artifact from google.cloud.aiplatform_v1.types import context -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import pipeline_failure_policy from google.cloud.aiplatform_v1.types import pipeline_state @@ -489,6 +491,7 @@ class State(proto.Enum): specified in the ``condition`` field of [PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec]. 
""" + STATE_UNSPECIFIED = 0 PENDING = 1 RUNNING = 2 diff --git a/google/cloud/aiplatform_v1/types/pipeline_service.py b/google/cloud/aiplatform_v1/types/pipeline_service.py index 1d469b63bc..4281bc0b4b 100644 --- a/google/cloud/aiplatform_v1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1/types/pipeline_service.py @@ -21,7 +21,9 @@ from google.cloud.aiplatform_v1.types import operation from google.cloud.aiplatform_v1.types import pipeline_job as gca_pipeline_job -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.protobuf import field_mask_pb2 # type: ignore @@ -191,12 +193,12 @@ class ListTrainingPipelinesResponse(proto.Message): def raw_page(self): return self - training_pipelines: MutableSequence[ - gca_training_pipeline.TrainingPipeline - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_training_pipeline.TrainingPipeline, + training_pipelines: MutableSequence[gca_training_pipeline.TrainingPipeline] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_training_pipeline.TrainingPipeline, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/pipeline_state.py b/google/cloud/aiplatform_v1/types/pipeline_state.py index 2128f561a2..bca4dc71d6 100644 --- a/google/cloud/aiplatform_v1/types/pipeline_state.py +++ b/google/cloud/aiplatform_v1/types/pipeline_state.py @@ -55,6 +55,7 @@ class PipelineState(proto.Enum): The pipeline has been stopped, and can be resumed. 
""" + PIPELINE_STATE_UNSPECIFIED = 0 PIPELINE_STATE_QUEUED = 1 PIPELINE_STATE_PENDING = 2 diff --git a/google/cloud/aiplatform_v1/types/prediction_service.py b/google/cloud/aiplatform_v1/types/prediction_service.py index c5edc6a775..21035103fe 100644 --- a/google/cloud/aiplatform_v1/types/prediction_service.py +++ b/google/cloud/aiplatform_v1/types/prediction_service.py @@ -777,12 +777,12 @@ class CountTokensResponse(proto.Message): proto.INT32, number=2, ) - prompt_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=content.ModalityTokenCount, + prompt_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=content.ModalityTokenCount, + ) ) @@ -960,6 +960,7 @@ class BlockedReason(proto.Enum): MODEL_ARMOR (5): The user prompt was blocked by Model Armor. """ + BLOCKED_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 @@ -1034,26 +1035,26 @@ class UsageMetadata(proto.Message): proto.INT32, number=5, ) - prompt_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=content.ModalityTokenCount, + prompt_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=9, + message=content.ModalityTokenCount, + ) ) - cache_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=content.ModalityTokenCount, + cache_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=10, + message=content.ModalityTokenCount, + ) ) - candidates_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=content.ModalityTokenCount, + candidates_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + 
proto.MESSAGE, + number=11, + message=content.ModalityTokenCount, + ) ) candidates: MutableSequence[content.Candidate] = proto.RepeatedField( diff --git a/google/cloud/aiplatform_v1/types/publisher_model.py b/google/cloud/aiplatform_v1/types/publisher_model.py index 4fa0a98b0b..64f6b8622d 100644 --- a/google/cloud/aiplatform_v1/types/publisher_model.py +++ b/google/cloud/aiplatform_v1/types/publisher_model.py @@ -95,6 +95,7 @@ class OpenSourceCategory(proto.Enum): Used to indicate the PublisherModel is a 3p-owned pure open source model. """ + OPEN_SOURCE_CATEGORY_UNSPECIFIED = 0 PROPRIETARY = 1 GOOGLE_OWNED_OSS_WITH_GOOGLE_CHECKPOINT = 2 @@ -129,6 +130,7 @@ class LaunchStage(proto.Enum): launch stage, available to all customers and ready for production workload. """ + LAUNCH_STAGE_UNSPECIFIED = 0 EXPERIMENTAL = 1 PRIVATE_PREVIEW = 2 @@ -146,6 +148,7 @@ class VersionState(proto.Enum): VERSION_STATE_UNSTABLE (2): Used to indicate the version is unstable. """ + VERSION_STATE_UNSPECIFIED = 0 VERSION_STATE_STABLE = 1 VERSION_STATE_UNSTABLE = 2 @@ -296,13 +299,13 @@ class RegionalResourceReferences(proto.Message): This field is a member of `oneof`_ ``_resource_description``. """ - references: MutableMapping[ - str, "PublisherModel.ResourceReference" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message="PublisherModel.ResourceReference", + references: MutableMapping[str, "PublisherModel.ResourceReference"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="PublisherModel.ResourceReference", + ) ) title: str = proto.Field( proto.STRING, @@ -334,12 +337,12 @@ class ViewRestApi(proto.Message): Required. The title of the view rest API. 
""" - documentations: MutableSequence[ - "PublisherModel.Documentation" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="PublisherModel.Documentation", + documentations: MutableSequence["PublisherModel.Documentation"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PublisherModel.Documentation", + ) ) title: str = proto.Field( proto.STRING, @@ -561,18 +564,24 @@ class DeployGke(proto.Message): message="PublisherModel.CallToAction.RegionalResourceReferences", ) ) - open_fine_tuning_pipeline: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_fine_tuning_pipeline: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=4, message="PublisherModel.CallToAction.RegionalResourceReferences", ) - open_fine_tuning_pipelines: "PublisherModel.CallToAction.OpenFineTuningPipelines" = proto.Field( + open_fine_tuning_pipelines: ( + "PublisherModel.CallToAction.OpenFineTuningPipelines" + ) = proto.Field( proto.MESSAGE, number=13, optional=True, message="PublisherModel.CallToAction.OpenFineTuningPipelines", ) - open_prompt_tuning_pipeline: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_prompt_tuning_pipeline: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=5, message="PublisherModel.CallToAction.RegionalResourceReferences", @@ -594,7 +603,9 @@ class DeployGke(proto.Message): number=14, message="PublisherModel.CallToAction.DeployGke", ) - open_generation_ai_studio: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_generation_ai_studio: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=8, message="PublisherModel.CallToAction.RegionalResourceReferences", @@ -606,7 +617,9 @@ class DeployGke(proto.Message): message="PublisherModel.CallToAction.RegionalResourceReferences", ) ) - open_evaluation_pipeline: 
"PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_evaluation_pipeline: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=11, message="PublisherModel.CallToAction.RegionalResourceReferences", diff --git a/google/cloud/aiplatform_v1/types/reasoning_engine.py b/google/cloud/aiplatform_v1/types/reasoning_engine.py index d9ceb165c6..b8c8de23f9 100644 --- a/google/cloud/aiplatform_v1/types/reasoning_engine.py +++ b/google/cloud/aiplatform_v1/types/reasoning_engine.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import env_var from google.cloud.aiplatform_v1.types import service_networking from google.protobuf import struct_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/reasoning_engine_service.py b/google/cloud/aiplatform_v1/types/reasoning_engine_service.py index 65bad48875..03cd623b0f 100644 --- a/google/cloud/aiplatform_v1/types/reasoning_engine_service.py +++ b/google/cloud/aiplatform_v1/types/reasoning_engine_service.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import operation -from google.cloud.aiplatform_v1.types import reasoning_engine as gca_reasoning_engine +from google.cloud.aiplatform_v1.types import ( + reasoning_engine as gca_reasoning_engine, +) from google.protobuf import field_mask_pb2 # type: ignore @@ -192,12 +194,12 @@ class ListReasoningEnginesResponse(proto.Message): def raw_page(self): return self - reasoning_engines: MutableSequence[ - gca_reasoning_engine.ReasoningEngine - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_reasoning_engine.ReasoningEngine, + reasoning_engines: MutableSequence[gca_reasoning_engine.ReasoningEngine] = ( + proto.RepeatedField( + proto.MESSAGE, + 
number=1, + message=gca_reasoning_engine.ReasoningEngine, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/reservation_affinity.py b/google/cloud/aiplatform_v1/types/reservation_affinity.py index 32407ead58..cf20d7aa76 100644 --- a/google/cloud/aiplatform_v1/types/reservation_affinity.py +++ b/google/cloud/aiplatform_v1/types/reservation_affinity.py @@ -66,6 +66,7 @@ class Type(proto.Enum): reservation must be identified via the ``key`` and ``values`` fields. """ + TYPE_UNSPECIFIED = 0 NO_RESERVATION = 1 ANY_RESERVATION = 2 diff --git a/google/cloud/aiplatform_v1/types/schedule.py b/google/cloud/aiplatform_v1/types/schedule.py index aa25f565f9..cdb61246d4 100644 --- a/google/cloud/aiplatform_v1/types/schedule.py +++ b/google/cloud/aiplatform_v1/types/schedule.py @@ -157,6 +157,7 @@ class State(proto.Enum): allowed to complete. Schedules in completed state cannot be paused or resumed. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 PAUSED = 2 @@ -196,7 +197,9 @@ class RunResponse(proto.Message): message=pipeline_service.CreatePipelineJobRequest, ) ) - create_notebook_execution_job_request: notebook_service.CreateNotebookExecutionJobRequest = proto.Field( + create_notebook_execution_job_request: ( + notebook_service.CreateNotebookExecutionJobRequest + ) = proto.Field( proto.MESSAGE, number=20, oneof="request", diff --git a/google/cloud/aiplatform_v1/types/service_networking.py b/google/cloud/aiplatform_v1/types/service_networking.py index 5d1c3d4ffa..8e7893f4cf 100644 --- a/google/cloud/aiplatform_v1/types/service_networking.py +++ b/google/cloud/aiplatform_v1/types/service_networking.py @@ -44,6 +44,7 @@ class PSCAutomationState(proto.Enum): PSC_AUTOMATION_STATE_FAILED (2): The PSC service automation has failed. 
""" + PSC_AUTOMATION_STATE_UNSPECIFIED = 0 PSC_AUTOMATION_STATE_SUCCESSFUL = 1 PSC_AUTOMATION_STATE_FAILED = 2 @@ -132,12 +133,12 @@ class PrivateServiceConnectConfig(proto.Message): proto.STRING, number=2, ) - psc_automation_configs: MutableSequence[ - "PSCAutomationConfig" - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message="PSCAutomationConfig", + psc_automation_configs: MutableSequence["PSCAutomationConfig"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message="PSCAutomationConfig", + ) ) service_attachment: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/specialist_pool_service.py b/google/cloud/aiplatform_v1/types/specialist_pool_service.py index aea9960511..b3dbcf6b76 100644 --- a/google/cloud/aiplatform_v1/types/specialist_pool_service.py +++ b/google/cloud/aiplatform_v1/types/specialist_pool_service.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import operation -from google.cloud.aiplatform_v1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1.types import ( + specialist_pool as gca_specialist_pool, +) from google.protobuf import field_mask_pb2 # type: ignore @@ -152,12 +154,12 @@ class ListSpecialistPoolsResponse(proto.Message): def raw_page(self): return self - specialist_pools: MutableSequence[ - gca_specialist_pool.SpecialistPool - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_specialist_pool.SpecialistPool, + specialist_pools: MutableSequence[gca_specialist_pool.SpecialistPool] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_specialist_pool.SpecialistPool, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/study.py b/google/cloud/aiplatform_v1/types/study.py index 8a118f84dd..dac41d5f1f 100644 --- a/google/cloud/aiplatform_v1/types/study.py +++ b/google/cloud/aiplatform_v1/types/study.py @@ -77,6 +77,7 @@ class 
State(proto.Enum): The study is done when the service exhausts the parameter search space or max_trial_count is reached. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 INACTIVE = 2 @@ -201,6 +202,7 @@ class State(proto.Enum): service will set a Trial to INFEASIBLE when it's done but missing the final_measurement. """ + STATE_UNSPECIFIED = 0 REQUESTED = 1 ACTIVE = 2 @@ -425,6 +427,7 @@ class Algorithm(proto.Enum): Simple random search within the feasible space. """ + ALGORITHM_UNSPECIFIED = 0 GRID_SEARCH = 2 RANDOM_SEARCH = 3 @@ -447,6 +450,7 @@ class ObservationNoise(proto.Enum): in metric evaluations, it may repeat the same Trial parameters more than once. """ + OBSERVATION_NOISE_UNSPECIFIED = 0 LOW = 1 HIGH = 2 @@ -473,6 +477,7 @@ class MeasurementSelectionType(proto.Enum): BEST_MEASUREMENT (2): Use the best measurement reported. """ + MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0 LAST_MEASUREMENT = 1 BEST_MEASUREMENT = 2 @@ -509,6 +514,7 @@ class GoalType(proto.Enum): MINIMIZE (2): Minimize the goal metric. """ + GOAL_TYPE_UNSPECIFIED = 0 MAXIMIZE = 1 MINIMIZE = 2 @@ -628,6 +634,7 @@ class ScaleType(proto.Enum): The entire feasible space must be strictly positive. 
""" + SCALE_TYPE_UNSPECIFIED = 0 UNIT_LINEAR_SCALE = 1 UNIT_LOG_SCALE = 2 @@ -863,7 +870,9 @@ class CategoricalValueCondition(proto.Message): oneof="parent_value_condition", message="StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition", ) - parent_int_values: "StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition" = proto.Field( + parent_int_values: ( + "StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition" + ) = proto.Field( proto.MESSAGE, number=3, oneof="parent_value_condition", diff --git a/google/cloud/aiplatform_v1/types/tensorboard.py b/google/cloud/aiplatform_v1/types/tensorboard.py index 23dad29b79..692e17eb10 100644 --- a/google/cloud/aiplatform_v1/types/tensorboard.py +++ b/google/cloud/aiplatform_v1/types/tensorboard.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1/types/tensorboard_service.py b/google/cloud/aiplatform_v1/types/tensorboard_service.py index 24129c968c..a34aec1070 100644 --- a/google/cloud/aiplatform_v1/types/tensorboard_service.py +++ b/google/cloud/aiplatform_v1/types/tensorboard_service.py @@ -25,7 +25,9 @@ from google.cloud.aiplatform_v1.types import ( tensorboard_experiment as gca_tensorboard_experiment, ) -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import ( tensorboard_time_series as gca_tensorboard_time_series, ) @@ -593,12 +595,12 @@ class BatchCreateTensorboardRunsResponse(proto.Message): The created TensorboardRuns. 
""" - tensorboard_runs: MutableSequence[ - gca_tensorboard_run.TensorboardRun - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_tensorboard_run.TensorboardRun, + tensorboard_runs: MutableSequence[gca_tensorboard_run.TensorboardRun] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_tensorboard_run.TensorboardRun, + ) ) @@ -769,12 +771,12 @@ class ListTensorboardRunsResponse(proto.Message): def raw_page(self): return self - tensorboard_runs: MutableSequence[ - gca_tensorboard_run.TensorboardRun - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_tensorboard_run.TensorboardRun, + tensorboard_runs: MutableSequence[gca_tensorboard_run.TensorboardRun] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_tensorboard_run.TensorboardRun, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -852,12 +854,12 @@ class BatchCreateTensorboardTimeSeriesRequest(proto.Message): proto.STRING, number=1, ) - requests: MutableSequence[ - "CreateTensorboardTimeSeriesRequest" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="CreateTensorboardTimeSeriesRequest", + requests: MutableSequence["CreateTensorboardTimeSeriesRequest"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateTensorboardTimeSeriesRequest", + ) ) @@ -1111,12 +1113,12 @@ class BatchReadTensorboardTimeSeriesDataResponse(proto.Message): The returned time series data. 
""" - time_series_data: MutableSequence[ - tensorboard_data.TimeSeriesData - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=tensorboard_data.TimeSeriesData, + time_series_data: MutableSequence[tensorboard_data.TimeSeriesData] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=tensorboard_data.TimeSeriesData, + ) ) @@ -1187,12 +1189,12 @@ class WriteTensorboardExperimentDataRequest(proto.Message): proto.STRING, number=1, ) - write_run_data_requests: MutableSequence[ - "WriteTensorboardRunDataRequest" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="WriteTensorboardRunDataRequest", + write_run_data_requests: MutableSequence["WriteTensorboardRunDataRequest"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="WriteTensorboardRunDataRequest", + ) ) @@ -1226,12 +1228,12 @@ class WriteTensorboardRunDataRequest(proto.Message): proto.STRING, number=1, ) - time_series_data: MutableSequence[ - tensorboard_data.TimeSeriesData - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=tensorboard_data.TimeSeriesData, + time_series_data: MutableSequence[tensorboard_data.TimeSeriesData] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=tensorboard_data.TimeSeriesData, + ) ) @@ -1313,12 +1315,12 @@ class ExportTensorboardTimeSeriesDataResponse(proto.Message): def raw_page(self): return self - time_series_data_points: MutableSequence[ - tensorboard_data.TimeSeriesDataPoint - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=tensorboard_data.TimeSeriesDataPoint, + time_series_data_points: MutableSequence[tensorboard_data.TimeSeriesDataPoint] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=tensorboard_data.TimeSeriesDataPoint, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1/types/tensorboard_time_series.py b/google/cloud/aiplatform_v1/types/tensorboard_time_series.py index e100606942..834269dfea 100644 --- 
a/google/cloud/aiplatform_v1/types/tensorboard_time_series.py +++ b/google/cloud/aiplatform_v1/types/tensorboard_time_series.py @@ -90,6 +90,7 @@ class ValueType(proto.Enum): of blob sequences. E.g. set of sample images with labels over epochs/time. """ + VALUE_TYPE_UNSPECIFIED = 0 SCALAR = 1 TENSOR = 2 diff --git a/google/cloud/aiplatform_v1/types/tool.py b/google/cloud/aiplatform_v1/types/tool.py index a52c623230..9dbb7e1891 100644 --- a/google/cloud/aiplatform_v1/types/tool.py +++ b/google/cloud/aiplatform_v1/types/tool.py @@ -149,6 +149,7 @@ class Environment(proto.Enum): ENVIRONMENT_BROWSER (1): Operates in a web browser. """ + ENVIRONMENT_UNSPECIFIED = 0 ENVIRONMENT_BROWSER = 1 @@ -396,6 +397,7 @@ class Language(proto.Enum): Python >= 3.10, with numpy and simpy available. """ + LANGUAGE_UNSPECIFIED = 0 PYTHON = 1 @@ -441,6 +443,7 @@ class Outcome(proto.Enum): cancelled. There may or may not be a partial output present. """ + OUTCOME_UNSPECIFIED = 0 OUTCOME_OK = 1 OUTCOME_FAILED = 2 @@ -720,6 +723,7 @@ class Mode(proto.Enum): Run retrieval only when system decides it is necessary. """ + MODE_UNSPECIFIED = 0 MODE_DYNAMIC = 1 @@ -793,6 +797,7 @@ class Mode(proto.Enum): Model behavior is same as when not passing any function declarations. 
""" + MODE_UNSPECIFIED = 0 AUTO = 1 ANY = 2 diff --git a/google/cloud/aiplatform_v1/types/training_pipeline.py b/google/cloud/aiplatform_v1/types/training_pipeline.py index a15975a8fa..6b3a4d4d66 100644 --- a/google/cloud/aiplatform_v1/types/training_pipeline.py +++ b/google/cloud/aiplatform_v1/types/training_pipeline.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import pipeline_state diff --git a/google/cloud/aiplatform_v1/types/tuning_job.py b/google/cloud/aiplatform_v1/types/tuning_job.py index 0ae93dc179..33f6545b04 100644 --- a/google/cloud/aiplatform_v1/types/tuning_job.py +++ b/google/cloud/aiplatform_v1/types/tuning_job.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import content -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import job_state from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -501,6 +503,7 @@ class AdapterSize(proto.Enum): ADAPTER_SIZE_THIRTY_TWO (5): Adapter size 32. """ + ADAPTER_SIZE_UNSPECIFIED = 0 ADAPTER_SIZE_ONE = 1 ADAPTER_SIZE_TWO = 6 diff --git a/google/cloud/aiplatform_v1/types/types.py b/google/cloud/aiplatform_v1/types/types.py index bf4feefc20..b40005875f 100644 --- a/google/cloud/aiplatform_v1/types/types.py +++ b/google/cloud/aiplatform_v1/types/types.py @@ -164,6 +164,7 @@ class DataType(proto.Enum): UINT64 (12): No description available. 
""" + DATA_TYPE_UNSPECIFIED = 0 BOOL = 1 STRING = 2 diff --git a/google/cloud/aiplatform_v1/types/vertex_rag_data.py b/google/cloud/aiplatform_v1/types/vertex_rag_data.py index 91b3e505cf..334a5b5425 100644 --- a/google/cloud/aiplatform_v1/types/vertex_rag_data.py +++ b/google/cloud/aiplatform_v1/types/vertex_rag_data.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1.types import api_auth as gca_api_auth -from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1.types import io from google.protobuf import timestamp_pb2 # type: ignore @@ -303,6 +305,7 @@ class State(proto.Enum): RagFile resource is in a problematic state. See ``error_message`` field for details. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 ERROR = 2 @@ -362,6 +365,7 @@ class State(proto.Enum): RagCorpus is in a problematic situation. See ``error_message`` field for details. """ + UNKNOWN = 0 INITIALIZED = 1 ACTIVE = 2 diff --git a/google/cloud/aiplatform_v1beta1/gapic_version.py b/google/cloud/aiplatform_v1beta1/gapic_version.py index 43aebe06ae..cd4257b1a2 100644 --- a/google/cloud/aiplatform_v1beta1/gapic_version.py +++ b/google/cloud/aiplatform_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.111.0" # {x-release-please-version} +__version__ = "1.112.0" # {x-release-please-version} diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py index faae822b32..a0c1fbf5ab 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py @@ -55,7 +55,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import saved_query @@ -314,21 +316,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.DatasetServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + 
"credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", + "credentialsType": None, + } + ), ) async def create_dataset( diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py index 0e42f1e548..d38da9faa6 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py @@ -71,7 +71,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import saved_query @@ -941,21 +943,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.DatasetServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": 
"google.cloud.aiplatform.v1beta1.DatasetService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.DatasetService", + "credentialsType": None, + } + ), ) def create_dataset( diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py index 90e3146f5a..8890fe7ce7 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py @@ -33,7 +33,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -552,13 +554,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + 
[operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py index 22aec8c968..75677d0c83 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py @@ -36,7 +36,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py index c3678e1c73..de272406e5 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py @@ -39,7 +39,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import 
policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest.py index cf47bc5bec..b8d42d3001 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest.py @@ -42,7 +42,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.longrunning import operations_pb2 # type: ignore @@ -7224,7 +7226,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7366,7 +7367,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7509,7 +7509,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7657,7 +7656,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7805,7 +7803,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -7954,7 +7951,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8070,7 +8066,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8186,7 +8181,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8328,7 +8322,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8470,7 +8463,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_asyncio.py index 6b26605e47..44cbd37757 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_asyncio.py @@ -55,7 +55,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.longrunning import operations_pb2 # type: ignore @@ -1617,9 +1619,9 @@ def __init__( self._interceptor = interceptor or AsyncDatasetServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -7545,7 +7547,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7697,7 +7698,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -7850,7 +7850,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -8008,7 +8007,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -8167,7 +8165,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -8323,7 +8320,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8448,7 +8444,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8572,7 +8567,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8725,7 +8719,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8877,7 +8870,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_base.py index 023b496b99..ca1aafa227 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/rest_base.py @@ -32,7 +32,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py index 27216ee9b2..40a059ff6c 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/async_client.py @@ -54,7 +54,9 @@ from google.cloud.aiplatform_v1beta1.types import ( deployment_resource_pool as gca_deployment_resource_pool, ) -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import endpoint from google.cloud.aiplatform_v1beta1.types import machine_resources @@ -319,21 +321,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceAsyncClient`.", - extra={ - "serviceName": 
"google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "credentialsType": None, + } + ), ) async def create_deployment_resource_pool( diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py index f1e6ebda52..374967bb7b 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/client.py @@ -70,7 +70,9 @@ from google.cloud.aiplatform_v1beta1.types import ( deployment_resource_pool as gca_deployment_resource_pool, ) -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.aiplatform_v1beta1.types import 
encryption_spec from google.cloud.aiplatform_v1beta1.types import endpoint from google.cloud.aiplatform_v1beta1.types import machine_resources @@ -108,14 +110,14 @@ class DeploymentResourcePoolServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[DeploymentResourcePoolServiceTransport]] _transport_registry["grpc"] = DeploymentResourcePoolServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = DeploymentResourcePoolServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + DeploymentResourcePoolServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = DeploymentResourcePoolServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncDeploymentResourcePoolServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncDeploymentResourcePoolServiceRestTransport + ) def get_transport_class( cls, @@ -837,21 +839,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.DeploymentResourcePoolServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + 
self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService", + "credentialsType": None, + } + ), ) def create_deployment_resource_pool( diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py index 12f860b499..c65c306d13 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/pagers.py @@ -38,7 +38,9 @@ OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.aiplatform_v1beta1.types import endpoint diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py index 324e745f88..dad07c12b9 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/__init__.py @@ -45,9 +45,9 @@ _transport_registry["grpc_asyncio"] = DeploymentResourcePoolServiceGrpcAsyncIOTransport _transport_registry["rest"] = DeploymentResourcePoolServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncDeploymentResourcePoolServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncDeploymentResourcePoolServiceRestTransport + ) __all__ = ( 
"DeploymentResourcePoolServiceTransport", diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py index 7795e25a48..3c3536fb1b 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -324,13 +326,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py index 97a405f08f..2d7de93c0c 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types 
import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -368,12 +370,12 @@ def create_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_deployment_resource_pool" not in self._stubs: - self._stubs[ - "create_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/CreateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/CreateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_deployment_resource_pool"] @@ -399,12 +401,12 @@ def get_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_deployment_resource_pool" not in self._stubs: - self._stubs[ - "get_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/GetDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, - response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + self._stubs["get_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/GetDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, + response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + ) ) return self._stubs["get_deployment_resource_pool"] @@ -430,12 +432,12 @@ def list_deployment_resource_pools( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_deployment_resource_pools" not in self._stubs: - self._stubs[ - "list_deployment_resource_pools" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/ListDeploymentResourcePools", - request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, - response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + self._stubs["list_deployment_resource_pools"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/ListDeploymentResourcePools", + request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, + response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + ) ) return self._stubs["list_deployment_resource_pools"] @@ -462,12 +464,12 @@ def update_deployment_resource_pool( # gRPC handles serialization and 
deserialization, so we just need # to pass in the functions for each. if "update_deployment_resource_pool" not in self._stubs: - self._stubs[ - "update_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_deployment_resource_pool"] @@ -494,12 +496,12 @@ def delete_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_deployment_resource_pool" not in self._stubs: - self._stubs[ - "delete_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_deployment_resource_pool"] diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py index 76ea8fe7dd..1ff5718ba9 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -376,12 +378,12 @@ def create_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_deployment_resource_pool" not in self._stubs: - self._stubs[ - "create_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/CreateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/CreateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.CreateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_deployment_resource_pool"] @@ -407,12 +409,12 @@ def get_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_deployment_resource_pool" not in self._stubs: - self._stubs[ - "get_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/GetDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, - response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + self._stubs["get_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/GetDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.GetDeploymentResourcePoolRequest.serialize, + response_deserializer=deployment_resource_pool.DeploymentResourcePool.deserialize, + ) ) return self._stubs["get_deployment_resource_pool"] @@ -438,12 +440,12 @@ def list_deployment_resource_pools( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_deployment_resource_pools" not in self._stubs: - self._stubs[ - "list_deployment_resource_pools" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/ListDeploymentResourcePools", - request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, - response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + self._stubs["list_deployment_resource_pools"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/ListDeploymentResourcePools", + request_serializer=deployment_resource_pool_service.ListDeploymentResourcePoolsRequest.serialize, + response_deserializer=deployment_resource_pool_service.ListDeploymentResourcePoolsResponse.deserialize, + ) ) return self._stubs["list_deployment_resource_pools"] @@ -470,12 +472,12 @@ def update_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_deployment_resource_pool" not in self._stubs: - self._stubs[ - "update_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/UpdateDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.UpdateDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_deployment_resource_pool"] @@ -502,12 +504,12 @@ def delete_deployment_resource_pool( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_deployment_resource_pool" not in self._stubs: - self._stubs[ - "delete_deployment_resource_pool" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", - request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_deployment_resource_pool"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.DeploymentResourcePoolService/DeleteDeploymentResourcePool", + request_serializer=deployment_resource_pool_service.DeleteDeploymentResourcePoolRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_deployment_resource_pool"] diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest.py index 
9dfd1df024..a72be893e6 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -4013,7 +4015,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4158,7 +4159,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4304,7 +4304,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4455,7 +4454,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4606,7 +4604,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4756,7 +4753,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -4873,7 +4869,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4990,7 +4985,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5135,7 +5129,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5278,7 +5271,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_asyncio.py index 80db1c38f9..dd802c9e41 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -771,9 +773,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped 
methods, overriding the base class method to use async wrappers.""" @@ -4148,7 +4150,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4299,7 +4300,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4451,7 +4451,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4608,7 +4607,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4767,7 +4765,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4925,7 +4922,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5050,7 +5046,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5173,7 +5168,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5326,7 +5320,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -5477,7 +5470,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_base.py index edaa8bc9f9..c355cb531f 100644 --- a/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/deployment_resource_pool_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py index 41930dc351..fc22524179 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py @@ -304,21 +304,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.EndpointServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - 
"serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", + "credentialsType": None, + } + ), ) async def create_endpoint( diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py index f423d58f8f..4e707f7edc 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py @@ -854,21 +854,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.EndpointServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.EndpointService", + "credentialsType": None, + } + ), ) def create_endpoint( diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py index 24c268a509..6e266b3e5c 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py @@ -387,13 +387,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py index 6c93f218c9..ce7415abd2 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py @@ -471,12 +471,12 @@ def update_endpoint_long_running( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_endpoint_long_running" not in self._stubs: - self._stubs[ - "update_endpoint_long_running" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.EndpointService/UpdateEndpointLongRunning", - request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_endpoint_long_running"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.EndpointService/UpdateEndpointLongRunning", + request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_endpoint_long_running"] @@ -616,12 +616,12 @@ def set_publisher_model_config( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_publisher_model_config" not in self._stubs: - self._stubs[ - "set_publisher_model_config" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.EndpointService/SetPublisherModelConfig", - request_serializer=endpoint_service.SetPublisherModelConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["set_publisher_model_config"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.EndpointService/SetPublisherModelConfig", + request_serializer=endpoint_service.SetPublisherModelConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["set_publisher_model_config"] @@ -647,12 +647,12 @@ def fetch_publisher_model_config( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "fetch_publisher_model_config" not in self._stubs: - self._stubs[ - "fetch_publisher_model_config" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.EndpointService/FetchPublisherModelConfig", - request_serializer=endpoint_service.FetchPublisherModelConfigRequest.serialize, - response_deserializer=endpoint.PublisherModelConfig.deserialize, + self._stubs["fetch_publisher_model_config"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.EndpointService/FetchPublisherModelConfig", + request_serializer=endpoint_service.FetchPublisherModelConfigRequest.serialize, + response_deserializer=endpoint.PublisherModelConfig.deserialize, + ) ) return self._stubs["fetch_publisher_model_config"] diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py index 23e79e9bcf..6ef39a199d 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py @@ -485,12 +485,12 @@ def update_endpoint_long_running( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_endpoint_long_running" not in self._stubs: - self._stubs[ - "update_endpoint_long_running" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.EndpointService/UpdateEndpointLongRunning", - request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_endpoint_long_running"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.EndpointService/UpdateEndpointLongRunning", + request_serializer=endpoint_service.UpdateEndpointLongRunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_endpoint_long_running"] @@ -638,12 +638,12 @@ def set_publisher_model_config( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_publisher_model_config" not in self._stubs: - self._stubs[ - "set_publisher_model_config" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.EndpointService/SetPublisherModelConfig", - request_serializer=endpoint_service.SetPublisherModelConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["set_publisher_model_config"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.EndpointService/SetPublisherModelConfig", + request_serializer=endpoint_service.SetPublisherModelConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["set_publisher_model_config"] @@ -669,12 +669,12 @@ def fetch_publisher_model_config( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "fetch_publisher_model_config" not in self._stubs: - self._stubs[ - "fetch_publisher_model_config" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.EndpointService/FetchPublisherModelConfig", - request_serializer=endpoint_service.FetchPublisherModelConfigRequest.serialize, - response_deserializer=endpoint.PublisherModelConfig.deserialize, + self._stubs["fetch_publisher_model_config"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.EndpointService/FetchPublisherModelConfig", + request_serializer=endpoint_service.FetchPublisherModelConfigRequest.serialize, + response_deserializer=endpoint.PublisherModelConfig.deserialize, + ) ) return self._stubs["fetch_publisher_model_config"] diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest.py index 36cceb1e0a..a811a58d31 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest.py @@ -5036,7 +5036,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5178,7 +5177,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5321,7 +5319,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5469,7 +5466,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -5618,7 +5614,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5767,7 +5762,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5883,7 +5877,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5999,7 +5992,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6141,7 +6133,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6283,7 +6274,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest_asyncio.py index 3acb6f155f..ccc9a98464 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/rest_asyncio.py @@ -1036,9 +1036,9 @@ def __init__( self._interceptor = interceptor or AsyncEndpointServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5248,7 +5248,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5401,7 +5400,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5555,7 +5553,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5714,7 +5711,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5873,7 +5869,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -6029,7 +6024,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6154,7 +6148,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6279,7 +6272,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6432,7 +6424,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6585,7 +6576,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/evaluation_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/evaluation_service/async_client.py index ee44baeac7..67ba2db8fb 100644 --- a/google/cloud/aiplatform_v1beta1/services/evaluation_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/evaluation_service/async_client.py @@ -277,21 +277,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.EvaluationServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", + "credentialsType": None, + } + ), ) async def evaluate_instances( diff --git a/google/cloud/aiplatform_v1beta1/services/evaluation_service/client.py b/google/cloud/aiplatform_v1beta1/services/evaluation_service/client.py index 83e6284123..2943486a81 100644 --- a/google/cloud/aiplatform_v1beta1/services/evaluation_service/client.py 
+++ b/google/cloud/aiplatform_v1beta1/services/evaluation_service/client.py @@ -719,21 +719,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.EvaluationServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.EvaluationService", + "credentialsType": None, + } + ), ) def evaluate_instances( diff --git a/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/base.py index e825cef335..68aa7ef982 100644 --- a/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/base.py @@ -259,13 +259,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property 
def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest.py index 2c85388c45..e44453f13f 100644 --- a/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest.py @@ -3080,7 +3080,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3223,7 +3222,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3366,7 +3364,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3514,7 +3511,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3663,7 +3659,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3813,7 +3808,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -3930,7 +3924,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4046,7 +4039,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4189,7 +4181,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4332,7 +4323,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest_asyncio.py index 5bf8fe8495..8526e28db6 100644 --- a/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/evaluation_service/transports/rest_asyncio.py @@ -531,9 +531,9 @@ def __init__( self._interceptor = interceptor or AsyncEvaluationServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3158,7 +3158,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -3311,7 +3310,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3465,7 +3463,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3624,7 +3621,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3783,7 +3779,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3939,7 +3934,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4062,7 +4056,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4185,7 +4178,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4338,7 +4330,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4491,7 +4482,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/example_store_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/example_store_service/async_client.py index 3124b0b2ba..9d6a022e73 100644 --- a/google/cloud/aiplatform_v1beta1/services/example_store_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/example_store_service/async_client.py @@ -47,9 +47,13 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.example_store_service import pagers +from google.cloud.aiplatform_v1beta1.services.example_store_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import example_store -from google.cloud.aiplatform_v1beta1.types import example_store as gca_example_store +from google.cloud.aiplatform_v1beta1.types import ( + example_store as gca_example_store, +) from google.cloud.aiplatform_v1beta1.types import example_store_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -287,21 +291,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ExampleStoreServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", + "universeDomain": getattr( + 
self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", + "credentialsType": None, + } + ), ) async def create_example_store( diff --git a/google/cloud/aiplatform_v1beta1/services/example_store_service/client.py b/google/cloud/aiplatform_v1beta1/services/example_store_service/client.py index 8319b0983d..ef1c607bbc 100644 --- a/google/cloud/aiplatform_v1beta1/services/example_store_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/example_store_service/client.py @@ -63,9 +63,13 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.example_store_service import pagers +from google.cloud.aiplatform_v1beta1.services.example_store_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import example_store -from google.cloud.aiplatform_v1beta1.types import example_store as gca_example_store +from google.cloud.aiplatform_v1beta1.types import ( + example_store as gca_example_store, +) from google.cloud.aiplatform_v1beta1.types import example_store_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -747,21 +751,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ExampleStoreServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ExampleStoreService", + "credentialsType": None, + } + ), ) def create_example_store( diff --git a/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/base.py index bd5278070f..2087c4c367 100644 --- a/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/base.py @@ -370,13 +370,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest.py index 
bdc9751c08..c49167016a 100644 --- a/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest.py @@ -4656,7 +4656,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4799,7 +4798,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4943,7 +4941,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5092,7 +5089,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5241,7 +5237,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5393,7 +5388,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5510,7 +5504,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5627,7 +5620,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -5770,7 +5762,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5913,7 +5904,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest_asyncio.py index fa967a812e..b199da27d5 100644 --- a/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/example_store_service/transports/rest_asyncio.py @@ -943,9 +943,9 @@ def __init__( self._interceptor = interceptor or AsyncExampleStoreServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4800,7 +4800,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4953,7 +4952,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -5105,7 +5103,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5264,7 +5261,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5423,7 +5419,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5579,7 +5574,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5702,7 +5696,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5825,7 +5818,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5978,7 +5970,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6129,7 +6120,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/async_client.py index fffe17e303..fd304b421c 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/async_client.py @@ -294,21 +294,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ExtensionExecutionServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", + "credentialsType": None, + } + ), ) async def execute_extension( diff --git a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/client.py b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/client.py index 
878cf225f0..fe18b61ccc 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/client.py @@ -96,9 +96,9 @@ class ExtensionExecutionServiceClientMeta(type): _transport_registry["grpc_asyncio"] = ExtensionExecutionServiceGrpcAsyncIOTransport _transport_registry["rest"] = ExtensionExecutionServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncExtensionExecutionServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncExtensionExecutionServiceRestTransport + ) def get_transport_class( cls, @@ -768,21 +768,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ExtensionExecutionServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionExecutionService", + "credentialsType": None, + } + ), ) def 
execute_extension( diff --git a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/base.py index 353ba430c3..7fd03c151c 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/base.py @@ -256,13 +256,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest.py index 18f688a2d7..dbb9885946 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest.py @@ -928,7 +928,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1073,7 +1072,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1219,7 +1217,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1370,7 +1367,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1521,7 +1517,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1671,7 +1666,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1790,7 +1784,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1909,7 +1902,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2054,7 +2046,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2199,7 +2190,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest_asyncio.py index 48c90d75d4..cc40ff503b 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_execution_service/transports/rest_asyncio.py @@ -1001,7 +1001,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1152,7 +1151,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1304,7 +1302,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1461,7 +1458,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1620,7 +1616,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1776,7 +1771,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1899,7 +1893,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -2022,7 +2015,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2173,7 +2165,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2324,7 +2315,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/async_client.py index 07cb89252a..b9f49858ab 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.extension_registry_service import pagers +from google.cloud.aiplatform_v1beta1.services.extension_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import extension from google.cloud.aiplatform_v1beta1.types import extension as gca_extension from google.cloud.aiplatform_v1beta1.types import extension_registry_service @@ -303,21 +305,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ExtensionRegistryServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", + "credentialsType": None, + } + ), ) async def import_extension( diff --git a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/client.py b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/client.py index 0791bf67ed..a3b023be0d 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.extension_registry_service import pagers +from google.cloud.aiplatform_v1beta1.services.extension_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import extension from google.cloud.aiplatform_v1beta1.types import extension as gca_extension from google.cloud.aiplatform_v1beta1.types import extension_registry_service @@ -799,21 +801,25 @@ def __init__( ): # pragma: NO 
COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ExtensionRegistryServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ExtensionRegistryService", + "credentialsType": None, + } + ), ) def import_extension( diff --git a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/base.py index 0cff5e1996..0e521477b6 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/base.py @@ -303,13 +303,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest.py index 104d76c138..3511a75ccb 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest.py @@ -3744,7 +3744,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3887,7 +3886,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4033,7 +4031,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4184,7 +4181,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4335,7 +4331,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4487,7 +4482,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -4606,7 +4600,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4725,7 +4718,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4870,7 +4862,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5015,7 +5006,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest_asyncio.py index fdfcaa59b8..dd3d55623b 100644 --- a/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/extension_registry_service/transports/rest_asyncio.py @@ -708,9 +708,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3850,7 +3850,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -4001,7 +4000,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4153,7 +4151,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4310,7 +4307,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4467,7 +4463,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4623,7 +4618,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4746,7 +4740,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4869,7 +4862,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5020,7 +5012,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5171,7 +5162,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/async_client.py index b681fe1a82..77666284f5 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/async_client.py @@ -55,9 +55,13 @@ from google.cloud.aiplatform_v1beta1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view -from google.cloud.aiplatform_v1beta1.types import feature_view as gca_feature_view +from google.cloud.aiplatform_v1beta1.types import ( + feature_view as gca_feature_view, +) from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.location import locations_pb2 # type: ignore @@ -325,21 +329,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeatureOnlineStoreAdminServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", - "credentialsType": None, - }, + extra=( + { + "serviceName": 
"google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", + "credentialsType": None, + } + ), ) async def create_feature_online_store( diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/client.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/client.py index 31370d3c15..7132a33450 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/client.py @@ -71,9 +71,13 @@ from google.cloud.aiplatform_v1beta1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view -from google.cloud.aiplatform_v1beta1.types import feature_view as gca_feature_view +from google.cloud.aiplatform_v1beta1.types import ( + feature_view as gca_feature_view, +) from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.location import locations_pb2 # type: ignore @@ -116,14 +120,14 @@ class FeatureOnlineStoreAdminServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[FeatureOnlineStoreAdminServiceTransport]] _transport_registry["grpc"] = 
FeatureOnlineStoreAdminServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = FeatureOnlineStoreAdminServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + FeatureOnlineStoreAdminServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = FeatureOnlineStoreAdminServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeatureOnlineStoreAdminServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeatureOnlineStoreAdminServiceRestTransport + ) def get_transport_class( cls, @@ -829,21 +833,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeatureOnlineStoreAdminServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService", + "credentialsType": None, + } + ), ) def create_feature_online_store( diff --git 
a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/pagers.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/pagers.py index c4d19bff30..4c50fc751a 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/pagers.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/pagers.py @@ -38,7 +38,9 @@ OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/__init__.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/__init__.py index 903ebeb944..31e263b75e 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/__init__.py @@ -45,9 +45,9 @@ _transport_registry["grpc_asyncio"] = FeatureOnlineStoreAdminServiceGrpcAsyncIOTransport _transport_registry["rest"] = FeatureOnlineStoreAdminServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeatureOnlineStoreAdminServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeatureOnlineStoreAdminServiceRestTransport + ) __all__ = ( "FeatureOnlineStoreAdminServiceTransport", diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/base.py 
b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/base.py index a7a4777e01..818783586f 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.cloud.location import locations_pb2 # type: ignore @@ -433,13 +435,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc.py index 627f44a326..52b3eab532 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + 
feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.cloud.location import locations_pb2 # type: ignore @@ -371,12 +373,12 @@ def create_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_feature_online_store" not in self._stubs: - self._stubs[ - "create_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_feature_online_store"] @@ -432,12 +434,12 @@ def list_feature_online_stores( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_feature_online_stores" not in self._stubs: - self._stubs[ - "list_feature_online_stores" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", - request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, - response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + self._stubs["list_feature_online_stores"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", + request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, + response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + ) ) return self._stubs["list_feature_online_stores"] @@ -464,12 +466,12 @@ def update_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_feature_online_store" not in self._stubs: - self._stubs[ - "update_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_feature_online_store"] @@ -496,12 +498,12 @@ def delete_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_feature_online_store" not in self._stubs: - self._stubs[ - "delete_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_feature_online_store"] diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc_asyncio.py index e67b39ccc4..83a402eb6f 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.cloud.location import locations_pb2 # type: ignore @@ -379,12 +381,12 @@ def create_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_feature_online_store" not in self._stubs: - self._stubs[ - "create_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/CreateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.CreateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_feature_online_store"] @@ -440,12 +442,12 @@ def list_feature_online_stores( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_feature_online_stores" not in self._stubs: - self._stubs[ - "list_feature_online_stores" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", - request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, - response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + self._stubs["list_feature_online_stores"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/ListFeatureOnlineStores", + request_serializer=feature_online_store_admin_service.ListFeatureOnlineStoresRequest.serialize, + response_deserializer=feature_online_store_admin_service.ListFeatureOnlineStoresResponse.deserialize, + ) ) return self._stubs["list_feature_online_stores"] @@ -472,12 +474,12 @@ def update_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_feature_online_store" not in self._stubs: - self._stubs[ - "update_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/UpdateFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.UpdateFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_feature_online_store"] @@ -504,12 +506,12 @@ def delete_feature_online_store( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_feature_online_store" not in self._stubs: - self._stubs[ - "delete_feature_online_store" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", - request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_feature_online_store"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreAdminService/DeleteFeatureOnlineStore", + request_serializer=feature_online_store_admin_service.DeleteFeatureOnlineStoreRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_feature_online_store"] diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest.py index 7b569b43d3..3fa1e17cf6 100644 --- 
a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.longrunning import operations_pb2 # type: ignore @@ -5570,7 +5572,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5715,7 +5716,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5859,7 +5859,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6010,7 +6009,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6163,7 +6161,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6313,7 +6310,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -6430,7 +6426,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6547,7 +6542,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6692,7 +6686,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6835,7 +6828,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_asyncio.py index 710f508f26..c982ff0dac 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.longrunning import operations_pb2 # type: ignore @@ -1182,9 +1184,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + 
self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5788,7 +5790,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5941,7 +5942,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6093,7 +6093,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6250,7 +6249,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6409,7 +6407,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6567,7 +6564,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6692,7 +6688,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6815,7 +6810,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -6968,7 +6962,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7121,7 +7114,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_base.py index ad910acced..534842eddd 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_admin_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1beta1.types import feature_online_store -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/async_client.py index 56a0ee989a..e55b3ab716 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/async_client.py @@ -48,7 +48,9 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from 
google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -292,21 +294,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeatureOnlineStoreServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", + "credentialsType": None, + } + ), ) async def fetch_feature_values( diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/client.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/client.py index b034ec841c..cc987796de 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/client.py @@ -63,7 +63,9 @@ _LOGGER = std_logging.getLogger(__name__) -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -99,9 +101,9 @@ class FeatureOnlineStoreServiceClientMeta(type): _transport_registry["grpc_asyncio"] = FeatureOnlineStoreServiceGrpcAsyncIOTransport _transport_registry["rest"] = FeatureOnlineStoreServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeatureOnlineStoreServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeatureOnlineStoreServiceRestTransport + ) def get_transport_class( cls, @@ -751,21 +753,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeatureOnlineStoreServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + 
"credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService", + "credentialsType": None, + } + ), ) def fetch_feature_values( diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/base.py index ce98509035..66f6296664 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/base.py @@ -27,7 +27,9 @@ from google.oauth2 import service_account # type: ignore import google.protobuf -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -290,13 +292,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc.py index 80c322a040..52dc8a5ced 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc.py +++ 
b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc.py @@ -30,7 +30,9 @@ import grpc # type: ignore import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -378,12 +380,12 @@ def streaming_fetch_feature_values( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "streaming_fetch_feature_values" not in self._stubs: - self._stubs[ - "streaming_fetch_feature_values" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/StreamingFetchFeatureValues", - request_serializer=feature_online_store_service.StreamingFetchFeatureValuesRequest.serialize, - response_deserializer=feature_online_store_service.StreamingFetchFeatureValuesResponse.deserialize, + self._stubs["streaming_fetch_feature_values"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/StreamingFetchFeatureValues", + request_serializer=feature_online_store_service.StreamingFetchFeatureValuesRequest.serialize, + response_deserializer=feature_online_store_service.StreamingFetchFeatureValuesResponse.deserialize, + ) ) return self._stubs["streaming_fetch_feature_values"] @@ -444,12 +446,12 @@ def feature_view_direct_write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "feature_view_direct_write" not in self._stubs: - self._stubs[ - "feature_view_direct_write" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/FeatureViewDirectWrite", - request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, - response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + self._stubs["feature_view_direct_write"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/FeatureViewDirectWrite", + request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, + response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + ) ) return self._stubs["feature_view_direct_write"] diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc_asyncio.py index 34b49ca0d9..4e9473af7c 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/grpc_asyncio.py @@ -33,7 +33,9 @@ import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -386,12 +388,12 @@ def streaming_fetch_feature_values( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "streaming_fetch_feature_values" not in self._stubs: - self._stubs[ - "streaming_fetch_feature_values" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/StreamingFetchFeatureValues", - request_serializer=feature_online_store_service.StreamingFetchFeatureValuesRequest.serialize, - response_deserializer=feature_online_store_service.StreamingFetchFeatureValuesResponse.deserialize, + self._stubs["streaming_fetch_feature_values"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/StreamingFetchFeatureValues", + request_serializer=feature_online_store_service.StreamingFetchFeatureValuesRequest.serialize, + response_deserializer=feature_online_store_service.StreamingFetchFeatureValuesResponse.deserialize, + ) ) return self._stubs["streaming_fetch_feature_values"] @@ -452,12 +454,12 @@ def feature_view_direct_write( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "feature_view_direct_write" not in self._stubs: - self._stubs[ - "feature_view_direct_write" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/FeatureViewDirectWrite", - request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, - response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + self._stubs["feature_view_direct_write"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService/FeatureViewDirectWrite", + request_serializer=feature_online_store_service.FeatureViewDirectWriteRequest.serialize, + response_deserializer=feature_online_store_service.FeatureViewDirectWriteResponse.deserialize, + ) ) return self._stubs["feature_view_direct_write"] diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest.py index e68b411cad..d02d1c2b0a 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest.py @@ -36,7 +36,9 @@ import warnings -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -990,7 +992,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1135,7 +1136,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -1281,7 +1281,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1432,7 +1431,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1583,7 +1581,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1733,7 +1730,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1852,7 +1848,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1971,7 +1966,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2116,7 +2110,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2261,7 +2254,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_asyncio.py index ba157e90ee..8489323ed2 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_asyncio.py @@ -48,7 +48,9 @@ from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -1080,7 +1082,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1231,7 +1232,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1383,7 +1383,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1540,7 +1539,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1699,7 +1697,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -1855,7 +1852,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1978,7 +1974,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2101,7 +2096,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2252,7 +2246,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2403,7 +2396,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_base.py index 867036c6ed..644f5a3258 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_online_store_service/transports/rest_base.py @@ -27,7 +27,9 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/async_client.py index 718cf465ad..2aa4614416 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/async_client.py @@ -47,13 +47,19 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.feature_registry_service import pagers +from google.cloud.aiplatform_v1beta1.services.feature_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_group -from google.cloud.aiplatform_v1beta1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1beta1.types import ( + feature_group as gca_feature_group, +) from google.cloud.aiplatform_v1beta1.types import feature_monitor -from google.cloud.aiplatform_v1beta1.types import feature_monitor 
as gca_feature_monitor +from google.cloud.aiplatform_v1beta1.types import ( + feature_monitor as gca_feature_monitor, +) from google.cloud.aiplatform_v1beta1.types import feature_monitor_job from google.cloud.aiplatform_v1beta1.types import ( feature_monitor_job as gca_feature_monitor_job, @@ -318,21 +324,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeatureRegistryServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", + "credentialsType": None, + } + ), ) async def create_feature_group( diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/client.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/client.py index 8232ea3844..0d2c994a34 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/client.py @@ -63,13 +63,19 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.feature_registry_service import pagers +from google.cloud.aiplatform_v1beta1.services.feature_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_group -from google.cloud.aiplatform_v1beta1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1beta1.types import ( + feature_group as gca_feature_group, +) from google.cloud.aiplatform_v1beta1.types import feature_monitor -from google.cloud.aiplatform_v1beta1.types import feature_monitor as gca_feature_monitor +from google.cloud.aiplatform_v1beta1.types import ( + feature_monitor as gca_feature_monitor, +) from google.cloud.aiplatform_v1beta1.types import feature_monitor_job from google.cloud.aiplatform_v1beta1.types import ( feature_monitor_job as gca_feature_monitor_job, @@ -840,21 +846,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeatureRegistryServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", - "credentialsType": None, - }, + extra=( + { + "serviceName": 
"google.cloud.aiplatform.v1beta1.FeatureRegistryService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeatureRegistryService", + "credentialsType": None, + } + ), ) def create_feature_group( diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/base.py index 252aa14b1b..be6af8de71 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/base.py @@ -522,13 +522,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc.py index 2e5a731cb5..0bf92ef371 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc.py @@ -825,12 +825,12 @@ def create_feature_monitor_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_feature_monitor_job" not in self._stubs: - self._stubs[ - "create_feature_monitor_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureRegistryService/CreateFeatureMonitorJob", - request_serializer=feature_registry_service.CreateFeatureMonitorJobRequest.serialize, - response_deserializer=gca_feature_monitor_job.FeatureMonitorJob.deserialize, + self._stubs["create_feature_monitor_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureRegistryService/CreateFeatureMonitorJob", + request_serializer=feature_registry_service.CreateFeatureMonitorJobRequest.serialize, + response_deserializer=gca_feature_monitor_job.FeatureMonitorJob.deserialize, + ) ) return self._stubs["create_feature_monitor_job"] diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc_asyncio.py index 000429c2bf..a09b626eb1 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/grpc_asyncio.py @@ -841,12 +841,12 @@ def create_feature_monitor_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_feature_monitor_job" not in self._stubs: - self._stubs[ - "create_feature_monitor_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.FeatureRegistryService/CreateFeatureMonitorJob", - request_serializer=feature_registry_service.CreateFeatureMonitorJobRequest.serialize, - response_deserializer=gca_feature_monitor_job.FeatureMonitorJob.deserialize, + self._stubs["create_feature_monitor_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.FeatureRegistryService/CreateFeatureMonitorJob", + request_serializer=feature_registry_service.CreateFeatureMonitorJobRequest.serialize, + response_deserializer=gca_feature_monitor_job.FeatureMonitorJob.deserialize, + ) ) return self._stubs["create_feature_monitor_job"] diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest.py index 59c2a63fa0..15338301cd 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest.py @@ -6876,7 +6876,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7019,7 +7018,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7163,7 +7161,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -7312,7 +7309,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7461,7 +7457,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7613,7 +7608,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7732,7 +7726,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7851,7 +7844,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7994,7 +7986,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8139,7 +8130,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest_asyncio.py index e4925218af..e7ba3dc504 100644 --- a/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/feature_registry_service/transports/rest_asyncio.py @@ -1522,9 +1522,9 @@ def __init__( self._interceptor = interceptor or AsyncFeatureRegistryServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -7125,7 +7125,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7276,7 +7275,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7428,7 +7426,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7585,7 +7582,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -7742,7 +7738,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7898,7 +7893,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8021,7 +8015,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8144,7 +8137,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8295,7 +8287,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8446,7 +8437,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/async_client.py index bd11fbd08b..d8ed488aa2 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/async_client.py @@ -296,21 +296,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeaturestoreOnlineServingServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", + "credentialsType": None, + } + ), ) async def read_feature_values( diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/client.py 
b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/client.py index b66971feae..f3d35d3919 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/client.py @@ -100,14 +100,14 @@ class FeaturestoreOnlineServingServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[FeaturestoreOnlineServingServiceTransport]] _transport_registry["grpc"] = FeaturestoreOnlineServingServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = FeaturestoreOnlineServingServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + FeaturestoreOnlineServingServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = FeaturestoreOnlineServingServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeaturestoreOnlineServingServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeaturestoreOnlineServingServiceRestTransport + ) def get_transport_class( cls, @@ -763,21 +763,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeaturestoreOnlineServingServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", + "universeDomain": getattr( + 
self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService", + "credentialsType": None, + } + ), ) def read_feature_values( diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/__init__.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/__init__.py index 21db02b7e4..c2b0c85800 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/__init__.py @@ -42,14 +42,14 @@ OrderedDict() ) # type: Dict[str, Type[FeaturestoreOnlineServingServiceTransport]] _transport_registry["grpc"] = FeaturestoreOnlineServingServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = FeaturestoreOnlineServingServiceGrpcAsyncIOTransport +_transport_registry["grpc_asyncio"] = ( + FeaturestoreOnlineServingServiceGrpcAsyncIOTransport +) _transport_registry["rest"] = FeaturestoreOnlineServingServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncFeaturestoreOnlineServingServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncFeaturestoreOnlineServingServiceRestTransport + ) __all__ = ( "FeaturestoreOnlineServingServiceTransport", diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/base.py index f0af33559a..26a731e646 100644 --- 
a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/base.py @@ -273,13 +273,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc.py index e9ff8085f5..e61bac4139 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc.py @@ -382,12 +382,12 @@ def streaming_read_feature_values( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "streaming_read_feature_values" not in self._stubs: - self._stubs[ - "streaming_read_feature_values" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", - request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, - response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + self._stubs["streaming_read_feature_values"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", + request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, + response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + ) ) return self._stubs["streaming_read_feature_values"] diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc_asyncio.py index f57bef29d3..ac69af142f 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/grpc_asyncio.py @@ -390,12 +390,12 @@ def streaming_read_feature_values( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "streaming_read_feature_values" not in self._stubs: - self._stubs[ - "streaming_read_feature_values" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", - request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, - response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + self._stubs["streaming_read_feature_values"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServingService/StreamingReadFeatureValues", + request_serializer=featurestore_online_service.StreamingReadFeatureValuesRequest.serialize, + response_deserializer=featurestore_online_service.ReadFeatureValuesResponse.deserialize, + ) ) return self._stubs["streaming_read_feature_values"] diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest.py index f229f7d871..e914ab2b3b 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest.py @@ -1144,7 +1144,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1287,7 +1286,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1431,7 +1429,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1580,7 +1577,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1731,7 +1727,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1881,7 +1876,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1998,7 +1992,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2115,7 +2108,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2258,7 +2250,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2401,7 +2392,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest_asyncio.py index fd46fbe7c7..bc7eae08de 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_online_serving_service/transports/rest_asyncio.py @@ -1232,7 +1232,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1385,7 +1384,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1539,7 +1537,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1698,7 +1695,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1857,7 +1853,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -2015,7 +2010,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2140,7 +2134,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -2265,7 +2258,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2418,7 +2410,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2571,7 +2562,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py index 762bda61cd..45ec540dca 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py @@ -47,16 +47,22 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.featurestore_service import pagers +from google.cloud.aiplatform_v1beta1.services.featurestore_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_monitor from google.cloud.aiplatform_v1beta1.types import feature_monitoring_stats from google.cloud.aiplatform_v1beta1.types import featurestore 
-from google.cloud.aiplatform_v1beta1.types import featurestore as gca_featurestore +from google.cloud.aiplatform_v1beta1.types import ( + featurestore as gca_featurestore, +) from google.cloud.aiplatform_v1beta1.types import featurestore_monitoring from google.cloud.aiplatform_v1beta1.types import featurestore_service from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -304,21 +310,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeaturestoreServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", + "credentialsType": None, + } + ), ) async def create_featurestore( diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py index ac9fa1c595..ad8dde514d 100644 --- 
a/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py @@ -63,16 +63,22 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.featurestore_service import pagers +from google.cloud.aiplatform_v1beta1.services.featurestore_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_monitor from google.cloud.aiplatform_v1beta1.types import feature_monitoring_stats from google.cloud.aiplatform_v1beta1.types import featurestore -from google.cloud.aiplatform_v1beta1.types import featurestore as gca_featurestore +from google.cloud.aiplatform_v1beta1.types import ( + featurestore as gca_featurestore, +) from google.cloud.aiplatform_v1beta1.types import featurestore_monitoring from google.cloud.aiplatform_v1beta1.types import featurestore_service from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -808,21 +814,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.FeaturestoreServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - 
self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.FeaturestoreService", + "credentialsType": None, + } + ), ) def create_featurestore( diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/base.py index 08b1678b60..0d387c99a6 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import featurestore @@ -539,13 +541,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc.py index d7c72dcfcc..4e7e120941 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import featurestore diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc_asyncio.py index 2d8b94deee..211cdb90b2 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import featurestore diff --git 
a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest.py index e50a68dc85..ffffb893b0 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import featurestore @@ -7310,7 +7312,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7453,7 +7454,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7597,7 +7597,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7746,7 +7745,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7895,7 +7893,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -8047,7 +8044,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8164,7 +8160,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8281,7 +8276,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8424,7 +8418,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8567,7 +8560,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_asyncio.py index 2ffadfe77c..a3a302e695 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import featurestore @@ -1626,9 +1628,9 @@ def __init__( self._interceptor = interceptor or AsyncFeaturestoreServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -7585,7 +7587,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7738,7 +7739,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -7890,7 +7890,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -8049,7 +8048,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -8208,7 +8206,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -8364,7 +8361,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8487,7 +8483,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8610,7 +8605,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8763,7 +8757,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8914,7 +8907,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_base.py index 397b67ac0c..e491a2edc2 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import featurestore diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/async_client.py index 4af4f45eb1..d4bc5eff93 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/async_client.py @@ -45,9 +45,13 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import pagers +from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service @@ -290,21 
+294,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.GenAiCacheServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", + "credentialsType": None, + } + ), ) async def create_cached_content( diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/client.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/client.py index d7e488944d..6a336a08fb 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/client.py @@ -61,9 +61,13 @@ _LOGGER = std_logging.getLogger(__name__) -from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import pagers +from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import cached_content 
-from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service @@ -770,21 +774,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.GenAiCacheServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiCacheService", + "credentialsType": None, + } + ), ) def create_cached_content( diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/base.py index e9af8e7d8a..849558a20f 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/base.py +++ 
b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/base.py @@ -28,7 +28,9 @@ import google.protobuf from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -304,13 +306,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc.py index 1076388fd6..a36968af6f 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc.py @@ -31,7 +31,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc_asyncio.py 
b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc_asyncio.py index f98ff8f003..7b3dc5bdbd 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/grpc_asyncio.py @@ -34,7 +34,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest.py index 48c680c9a1..265d3b84da 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest.py @@ -37,7 +37,9 @@ from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -1511,7 +1513,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -1654,7 +1655,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1797,7 +1797,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1945,7 +1944,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -2094,7 +2092,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -2244,7 +2241,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2361,7 +2357,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2477,7 +2472,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2620,7 +2614,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2763,7 +2756,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_asyncio.py index fda8afd8be..c301493577 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_asyncio.py @@ -49,7 +49,9 @@ from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -1606,7 +1608,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1759,7 +1760,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1913,7 +1913,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -2072,7 +2071,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -2231,7 +2229,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -2387,7 +2384,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2510,7 +2506,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2633,7 +2628,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2786,7 +2780,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2939,7 +2932,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_base.py index 70ec0211e1..4d8cfafc8b 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_cache_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/async_client.py index 85a187e9f0..cbd2ff6f85 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/async_client.py @@ -47,12 +47,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import pagers +from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from 
google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -296,21 +300,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.GenAiTuningServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", + "credentialsType": None, + } + ), ) async def create_tuning_job( diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/client.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/client.py index 212759bd8f..5948936048 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/client.py @@ -63,12 +63,16 @@ from google.api_core import operation # type: ignore from google.api_core import 
operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import pagers +from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -838,21 +842,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.GenAiTuningServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.GenAiTuningService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": 
"google.cloud.aiplatform.v1beta1.GenAiTuningService", + "credentialsType": None, + } + ), ) def create_tuning_job( diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/base.py index ba8578663e..cd86b03b90 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/base.py @@ -30,7 +30,9 @@ from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -304,13 +306,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc.py index d5e777abed..8908381cc2 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc.py @@ -33,7 +33,9 @@ from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import tuning_job 
-from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc_asyncio.py index 38a12f54fe..8ddef1785a 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/grpc_asyncio.py @@ -36,7 +36,9 @@ from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest.py index 9462334130..0b8ee6cf1a 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest.py @@ -39,7 +39,9 @@ from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.protobuf import empty_pb2 # 
type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -3660,7 +3662,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3803,7 +3804,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3947,7 +3947,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4096,7 +4095,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4245,7 +4243,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4397,7 +4394,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4514,7 +4510,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4631,7 +4626,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4774,7 +4768,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -4917,7 +4910,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_asyncio.py index 4b571a1355..b35c2e8c95 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_asyncio.py @@ -52,7 +52,9 @@ from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -667,9 +669,9 @@ def __init__( self._interceptor = interceptor or AsyncGenAiTuningServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3768,7 +3770,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -3921,7 +3922,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4075,7 +4075,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4234,7 +4233,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4393,7 +4391,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4549,7 +4546,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4672,7 +4668,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4795,7 +4790,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4948,7 +4942,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5099,7 +5092,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_base.py index f92eedb370..2aa9668dea 100644 --- a/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/gen_ai_tuning_service/transports/rest_base.py @@ -29,7 +29,9 @@ from google.cloud.aiplatform_v1beta1.types import genai_tuning_service from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py index 8c6a8b4c3f..df06d158c3 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/async_client.py @@ -47,10 +47,14 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import pagers +from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.cloud.aiplatform_v1beta1.types import 
operation as gca_operation from google.cloud.aiplatform_v1beta1.types import service_networking @@ -296,21 +300,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.IndexEndpointServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", + "credentialsType": None, + } + ), ) async def create_index_endpoint( diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py index 6d2e04ae30..14d0f971f7 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/client.py @@ -63,10 +63,14 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from 
google.cloud.aiplatform_v1beta1.services.index_endpoint_service import pagers +from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import service_networking @@ -794,21 +798,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.IndexEndpointServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexEndpointService", + "credentialsType": None, + } + ), ) def 
create_index_endpoint( diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/base.py index 7da7c1521b..2bc1865fae 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -348,13 +350,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc.py index 309060074d..b1b486b84c 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types 
import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc_asyncio.py index 5e314ed1f4..c3495197ee 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest.py index 5da6609e08..2680f1b9ae 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest.py @@ -38,7 +38,9 @@ from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.longrunning import operations_pb2 # type: ignore @@ -4422,7 +4424,6 @@ 
def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4565,7 +4566,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4709,7 +4709,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4858,7 +4857,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5007,7 +5005,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5159,7 +5156,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5276,7 +5272,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5393,7 +5388,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5536,7 +5530,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -5679,7 +5672,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_asyncio.py index 4c91124c34..2ac43df008 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.longrunning import operations_pb2 # type: ignore @@ -877,9 +879,9 @@ def __init__( self._interceptor = interceptor or AsyncIndexEndpointServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4555,7 +4557,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4708,7 +4709,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -4860,7 +4860,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5017,7 +5016,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5174,7 +5172,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5330,7 +5327,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5453,7 +5449,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5576,7 +5571,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5727,7 +5721,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5878,7 +5871,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_base.py index 2adcef0b21..4377ec5d1a 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/index_endpoint_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py index f170687cdc..15516c8172 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/async_client.py @@ -287,21 +287,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.IndexServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", + "universeDomain": getattr( 
+ self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", + "credentialsType": None, + } + ), ) async def create_index( diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/client.py b/google/cloud/aiplatform_v1beta1/services/index_service/client.py index 2817449ea6..475e506a1c 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/client.py @@ -765,21 +765,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.IndexServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.IndexService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": 
"google.cloud.aiplatform.v1beta1.IndexService", + "credentialsType": None, + } + ), ) def create_index( diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/index_service/transports/base.py index eb2ed5878c..f2ad17a05f 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/transports/base.py @@ -349,13 +349,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest.py index fb5906cc1e..db35bf5d9f 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest.py @@ -4395,7 +4395,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4541,7 +4540,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4684,7 +4682,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -4836,7 +4833,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4988,7 +4984,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5137,7 +5132,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5253,7 +5247,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5369,7 +5362,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5513,7 +5505,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5655,7 +5646,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest_asyncio.py index 10d01a44f2..e0fc27c257 100644 --- a/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/index_service/transports/rest_asyncio.py @@ -864,9 +864,9 @@ def __init__( self._interceptor = interceptor or AsyncIndexServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4548,7 +4548,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4702,7 +4701,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4855,7 +4853,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5015,7 +5012,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5176,7 +5172,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -5333,7 +5328,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5457,7 +5451,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5581,7 +5574,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5733,7 +5725,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5885,7 +5876,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py index 58d13ccdea..9be543b172 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py @@ -54,7 +54,9 @@ ) from google.cloud.aiplatform_v1beta1.types import completion_stats from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -69,8 +71,12 @@ from google.cloud.aiplatform_v1beta1.types import job_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import machine_resources -from google.cloud.aiplatform_v1beta1.types import manual_batch_tuning_parameters -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + manual_batch_tuning_parameters, +) +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -362,21 +368,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.JobServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.JobService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - 
self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.JobService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.JobService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.JobService", + "credentialsType": None, + } + ), ) async def create_custom_job( diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/client.py b/google/cloud/aiplatform_v1beta1/services/job_service/client.py index 9558b304c6..e88536d10b 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/client.py @@ -70,7 +70,9 @@ ) from google.cloud.aiplatform_v1beta1.types import completion_stats from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -85,8 +87,12 @@ from google.cloud.aiplatform_v1beta1.types import job_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import machine_resources -from google.cloud.aiplatform_v1beta1.types import manual_batch_tuning_parameters -from google.cloud.aiplatform_v1beta1.types import 
model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + manual_batch_tuning_parameters, +) +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -1147,21 +1153,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.JobServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.JobService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.JobService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.JobService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.JobService", + "credentialsType": None, + } + ), ) def create_custom_job( diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/pagers.py b/google/cloud/aiplatform_v1beta1/services/job_service/pagers.py index d97cedd8f1..67b31b2f2b 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/pagers.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/pagers.py @@ -42,7 +42,9 @@ from google.cloud.aiplatform_v1beta1.types import data_labeling_job from 
google.cloud.aiplatform_v1beta1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py index 447f2698d3..a9a1d794ab 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py @@ -33,7 +33,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -43,7 +45,9 @@ hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -790,13 +794,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py index b2a045bd2b..c8c4e5843d 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py @@ -36,7 +36,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -46,7 +48,9 @@ hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -670,12 +674,12 @@ def create_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "create_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CreateHyperparameterTuningJob", - request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, - response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["create_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CreateHyperparameterTuningJob", + request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, + response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["create_hyperparameter_tuning_job"] @@ -701,12 +705,12 @@ def get_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "get_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/GetHyperparameterTuningJob", - request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, - response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["get_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/GetHyperparameterTuningJob", + request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, + response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["get_hyperparameter_tuning_job"] @@ -733,12 +737,12 @@ def list_hyperparameter_tuning_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_hyperparameter_tuning_jobs" not in self._stubs: - self._stubs[ - "list_hyperparameter_tuning_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ListHyperparameterTuningJobs", - request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, - response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + self._stubs["list_hyperparameter_tuning_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ListHyperparameterTuningJobs", + request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, + response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + ) ) return self._stubs["list_hyperparameter_tuning_jobs"] @@ -764,12 +768,12 @@ def delete_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "delete_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/DeleteHyperparameterTuningJob", - request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/DeleteHyperparameterTuningJob", + request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_hyperparameter_tuning_job"] @@ -806,12 +810,12 @@ def cancel_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "cancel_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CancelHyperparameterTuningJob", - request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CancelHyperparameterTuningJob", + request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_hyperparameter_tuning_job"] @@ -1033,12 +1037,12 @@ def create_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_batch_prediction_job" not in self._stubs: - self._stubs[ - "create_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CreateBatchPredictionJob", - request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, - response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + self._stubs["create_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CreateBatchPredictionJob", + request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, + response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + ) ) return self._stubs["create_batch_prediction_job"] @@ -1093,12 +1097,12 @@ def list_batch_prediction_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_batch_prediction_jobs" not in self._stubs: - self._stubs[ - "list_batch_prediction_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ListBatchPredictionJobs", - request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, - response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + self._stubs["list_batch_prediction_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ListBatchPredictionJobs", + request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, + response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + ) ) return self._stubs["list_batch_prediction_jobs"] @@ -1124,12 +1128,12 @@ def delete_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_batch_prediction_job" not in self._stubs: - self._stubs[ - "delete_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/DeleteBatchPredictionJob", - request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/DeleteBatchPredictionJob", + request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_batch_prediction_job"] @@ -1163,12 +1167,12 @@ def cancel_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_batch_prediction_job" not in self._stubs: - self._stubs[ - "cancel_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CancelBatchPredictionJob", - request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CancelBatchPredictionJob", + request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_batch_prediction_job"] @@ -1196,12 +1200,12 @@ def create_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "create_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CreateModelDeploymentMonitoringJob", - request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["create_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CreateModelDeploymentMonitoringJob", + request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["create_model_deployment_monitoring_job"] @@ -1229,12 +1233,12 @@ def search_model_deployment_monitoring_stats_anomalies( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_deployment_monitoring_stats_anomalies" not in self._stubs: - self._stubs[ - "search_model_deployment_monitoring_stats_anomalies" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", - request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, - response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + self._stubs["search_model_deployment_monitoring_stats_anomalies"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", + request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, + response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + ) ) return self._stubs["search_model_deployment_monitoring_stats_anomalies"] @@ -1261,12 +1265,12 @@ def get_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "get_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/GetModelDeploymentMonitoringJob", - request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["get_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/GetModelDeploymentMonitoringJob", + request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["get_model_deployment_monitoring_job"] @@ -1293,12 +1297,12 @@ def list_model_deployment_monitoring_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_deployment_monitoring_jobs" not in self._stubs: - self._stubs[ - "list_model_deployment_monitoring_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ListModelDeploymentMonitoringJobs", - request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, - response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + self._stubs["list_model_deployment_monitoring_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ListModelDeploymentMonitoringJobs", + request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, + response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + ) ) return self._stubs["list_model_deployment_monitoring_jobs"] @@ -1325,12 +1329,12 @@ def update_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "update_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/UpdateModelDeploymentMonitoringJob", - request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/UpdateModelDeploymentMonitoringJob", + request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_model_deployment_monitoring_job"] @@ -1357,12 +1361,12 @@ def delete_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "delete_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/DeleteModelDeploymentMonitoringJob", - request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/DeleteModelDeploymentMonitoringJob", + request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_model_deployment_monitoring_job"] @@ -1391,12 +1395,12 @@ def pause_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "pause_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "pause_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/PauseModelDeploymentMonitoringJob", - request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["pause_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/PauseModelDeploymentMonitoringJob", + request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["pause_model_deployment_monitoring_job"] @@ -1424,12 +1428,12 @@ def resume_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "resume_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "resume_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ResumeModelDeploymentMonitoringJob", - request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["resume_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ResumeModelDeploymentMonitoringJob", + request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["resume_model_deployment_monitoring_job"] diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py index 8008ae7fc1..1f3514b689 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py @@ -39,7 +39,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -49,7 +51,9 @@ hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( 
model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -688,12 +692,12 @@ def create_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "create_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CreateHyperparameterTuningJob", - request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, - response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["create_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CreateHyperparameterTuningJob", + request_serializer=job_service.CreateHyperparameterTuningJobRequest.serialize, + response_deserializer=gca_hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["create_hyperparameter_tuning_job"] @@ -719,12 +723,12 @@ def get_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "get_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/GetHyperparameterTuningJob", - request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, - response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + self._stubs["get_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/GetHyperparameterTuningJob", + request_serializer=job_service.GetHyperparameterTuningJobRequest.serialize, + response_deserializer=hyperparameter_tuning_job.HyperparameterTuningJob.deserialize, + ) ) return self._stubs["get_hyperparameter_tuning_job"] @@ -751,12 +755,12 @@ def list_hyperparameter_tuning_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_hyperparameter_tuning_jobs" not in self._stubs: - self._stubs[ - "list_hyperparameter_tuning_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ListHyperparameterTuningJobs", - request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, - response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + self._stubs["list_hyperparameter_tuning_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ListHyperparameterTuningJobs", + request_serializer=job_service.ListHyperparameterTuningJobsRequest.serialize, + response_deserializer=job_service.ListHyperparameterTuningJobsResponse.deserialize, + ) ) return self._stubs["list_hyperparameter_tuning_jobs"] @@ -783,12 +787,12 @@ def delete_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "delete_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/DeleteHyperparameterTuningJob", - request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/DeleteHyperparameterTuningJob", + request_serializer=job_service.DeleteHyperparameterTuningJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_hyperparameter_tuning_job"] @@ -827,12 +831,12 @@ def cancel_hyperparameter_tuning_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_hyperparameter_tuning_job" not in self._stubs: - self._stubs[ - "cancel_hyperparameter_tuning_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CancelHyperparameterTuningJob", - request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_hyperparameter_tuning_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CancelHyperparameterTuningJob", + request_serializer=job_service.CancelHyperparameterTuningJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_hyperparameter_tuning_job"] @@ -1062,12 +1066,12 @@ def create_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_batch_prediction_job" not in self._stubs: - self._stubs[ - "create_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CreateBatchPredictionJob", - request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, - response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + self._stubs["create_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CreateBatchPredictionJob", + request_serializer=job_service.CreateBatchPredictionJobRequest.serialize, + response_deserializer=gca_batch_prediction_job.BatchPredictionJob.deserialize, + ) ) return self._stubs["create_batch_prediction_job"] @@ -1122,12 +1126,12 @@ def list_batch_prediction_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_batch_prediction_jobs" not in self._stubs: - self._stubs[ - "list_batch_prediction_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ListBatchPredictionJobs", - request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, - response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + self._stubs["list_batch_prediction_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ListBatchPredictionJobs", + request_serializer=job_service.ListBatchPredictionJobsRequest.serialize, + response_deserializer=job_service.ListBatchPredictionJobsResponse.deserialize, + ) ) return self._stubs["list_batch_prediction_jobs"] @@ -1154,12 +1158,12 @@ def delete_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_batch_prediction_job" not in self._stubs: - self._stubs[ - "delete_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/DeleteBatchPredictionJob", - request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/DeleteBatchPredictionJob", + request_serializer=job_service.DeleteBatchPredictionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_batch_prediction_job"] @@ -1195,12 +1199,12 @@ def cancel_batch_prediction_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_batch_prediction_job" not in self._stubs: - self._stubs[ - "cancel_batch_prediction_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CancelBatchPredictionJob", - request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["cancel_batch_prediction_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CancelBatchPredictionJob", + request_serializer=job_service.CancelBatchPredictionJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["cancel_batch_prediction_job"] @@ -1228,12 +1232,12 @@ def create_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "create_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/CreateModelDeploymentMonitoringJob", - request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["create_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/CreateModelDeploymentMonitoringJob", + request_serializer=job_service.CreateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["create_model_deployment_monitoring_job"] @@ -1261,12 +1265,12 @@ def search_model_deployment_monitoring_stats_anomalies( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_deployment_monitoring_stats_anomalies" not in self._stubs: - self._stubs[ - "search_model_deployment_monitoring_stats_anomalies" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", - request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, - response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + self._stubs["search_model_deployment_monitoring_stats_anomalies"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/SearchModelDeploymentMonitoringStatsAnomalies", + request_serializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesRequest.serialize, + response_deserializer=job_service.SearchModelDeploymentMonitoringStatsAnomaliesResponse.deserialize, + ) ) return self._stubs["search_model_deployment_monitoring_stats_anomalies"] @@ -1293,12 +1297,12 @@ def get_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "get_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/GetModelDeploymentMonitoringJob", - request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + self._stubs["get_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/GetModelDeploymentMonitoringJob", + request_serializer=job_service.GetModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=model_deployment_monitoring_job.ModelDeploymentMonitoringJob.deserialize, + ) ) return self._stubs["get_model_deployment_monitoring_job"] @@ -1325,12 +1329,12 @@ def list_model_deployment_monitoring_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_deployment_monitoring_jobs" not in self._stubs: - self._stubs[ - "list_model_deployment_monitoring_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ListModelDeploymentMonitoringJobs", - request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, - response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + self._stubs["list_model_deployment_monitoring_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ListModelDeploymentMonitoringJobs", + request_serializer=job_service.ListModelDeploymentMonitoringJobsRequest.serialize, + response_deserializer=job_service.ListModelDeploymentMonitoringJobsResponse.deserialize, + ) ) return self._stubs["list_model_deployment_monitoring_jobs"] @@ -1357,12 +1361,12 @@ def update_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "update_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/UpdateModelDeploymentMonitoringJob", - request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/UpdateModelDeploymentMonitoringJob", + request_serializer=job_service.UpdateModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_model_deployment_monitoring_job"] @@ -1389,12 +1393,12 @@ def delete_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "delete_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/DeleteModelDeploymentMonitoringJob", - request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/DeleteModelDeploymentMonitoringJob", + request_serializer=job_service.DeleteModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_model_deployment_monitoring_job"] @@ -1424,12 +1428,12 @@ def pause_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "pause_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "pause_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/PauseModelDeploymentMonitoringJob", - request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["pause_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/PauseModelDeploymentMonitoringJob", + request_serializer=job_service.PauseModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["pause_model_deployment_monitoring_job"] @@ -1458,12 +1462,12 @@ def resume_model_deployment_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "resume_model_deployment_monitoring_job" not in self._stubs: - self._stubs[ - "resume_model_deployment_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.JobService/ResumeModelDeploymentMonitoringJob", - request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, + self._stubs["resume_model_deployment_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.JobService/ResumeModelDeploymentMonitoringJob", + request_serializer=job_service.ResumeModelDeploymentMonitoringJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) ) return self._stubs["resume_model_deployment_monitoring_job"] diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest.py index df56644b67..00fdc033ce 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest.py @@ -42,7 +42,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -52,7 +54,9 @@ hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as 
gca_model_deployment_monitoring_job, ) @@ -9985,7 +9989,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -10131,7 +10134,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -10278,7 +10280,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10432,7 +10433,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10586,7 +10586,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10735,7 +10734,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10851,7 +10849,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10967,7 +10964,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -11113,7 +11109,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -11257,7 +11252,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_asyncio.py index a5f8696b0b..bf15125baa 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_asyncio.py @@ -55,7 +55,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -65,7 +67,9 @@ hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -2187,9 +2191,9 @@ def __init__( self._interceptor = interceptor or AsyncJobServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async 
wrappers.""" @@ -10460,7 +10464,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -10614,7 +10617,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -10769,7 +10771,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10931,7 +10932,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -11093,7 +11093,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -11250,7 +11249,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -11374,7 +11372,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -11498,7 +11495,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -11652,7 +11648,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -11804,7 +11799,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_base.py index ff44eeea67..5ae160b8d4 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/rest_base.py @@ -32,7 +32,9 @@ batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -42,7 +44,9 @@ hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import job_service -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) diff --git a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/async_client.py index 6fdace70ea..97a8d8b4cd 100644 --- a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/async_client.py @@ -278,21 +278,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client 
`google.cloud.aiplatform_v1beta1.LlmUtilityServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", + "credentialsType": None, + } + ), ) async def compute_tokens( diff --git a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/client.py b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/client.py index 696fb003ee..5d1175b0e9 100644 --- a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/client.py @@ -740,21 +740,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.LlmUtilityServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.LlmUtilityService", + "credentialsType": None, + } + ), ) def compute_tokens( diff --git a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/base.py index 99c01e486d..3769a81323 100644 --- a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/base.py @@ -239,13 +239,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest.py index 491ea18b82..16642c86f4 100644 
--- a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest.py @@ -687,7 +687,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -830,7 +829,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -973,7 +971,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1121,7 +1118,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1270,7 +1266,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1420,7 +1415,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1537,7 +1531,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1653,7 +1646,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. 
Args: @@ -1796,7 +1788,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -1939,7 +1930,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest_asyncio.py index bf3c9e0e20..0d61b35066 100644 --- a/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/llm_utility_service/transports/rest_asyncio.py @@ -756,7 +756,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -909,7 +908,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1063,7 +1061,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1222,7 +1219,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1381,7 +1377,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -1537,7 +1532,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1660,7 +1654,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1783,7 +1776,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1936,7 +1928,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2089,7 +2080,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/match_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/match_service/async_client.py index f31065317f..dffc409d22 100644 --- a/google/cloud/aiplatform_v1beta1/services/match_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/match_service/async_client.py @@ -274,21 +274,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MatchServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", + "credentialsType": None, + } + ), ) async def find_neighbors( diff --git a/google/cloud/aiplatform_v1beta1/services/match_service/client.py b/google/cloud/aiplatform_v1beta1/services/match_service/client.py index c5c43a363b..e64c463366 100644 --- a/google/cloud/aiplatform_v1beta1/services/match_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/match_service/client.py @@ -732,21 +732,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MatchServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MatchService", + "credentialsType": None, + } + ), ) def find_neighbors( diff --git a/google/cloud/aiplatform_v1beta1/services/match_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/match_service/transports/base.py index e247818239..0409d05093 100644 --- a/google/cloud/aiplatform_v1beta1/services/match_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/match_service/transports/base.py @@ -256,13 +256,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest.py index e4111d1ae4..0cd0fb96ed 100644 --- a/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest.py @@ -913,7 +913,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1059,7 +1058,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1202,7 +1200,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1354,7 +1351,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1506,7 +1502,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1655,7 +1650,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1771,7 +1765,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -1887,7 +1880,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2031,7 +2023,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2173,7 +2164,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest_asyncio.py index 92d30cdab6..017248e04a 100644 --- a/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/match_service/transports/rest_asyncio.py @@ -993,7 +993,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1147,7 +1146,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1300,7 +1298,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1460,7 +1457,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -1621,7 +1617,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1778,7 +1773,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1902,7 +1896,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2026,7 +2019,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2178,7 +2170,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2330,7 +2321,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/async_client.py index 632254d557..6619f739da 100644 --- a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.memory_bank_service import pagers +from google.cloud.aiplatform_v1beta1.services.memory_bank_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import memory_bank from google.cloud.aiplatform_v1beta1.types import memory_bank_service from google.cloud.location import locations_pb2 # type: ignore @@ -291,21 +293,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MemoryBankServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, 
"get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", + "credentialsType": None, + } + ), ) async def create_memory( diff --git a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/client.py b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/client.py index 4d66f0bb19..bab5cafe7a 100644 --- a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.memory_bank_service import pagers +from google.cloud.aiplatform_v1beta1.services.memory_bank_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import memory_bank from google.cloud.aiplatform_v1beta1.types import memory_bank_service from google.cloud.location import locations_pb2 # type: ignore @@ -795,21 +797,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MemoryBankServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MemoryBankService", + "credentialsType": None, + } + ), ) def create_memory( diff --git a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/base.py index b698b2c932..3d1d78563a 100644 --- a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/base.py @@ -333,13 +333,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest.py index 94b57a67c0..42f7d820f3 100644 --- a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest.py @@ -4154,7 +4154,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -4297,7 +4296,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4440,7 +4438,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4588,7 +4585,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4737,7 +4733,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4887,7 +4882,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5004,7 +4998,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5120,7 +5113,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5263,7 +5255,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5406,7 +5397,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest_asyncio.py index 8db2533351..996623a9f9 100644 --- a/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/memory_bank_service/transports/rest_asyncio.py @@ -813,9 +813,9 @@ def __init__( self._interceptor = interceptor or AsyncMemoryBankServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4308,7 +4308,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4461,7 +4460,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4615,7 +4613,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4774,7 +4771,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4933,7 +4929,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5089,7 +5084,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5212,7 +5206,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5335,7 +5328,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5488,7 +5480,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5641,7 +5632,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py index b325e3a681..db4c94e9b3 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py @@ -58,10 +58,14 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store -from google.cloud.aiplatform_v1beta1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1beta1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -308,21 +312,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MetadataServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", - 
"credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", + "credentialsType": None, + } + ), ) async def create_metadata_store( diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py index 5363bc3c63..24220d50ad 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py @@ -74,10 +74,14 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store -from google.cloud.aiplatform_v1beta1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1beta1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -853,21 +857,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created 
client `google.cloud.aiplatform_v1beta1.MetadataServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MetadataService", + "credentialsType": None, + } + ), ) def create_metadata_store( diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py index 7e5294b3b2..76eed42cef 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py @@ -36,7 +36,9 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from 
google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store from google.cloud.location import locations_pb2 # type: ignore @@ -727,13 +729,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py index e57ef91d6f..d216bd8fd7 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py @@ -39,7 +39,9 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store from google.cloud.location import locations_pb2 # type: ignore @@ -808,12 +810,12 @@ def add_context_artifacts_and_executions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "add_context_artifacts_and_executions" not in self._stubs: - self._stubs[ - "add_context_artifacts_and_executions" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/AddContextArtifactsAndExecutions", - request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, - response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + self._stubs["add_context_artifacts_and_executions"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/AddContextArtifactsAndExecutions", + request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, + response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + ) ) return self._stubs["add_context_artifacts_and_executions"] @@ -905,12 +907,12 @@ def query_context_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "query_context_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_context_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/QueryContextLineageSubgraph", - request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_context_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/QueryContextLineageSubgraph", + request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_context_lineage_subgraph"] @@ -1131,12 +1133,12 @@ def query_execution_inputs_and_outputs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "query_execution_inputs_and_outputs" not in self._stubs: - self._stubs[ - "query_execution_inputs_and_outputs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/QueryExecutionInputsAndOutputs", - request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_execution_inputs_and_outputs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/QueryExecutionInputsAndOutputs", + request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_execution_inputs_and_outputs"] @@ -1251,12 +1253,12 @@ def query_artifact_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "query_artifact_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_artifact_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/QueryArtifactLineageSubgraph", - request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_artifact_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/QueryArtifactLineageSubgraph", + request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_artifact_lineage_subgraph"] diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py index 71757e4b22..5d5403a657 100644 --- 
a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py @@ -42,7 +42,9 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store from google.cloud.location import locations_pb2 # type: ignore @@ -837,12 +839,12 @@ def add_context_artifacts_and_executions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "add_context_artifacts_and_executions" not in self._stubs: - self._stubs[ - "add_context_artifacts_and_executions" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/AddContextArtifactsAndExecutions", - request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, - response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + self._stubs["add_context_artifacts_and_executions"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/AddContextArtifactsAndExecutions", + request_serializer=metadata_service.AddContextArtifactsAndExecutionsRequest.serialize, + response_deserializer=metadata_service.AddContextArtifactsAndExecutionsResponse.deserialize, + ) ) return self._stubs["add_context_artifacts_and_executions"] @@ -934,12 +936,12 @@ def query_context_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "query_context_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_context_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/QueryContextLineageSubgraph", - request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_context_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/QueryContextLineageSubgraph", + request_serializer=metadata_service.QueryContextLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_context_lineage_subgraph"] @@ -1170,12 +1172,12 @@ def query_execution_inputs_and_outputs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "query_execution_inputs_and_outputs" not in self._stubs: - self._stubs[ - "query_execution_inputs_and_outputs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/QueryExecutionInputsAndOutputs", - request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_execution_inputs_and_outputs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/QueryExecutionInputsAndOutputs", + request_serializer=metadata_service.QueryExecutionInputsAndOutputsRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_execution_inputs_and_outputs"] @@ -1291,12 +1293,12 @@ def query_artifact_lineage_subgraph( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "query_artifact_lineage_subgraph" not in self._stubs: - self._stubs[ - "query_artifact_lineage_subgraph" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MetadataService/QueryArtifactLineageSubgraph", - request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, - response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + self._stubs["query_artifact_lineage_subgraph"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/QueryArtifactLineageSubgraph", + request_serializer=metadata_service.QueryArtifactLineageSubgraphRequest.serialize, + response_deserializer=lineage_subgraph.LineageSubgraph.deserialize, + ) ) return self._stubs["query_artifact_lineage_subgraph"] diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest.py index 041fd7c75f..a6d6031750 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest.py @@ -45,7 +45,9 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store from google.longrunning import operations_pb2 # type: ignore @@ -9667,7 +9669,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -9809,7 +9810,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -9952,7 +9952,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10100,7 +10099,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10249,7 +10247,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10398,7 +10395,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10514,7 +10510,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10630,7 +10625,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -10772,7 +10766,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -10914,7 +10907,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_asyncio.py index a8a1876c38..bff3733be9 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_asyncio.py @@ -58,7 +58,9 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store from google.longrunning import operations_pb2 # type: ignore @@ -2258,9 +2260,9 @@ def __init__( self._interceptor = interceptor or AsyncMetadataServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -10130,7 +10132,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -10283,7 +10284,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -10437,7 +10437,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10596,7 +10595,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10755,7 +10753,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10911,7 +10908,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -11036,7 +11032,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -11161,7 +11156,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -11314,7 +11308,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -11467,7 +11460,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_base.py index b7c22332e1..bfff89a6f0 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/rest_base.py @@ -35,7 +35,9 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py index 74f17247c3..f4aa3efe43 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py @@ -295,21 +295,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MigrationServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else 
{ - "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", + "credentialsType": None, + } + ), ) async def search_migratable_resources( diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py index 786e8ef90f..4c8c4db4d0 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py @@ -870,21 +870,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.MigrationServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.MigrationService", + "credentialsType": None, + } + ), ) def search_migratable_resources( diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py index 512632b278..6ca2aa0e61 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py @@ -259,13 +259,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py index 8dc4a6f8d3..78c3168923 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py @@ -368,12 +368,12 @@ def search_migratable_resources( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_migratable_resources" not in self._stubs: - self._stubs[ - "search_migratable_resources" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MigrationService/SearchMigratableResources", - request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, - response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + self._stubs["search_migratable_resources"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MigrationService/SearchMigratableResources", + request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, + response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + ) ) return self._stubs["search_migratable_resources"] diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py index a88d485b3e..86c2483931 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py @@ -376,12 +376,12 @@ def search_migratable_resources( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_migratable_resources" not in self._stubs: - self._stubs[ - "search_migratable_resources" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.MigrationService/SearchMigratableResources", - request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, - response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + self._stubs["search_migratable_resources"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MigrationService/SearchMigratableResources", + request_serializer=migration_service.SearchMigratableResourcesRequest.serialize, + response_deserializer=migration_service.SearchMigratableResourcesResponse.deserialize, + ) ) return self._stubs["search_migratable_resources"] diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest.py index 32e1886ec9..ed6024b30d 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest.py @@ -3088,7 +3088,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3230,7 +3229,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3373,7 +3371,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3521,7 +3518,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -3670,7 +3666,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3820,7 +3815,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3937,7 +3931,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4053,7 +4046,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4195,7 +4187,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4337,7 +4328,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest_asyncio.py index 91f4f66141..163000c7c0 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/rest_asyncio.py @@ -532,9 +532,9 @@ def __init__( self._interceptor = interceptor or AsyncMigrationServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3168,7 +3168,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3321,7 +3320,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3475,7 +3473,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3634,7 +3631,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -3793,7 +3789,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3949,7 +3944,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4074,7 +4068,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4199,7 +4192,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4352,7 +4344,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4505,7 +4496,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/async_client.py index 4e8a12a787..3e3223daa2 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/async_client.py @@ -48,7 +48,9 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.model_garden_service import pagers +from google.cloud.aiplatform_v1beta1.services.model_garden_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import model from google.cloud.aiplatform_v1beta1.types import model_garden_service from google.cloud.aiplatform_v1beta1.types import publisher_model @@ -293,21 +295,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ModelGardenServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, 
"get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", + "credentialsType": None, + } + ), ) async def get_publisher_model( diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/client.py index a7aedc80fa..ca4cff5d56 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.model_garden_service import pagers +from google.cloud.aiplatform_v1beta1.services.model_garden_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import model from google.cloud.aiplatform_v1beta1.types import model_garden_service from google.cloud.aiplatform_v1beta1.types import publisher_model @@ -805,21 +807,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ModelGardenServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelGardenService", + "credentialsType": None, + } + ), ) def get_publisher_model( diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/base.py index b01d103d5f..b3bf65ba91 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/base.py @@ -338,13 +338,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc.py index b439f6f237..6ac4abb47f 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc.py @@ -507,12 +507,12 @@ def check_publisher_model_eula_acceptance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "check_publisher_model_eula_acceptance" not in self._stubs: - self._stubs[ - "check_publisher_model_eula_acceptance" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelGardenService/CheckPublisherModelEulaAcceptance", - request_serializer=model_garden_service.CheckPublisherModelEulaAcceptanceRequest.serialize, - response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + self._stubs["check_publisher_model_eula_acceptance"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelGardenService/CheckPublisherModelEulaAcceptance", + request_serializer=model_garden_service.CheckPublisherModelEulaAcceptanceRequest.serialize, + response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + ) ) return self._stubs["check_publisher_model_eula_acceptance"] @@ -539,12 +539,12 @@ def accept_publisher_model_eula( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "accept_publisher_model_eula" not in self._stubs: - self._stubs[ - "accept_publisher_model_eula" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelGardenService/AcceptPublisherModelEula", - request_serializer=model_garden_service.AcceptPublisherModelEulaRequest.serialize, - response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + self._stubs["accept_publisher_model_eula"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelGardenService/AcceptPublisherModelEula", + request_serializer=model_garden_service.AcceptPublisherModelEulaRequest.serialize, + response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + ) ) return self._stubs["accept_publisher_model_eula"] diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc_asyncio.py index 20659a8145..314c5e7be1 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/grpc_asyncio.py @@ -520,12 +520,12 @@ def check_publisher_model_eula_acceptance( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "check_publisher_model_eula_acceptance" not in self._stubs: - self._stubs[ - "check_publisher_model_eula_acceptance" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelGardenService/CheckPublisherModelEulaAcceptance", - request_serializer=model_garden_service.CheckPublisherModelEulaAcceptanceRequest.serialize, - response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + self._stubs["check_publisher_model_eula_acceptance"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelGardenService/CheckPublisherModelEulaAcceptance", + request_serializer=model_garden_service.CheckPublisherModelEulaAcceptanceRequest.serialize, + response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + ) ) return self._stubs["check_publisher_model_eula_acceptance"] @@ -552,12 +552,12 @@ def accept_publisher_model_eula( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "accept_publisher_model_eula" not in self._stubs: - self._stubs[ - "accept_publisher_model_eula" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelGardenService/AcceptPublisherModelEula", - request_serializer=model_garden_service.AcceptPublisherModelEulaRequest.serialize, - response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + self._stubs["accept_publisher_model_eula"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelGardenService/AcceptPublisherModelEula", + request_serializer=model_garden_service.AcceptPublisherModelEulaRequest.serialize, + response_deserializer=model_garden_service.PublisherModelEulaAcceptance.deserialize, + ) ) return self._stubs["accept_publisher_model_eula"] diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest.py index 80c09f94b3..57683c6e28 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest.py @@ -4220,7 +4220,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4363,7 +4362,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4507,7 +4505,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -4656,7 +4653,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4805,7 +4801,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4957,7 +4952,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5074,7 +5068,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5191,7 +5184,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5334,7 +5326,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5477,7 +5468,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest_asyncio.py index 954d87bd72..6373a08df7 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_garden_service/transports/rest_asyncio.py @@ -822,9 +822,9 @@ def __init__( self._interceptor = interceptor or AsyncModelGardenServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4344,7 +4344,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4497,7 +4496,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4651,7 +4649,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4810,7 +4807,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4969,7 +4965,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5125,7 +5120,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5248,7 +5242,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5371,7 +5364,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5524,7 +5516,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5675,7 +5666,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/async_client.py index 97782afdff..370aa12d11 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/async_client.py @@ -47,12 +47,16 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import pagers +from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import model_monitor -from google.cloud.aiplatform_v1beta1.types import model_monitor as gca_model_monitor +from google.cloud.aiplatform_v1beta1.types import ( + model_monitor as gca_model_monitor, +) from google.cloud.aiplatform_v1beta1.types import model_monitoring_alert from google.cloud.aiplatform_v1beta1.types import model_monitoring_job from google.cloud.aiplatform_v1beta1.types import ( @@ -327,21 +331,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ModelMonitoringServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { 
- "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", + "credentialsType": None, + } + ), ) async def create_model_monitor( diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/client.py index b600d7ff30..efd383d661 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/client.py @@ -63,12 +63,16 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import pagers +from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import model_monitor -from google.cloud.aiplatform_v1beta1.types import model_monitor as gca_model_monitor +from google.cloud.aiplatform_v1beta1.types import ( + model_monitor as gca_model_monitor, +) from google.cloud.aiplatform_v1beta1.types import model_monitoring_alert from google.cloud.aiplatform_v1beta1.types import 
model_monitoring_job from google.cloud.aiplatform_v1beta1.types import ( @@ -919,21 +923,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ModelMonitoringServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelMonitoringService", + "credentialsType": None, + } + ), ) def create_model_monitor( diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/base.py index ce32d3ae27..2987835d08 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/base.py @@ -405,13 +405,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + 
None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc.py index 6acae5c181..86c52d7b2c 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc.py @@ -512,12 +512,12 @@ def create_model_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_model_monitoring_job" not in self._stubs: - self._stubs[ - "create_model_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/CreateModelMonitoringJob", - request_serializer=model_monitoring_service.CreateModelMonitoringJobRequest.serialize, - response_deserializer=gca_model_monitoring_job.ModelMonitoringJob.deserialize, + self._stubs["create_model_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/CreateModelMonitoringJob", + request_serializer=model_monitoring_service.CreateModelMonitoringJobRequest.serialize, + response_deserializer=gca_model_monitoring_job.ModelMonitoringJob.deserialize, + ) ) return self._stubs["create_model_monitoring_job"] @@ -577,12 +577,12 @@ def list_model_monitoring_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_monitoring_jobs" not in self._stubs: - self._stubs[ - "list_model_monitoring_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/ListModelMonitoringJobs", - request_serializer=model_monitoring_service.ListModelMonitoringJobsRequest.serialize, - response_deserializer=model_monitoring_service.ListModelMonitoringJobsResponse.deserialize, + self._stubs["list_model_monitoring_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/ListModelMonitoringJobs", + request_serializer=model_monitoring_service.ListModelMonitoringJobsRequest.serialize, + response_deserializer=model_monitoring_service.ListModelMonitoringJobsResponse.deserialize, + ) ) return self._stubs["list_model_monitoring_jobs"] @@ -608,12 +608,12 @@ def delete_model_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_model_monitoring_job" not in self._stubs: - self._stubs[ - "delete_model_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/DeleteModelMonitoringJob", - request_serializer=model_monitoring_service.DeleteModelMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_model_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/DeleteModelMonitoringJob", + request_serializer=model_monitoring_service.DeleteModelMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_model_monitoring_job"] @@ -640,12 +640,12 @@ def search_model_monitoring_stats( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_monitoring_stats" not in self._stubs: - self._stubs[ - "search_model_monitoring_stats" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringStats", - request_serializer=model_monitoring_service.SearchModelMonitoringStatsRequest.serialize, - response_deserializer=model_monitoring_service.SearchModelMonitoringStatsResponse.deserialize, + self._stubs["search_model_monitoring_stats"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringStats", + request_serializer=model_monitoring_service.SearchModelMonitoringStatsRequest.serialize, + response_deserializer=model_monitoring_service.SearchModelMonitoringStatsResponse.deserialize, + ) ) return self._stubs["search_model_monitoring_stats"] @@ -671,12 +671,12 @@ def search_model_monitoring_alerts( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_monitoring_alerts" not in self._stubs: - self._stubs[ - "search_model_monitoring_alerts" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringAlerts", - request_serializer=model_monitoring_service.SearchModelMonitoringAlertsRequest.serialize, - response_deserializer=model_monitoring_service.SearchModelMonitoringAlertsResponse.deserialize, + self._stubs["search_model_monitoring_alerts"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringAlerts", + request_serializer=model_monitoring_service.SearchModelMonitoringAlertsRequest.serialize, + response_deserializer=model_monitoring_service.SearchModelMonitoringAlertsResponse.deserialize, + ) ) return self._stubs["search_model_monitoring_alerts"] diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc_asyncio.py index 09258b1f1f..b91af42e77 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/grpc_asyncio.py @@ -524,12 +524,12 @@ def create_model_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_model_monitoring_job" not in self._stubs: - self._stubs[ - "create_model_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/CreateModelMonitoringJob", - request_serializer=model_monitoring_service.CreateModelMonitoringJobRequest.serialize, - response_deserializer=gca_model_monitoring_job.ModelMonitoringJob.deserialize, + self._stubs["create_model_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/CreateModelMonitoringJob", + request_serializer=model_monitoring_service.CreateModelMonitoringJobRequest.serialize, + response_deserializer=gca_model_monitoring_job.ModelMonitoringJob.deserialize, + ) ) return self._stubs["create_model_monitoring_job"] @@ -589,12 +589,12 @@ def list_model_monitoring_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_monitoring_jobs" not in self._stubs: - self._stubs[ - "list_model_monitoring_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/ListModelMonitoringJobs", - request_serializer=model_monitoring_service.ListModelMonitoringJobsRequest.serialize, - response_deserializer=model_monitoring_service.ListModelMonitoringJobsResponse.deserialize, + self._stubs["list_model_monitoring_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/ListModelMonitoringJobs", + request_serializer=model_monitoring_service.ListModelMonitoringJobsRequest.serialize, + response_deserializer=model_monitoring_service.ListModelMonitoringJobsResponse.deserialize, + ) ) return self._stubs["list_model_monitoring_jobs"] @@ -620,12 +620,12 @@ def delete_model_monitoring_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_model_monitoring_job" not in self._stubs: - self._stubs[ - "delete_model_monitoring_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/DeleteModelMonitoringJob", - request_serializer=model_monitoring_service.DeleteModelMonitoringJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_model_monitoring_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/DeleteModelMonitoringJob", + request_serializer=model_monitoring_service.DeleteModelMonitoringJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_model_monitoring_job"] @@ -652,12 +652,12 @@ def search_model_monitoring_stats( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "search_model_monitoring_stats" not in self._stubs: - self._stubs[ - "search_model_monitoring_stats" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringStats", - request_serializer=model_monitoring_service.SearchModelMonitoringStatsRequest.serialize, - response_deserializer=model_monitoring_service.SearchModelMonitoringStatsResponse.deserialize, + self._stubs["search_model_monitoring_stats"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringStats", + request_serializer=model_monitoring_service.SearchModelMonitoringStatsRequest.serialize, + response_deserializer=model_monitoring_service.SearchModelMonitoringStatsResponse.deserialize, + ) ) return self._stubs["search_model_monitoring_stats"] @@ -683,12 +683,12 @@ def search_model_monitoring_alerts( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "search_model_monitoring_alerts" not in self._stubs: - self._stubs[ - "search_model_monitoring_alerts" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringAlerts", - request_serializer=model_monitoring_service.SearchModelMonitoringAlertsRequest.serialize, - response_deserializer=model_monitoring_service.SearchModelMonitoringAlertsResponse.deserialize, + self._stubs["search_model_monitoring_alerts"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelMonitoringService/SearchModelMonitoringAlerts", + request_serializer=model_monitoring_service.SearchModelMonitoringAlertsRequest.serialize, + response_deserializer=model_monitoring_service.SearchModelMonitoringAlertsResponse.deserialize, + ) ) return self._stubs["search_model_monitoring_alerts"] diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest.py index 6e4170f8dd..acada77b5b 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest.py @@ -5133,7 +5133,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5276,7 +5275,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5420,7 +5418,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -5569,7 +5566,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5718,7 +5714,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5870,7 +5865,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5989,7 +5983,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6108,7 +6101,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6251,7 +6243,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6396,7 +6387,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest_asyncio.py index b098d2c6c4..2f99cd70e1 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_monitoring_service/transports/rest_asyncio.py @@ -1067,9 +1067,9 @@ def __init__( self._interceptor = interceptor or AsyncModelMonitoringServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5315,7 +5315,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5466,7 +5465,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5618,7 +5616,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5775,7 +5772,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -5932,7 +5928,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6088,7 +6083,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6211,7 +6205,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6334,7 +6327,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6485,7 +6477,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6636,7 +6627,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py index 7320cefbb3..1b14493287 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py @@ -306,21 +306,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ModelServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", + "credentialsType": None, + } + ), ) async def upload_model( diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_service/client.py index 699ad88648..4dc6a7c254 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/model_service/client.py @@ -864,21 +864,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ModelServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ModelService", + "credentialsType": None, + } + ), ) def upload_model( diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py index 03b9f8cd9c..5b2df2217d 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py @@ -547,13 +547,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py index d0badf1885..033682c283 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py @@ -476,12 +476,12 @@ def list_model_version_checkpoints( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_version_checkpoints" not in self._stubs: - self._stubs[ - "list_model_version_checkpoints" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/ListModelVersionCheckpoints", - request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, - response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + self._stubs["list_model_version_checkpoints"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/ListModelVersionCheckpoints", + request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, + response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + ) ) return self._stubs["list_model_version_checkpoints"] @@ -533,12 +533,12 @@ def update_explanation_dataset( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_explanation_dataset" not in self._stubs: - self._stubs[ - "update_explanation_dataset" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/UpdateExplanationDataset", - request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_explanation_dataset"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/UpdateExplanationDataset", + request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_explanation_dataset"] @@ -748,12 +748,12 @@ def batch_import_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_import_model_evaluation_slices" not in self._stubs: - self._stubs[ - "batch_import_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportModelEvaluationSlices", - request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + self._stubs["batch_import_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportModelEvaluationSlices", + request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["batch_import_model_evaluation_slices"] @@ -781,12 +781,12 @@ def batch_import_evaluated_annotations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_import_evaluated_annotations" not in self._stubs: - self._stubs[ - "batch_import_evaluated_annotations" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportEvaluatedAnnotations", - request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, - response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + self._stubs["batch_import_evaluated_annotations"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportEvaluatedAnnotations", + request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, + response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + ) ) return self._stubs["batch_import_evaluated_annotations"] @@ -869,12 +869,12 @@ def get_model_evaluation_slice( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_model_evaluation_slice" not in self._stubs: - self._stubs[ - "get_model_evaluation_slice" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/GetModelEvaluationSlice", - request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, - response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + self._stubs["get_model_evaluation_slice"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/GetModelEvaluationSlice", + request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, + response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + ) ) return self._stubs["get_model_evaluation_slice"] @@ -900,12 +900,12 @@ def list_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_evaluation_slices" not in self._stubs: - self._stubs[ - "list_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/ListModelEvaluationSlices", - request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + self._stubs["list_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/ListModelEvaluationSlices", + request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["list_model_evaluation_slices"] diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py index e2c392f004..de5ffc1757 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py @@ -490,12 +490,12 @@ def list_model_version_checkpoints( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_model_version_checkpoints" not in self._stubs: - self._stubs[ - "list_model_version_checkpoints" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/ListModelVersionCheckpoints", - request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, - response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + self._stubs["list_model_version_checkpoints"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/ListModelVersionCheckpoints", + request_serializer=model_service.ListModelVersionCheckpointsRequest.serialize, + response_deserializer=model_service.ListModelVersionCheckpointsResponse.deserialize, + ) ) return self._stubs["list_model_version_checkpoints"] @@ -548,12 +548,12 @@ def update_explanation_dataset( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_explanation_dataset" not in self._stubs: - self._stubs[ - "update_explanation_dataset" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/UpdateExplanationDataset", - request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_explanation_dataset"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/UpdateExplanationDataset", + request_serializer=model_service.UpdateExplanationDatasetRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_explanation_dataset"] @@ -771,12 +771,12 @@ def batch_import_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_import_model_evaluation_slices" not in self._stubs: - self._stubs[ - "batch_import_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportModelEvaluationSlices", - request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + self._stubs["batch_import_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportModelEvaluationSlices", + request_serializer=model_service.BatchImportModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.BatchImportModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["batch_import_model_evaluation_slices"] @@ -804,12 +804,12 @@ def batch_import_evaluated_annotations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_import_evaluated_annotations" not in self._stubs: - self._stubs[ - "batch_import_evaluated_annotations" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportEvaluatedAnnotations", - request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, - response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + self._stubs["batch_import_evaluated_annotations"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/BatchImportEvaluatedAnnotations", + request_serializer=model_service.BatchImportEvaluatedAnnotationsRequest.serialize, + response_deserializer=model_service.BatchImportEvaluatedAnnotationsResponse.deserialize, + ) ) return self._stubs["batch_import_evaluated_annotations"] @@ -893,12 +893,12 @@ def get_model_evaluation_slice( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_model_evaluation_slice" not in self._stubs: - self._stubs[ - "get_model_evaluation_slice" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/GetModelEvaluationSlice", - request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, - response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + self._stubs["get_model_evaluation_slice"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/GetModelEvaluationSlice", + request_serializer=model_service.GetModelEvaluationSliceRequest.serialize, + response_deserializer=model_evaluation_slice.ModelEvaluationSlice.deserialize, + ) ) return self._stubs["get_model_evaluation_slice"] @@ -924,12 +924,12 @@ def list_model_evaluation_slices( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_model_evaluation_slices" not in self._stubs: - self._stubs[ - "list_model_evaluation_slices" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.ModelService/ListModelEvaluationSlices", - request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, - response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + self._stubs["list_model_evaluation_slices"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.ModelService/ListModelEvaluationSlices", + request_serializer=model_service.ListModelEvaluationSlicesRequest.serialize, + response_deserializer=model_service.ListModelEvaluationSlicesResponse.deserialize, + ) ) return self._stubs["list_model_evaluation_slices"] diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest.py index 29b401b3c8..ab78602bd1 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest.py +++ 
b/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest.py @@ -7098,7 +7098,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7244,7 +7243,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7387,7 +7385,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7539,7 +7536,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -7691,7 +7687,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7840,7 +7835,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7956,7 +7950,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8072,7 +8065,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8216,7 +8208,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -8358,7 +8349,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest_asyncio.py index 7ecfb31ee3..5cf58e9018 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/rest_asyncio.py @@ -1564,9 +1564,9 @@ def __init__( self._interceptor = interceptor or AsyncModelServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -7396,7 +7396,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -7550,7 +7549,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -7703,7 +7701,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -7863,7 +7860,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -8024,7 +8020,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -8181,7 +8176,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -8305,7 +8299,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -8429,7 +8422,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -8581,7 +8573,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -8733,7 +8724,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/notebook_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/async_client.py index 40bd9ece30..28f244e870 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/notebook_service/async_client.py @@ -57,12 +57,16 @@ from google.cloud.aiplatform_v1beta1.types import ( notebook_execution_job as gca_notebook_execution_job, ) -from google.cloud.aiplatform_v1beta1.types import notebook_idle_shutdown_config +from google.cloud.aiplatform_v1beta1.types import ( + notebook_idle_shutdown_config, +) from google.cloud.aiplatform_v1beta1.types import notebook_runtime from google.cloud.aiplatform_v1beta1.types import ( notebook_runtime as gca_notebook_runtime, ) -from google.cloud.aiplatform_v1beta1.types import notebook_runtime_template_ref +from google.cloud.aiplatform_v1beta1.types import ( + notebook_runtime_template_ref, +) from google.cloud.aiplatform_v1beta1.types import notebook_service from google.cloud.aiplatform_v1beta1.types import notebook_software_config from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -324,21 +328,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.NotebookServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.NotebookService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.NotebookService", - "credentialsType": None, - }, + extra=( + { + "serviceName": 
"google.cloud.aiplatform.v1beta1.NotebookService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.NotebookService", + "credentialsType": None, + } + ), ) async def create_notebook_runtime_template( diff --git a/google/cloud/aiplatform_v1beta1/services/notebook_service/client.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/client.py index d930ac2724..4474109e87 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/notebook_service/client.py @@ -73,12 +73,16 @@ from google.cloud.aiplatform_v1beta1.types import ( notebook_execution_job as gca_notebook_execution_job, ) -from google.cloud.aiplatform_v1beta1.types import notebook_idle_shutdown_config +from google.cloud.aiplatform_v1beta1.types import ( + notebook_idle_shutdown_config, +) from google.cloud.aiplatform_v1beta1.types import notebook_runtime from google.cloud.aiplatform_v1beta1.types import ( notebook_runtime as gca_notebook_runtime, ) -from google.cloud.aiplatform_v1beta1.types import notebook_runtime_template_ref +from google.cloud.aiplatform_v1beta1.types import ( + notebook_runtime_template_ref, +) from google.cloud.aiplatform_v1beta1.types import notebook_service from google.cloud.aiplatform_v1beta1.types import notebook_software_config from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -892,21 +896,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.NotebookServiceClient`.", - extra={ - "serviceName": 
"google.cloud.aiplatform.v1beta1.NotebookService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.NotebookService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.NotebookService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.NotebookService", + "credentialsType": None, + } + ), ) def create_notebook_runtime_template( diff --git a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/base.py index 33366838ab..adb2b3d9f1 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/base.py @@ -475,13 +475,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git 
a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc.py index f3d650bc87..b5dbee8646 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc.py @@ -368,12 +368,12 @@ def create_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_notebook_runtime_template" not in self._stubs: - self._stubs[ - "create_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookRuntimeTemplate", - request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookRuntimeTemplate", + request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_runtime_template"] @@ -399,12 +399,12 @@ def get_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_notebook_runtime_template" not in self._stubs: - self._stubs[ - "get_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookRuntimeTemplate", - request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["get_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookRuntimeTemplate", + request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["get_notebook_runtime_template"] @@ -431,12 +431,12 @@ def list_notebook_runtime_templates( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_notebook_runtime_templates" not in self._stubs: - self._stubs[ - "list_notebook_runtime_templates" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookRuntimeTemplates", - request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, - response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + self._stubs["list_notebook_runtime_templates"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookRuntimeTemplates", + request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, + response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + ) ) return self._stubs["list_notebook_runtime_templates"] @@ -463,12 +463,12 @@ def delete_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_notebook_runtime_template" not in self._stubs: - self._stubs[ - "delete_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookRuntimeTemplate", - request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookRuntimeTemplate", + request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_runtime_template"] @@ -495,12 +495,12 @@ def update_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_notebook_runtime_template" not in self._stubs: - self._stubs[ - "update_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/UpdateNotebookRuntimeTemplate", - request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["update_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/UpdateNotebookRuntimeTemplate", + request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["update_notebook_runtime_template"] @@ -724,12 +724,12 @@ def create_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_notebook_execution_job" not in self._stubs: - self._stubs[ - "create_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookExecutionJob", - request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookExecutionJob", + request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_execution_job"] @@ -755,12 +755,12 @@ def get_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_notebook_execution_job" not in self._stubs: - self._stubs[ - "get_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookExecutionJob", - request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, - response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + self._stubs["get_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookExecutionJob", + request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, + response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + ) ) return self._stubs["get_notebook_execution_job"] @@ -786,12 +786,12 @@ def list_notebook_execution_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_notebook_execution_jobs" not in self._stubs: - self._stubs[ - "list_notebook_execution_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookExecutionJobs", - request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, - response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + self._stubs["list_notebook_execution_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookExecutionJobs", + request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, + response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + ) ) return self._stubs["list_notebook_execution_jobs"] @@ -816,12 +816,12 @@ def delete_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_notebook_execution_job" not in self._stubs: - self._stubs[ - "delete_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookExecutionJob", - request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookExecutionJob", + request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_execution_job"] diff --git a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc_asyncio.py index a6d011f988..340f7b689d 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc_asyncio.py +++ 
b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/grpc_asyncio.py @@ -376,12 +376,12 @@ def create_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_notebook_runtime_template" not in self._stubs: - self._stubs[ - "create_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookRuntimeTemplate", - request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookRuntimeTemplate", + request_serializer=notebook_service.CreateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_runtime_template"] @@ -407,12 +407,12 @@ def get_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_notebook_runtime_template" not in self._stubs: - self._stubs[ - "get_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookRuntimeTemplate", - request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["get_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookRuntimeTemplate", + request_serializer=notebook_service.GetNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["get_notebook_runtime_template"] @@ -439,12 +439,12 @@ def list_notebook_runtime_templates( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_notebook_runtime_templates" not in self._stubs: - self._stubs[ - "list_notebook_runtime_templates" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookRuntimeTemplates", - request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, - response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + self._stubs["list_notebook_runtime_templates"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookRuntimeTemplates", + request_serializer=notebook_service.ListNotebookRuntimeTemplatesRequest.serialize, + response_deserializer=notebook_service.ListNotebookRuntimeTemplatesResponse.deserialize, + ) ) return self._stubs["list_notebook_runtime_templates"] @@ -471,12 +471,12 @@ def delete_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_notebook_runtime_template" not in self._stubs: - self._stubs[ - "delete_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookRuntimeTemplate", - request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookRuntimeTemplate", + request_serializer=notebook_service.DeleteNotebookRuntimeTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_runtime_template"] @@ -503,12 +503,12 @@ def update_notebook_runtime_template( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_notebook_runtime_template" not in self._stubs: - self._stubs[ - "update_notebook_runtime_template" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/UpdateNotebookRuntimeTemplate", - request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, - response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + self._stubs["update_notebook_runtime_template"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/UpdateNotebookRuntimeTemplate", + request_serializer=notebook_service.UpdateNotebookRuntimeTemplateRequest.serialize, + response_deserializer=notebook_runtime.NotebookRuntimeTemplate.deserialize, + ) ) return self._stubs["update_notebook_runtime_template"] @@ -739,12 +739,12 @@ def create_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_notebook_execution_job" not in self._stubs: - self._stubs[ - "create_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookExecutionJob", - request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/CreateNotebookExecutionJob", + request_serializer=notebook_service.CreateNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_notebook_execution_job"] @@ -770,12 +770,12 @@ def get_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_notebook_execution_job" not in self._stubs: - self._stubs[ - "get_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookExecutionJob", - request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, - response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + self._stubs["get_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/GetNotebookExecutionJob", + request_serializer=notebook_service.GetNotebookExecutionJobRequest.serialize, + response_deserializer=notebook_execution_job.NotebookExecutionJob.deserialize, + ) ) return self._stubs["get_notebook_execution_job"] @@ -801,12 +801,12 @@ def list_notebook_execution_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_notebook_execution_jobs" not in self._stubs: - self._stubs[ - "list_notebook_execution_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookExecutionJobs", - request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, - response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + self._stubs["list_notebook_execution_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/ListNotebookExecutionJobs", + request_serializer=notebook_service.ListNotebookExecutionJobsRequest.serialize, + response_deserializer=notebook_service.ListNotebookExecutionJobsResponse.deserialize, + ) ) return self._stubs["list_notebook_execution_jobs"] @@ -832,12 +832,12 @@ def delete_notebook_execution_job( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_notebook_execution_job" not in self._stubs: - self._stubs[ - "delete_notebook_execution_job" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookExecutionJob", - request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_notebook_execution_job"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.NotebookService/DeleteNotebookExecutionJob", + request_serializer=notebook_service.DeleteNotebookExecutionJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_notebook_execution_job"] diff --git a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest.py index 2e70851396..17ec073e20 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest.py +++ 
b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest.py @@ -6254,7 +6254,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6396,7 +6395,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6539,7 +6537,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6687,7 +6684,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6836,7 +6832,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6985,7 +6980,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7101,7 +7095,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7217,7 +7210,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7359,7 +7351,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -7501,7 +7492,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest_asyncio.py index e24da58283..8d2ac5fec3 100644 --- a/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/notebook_service/transports/rest_asyncio.py @@ -1349,9 +1349,9 @@ def __init__( self._interceptor = interceptor or AsyncNotebookServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -6504,7 +6504,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6657,7 +6656,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6811,7 +6809,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6970,7 +6967,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -7129,7 +7125,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -7285,7 +7280,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -7410,7 +7404,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7535,7 +7528,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7688,7 +7680,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7841,7 +7832,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/async_client.py index 944eebe8bf..310e82ef5c 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import pagers +from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import persistent_resource @@ -325,21 +327,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.PersistentResourceServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", + "credentialsType": None, + } + ), ) async def create_persistent_resource( diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/client.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/client.py index e9d89d6145..b804458221 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import pagers +from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import persistent_resource @@ -109,9 +111,9 @@ class PersistentResourceServiceClientMeta(type): _transport_registry["grpc_asyncio"] = PersistentResourceServiceGrpcAsyncIOTransport _transport_registry["rest"] = PersistentResourceServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncPersistentResourceServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncPersistentResourceServiceRestTransport + ) def get_transport_class( cls, @@ -846,21 +848,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client 
`google.cloud.aiplatform_v1beta1.PersistentResourceServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.PersistentResourceService", + "credentialsType": None, + } + ), ) def create_persistent_resource( diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/base.py index 304990d9a8..675d9a25ec 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/base.py @@ -319,13 +319,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc.py index 38ba897c1f..346c161101 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc.py @@ -366,12 +366,12 @@ def create_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_persistent_resource" not in self._stubs: - self._stubs[ - "create_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/CreatePersistentResource", - request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/CreatePersistentResource", + request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_persistent_resource"] @@ -455,12 +455,12 @@ def delete_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_persistent_resource" not in self._stubs: - self._stubs[ - "delete_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/DeletePersistentResource", - request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/DeletePersistentResource", + request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_persistent_resource"] @@ -486,12 +486,12 @@ def update_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_persistent_resource" not in self._stubs: - self._stubs[ - "update_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/UpdatePersistentResource", - request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/UpdatePersistentResource", + request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_persistent_resource"] @@ -517,12 +517,12 @@ def reboot_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "reboot_persistent_resource" not in self._stubs: - self._stubs[ - "reboot_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/RebootPersistentResource", - request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["reboot_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/RebootPersistentResource", + request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["reboot_persistent_resource"] diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc_asyncio.py index 9c27907d2f..50fe5765b1 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/grpc_asyncio.py @@ -374,12 +374,12 @@ def create_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_persistent_resource" not in self._stubs: - self._stubs[ - "create_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/CreatePersistentResource", - request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["create_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/CreatePersistentResource", + request_serializer=persistent_resource_service.CreatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["create_persistent_resource"] @@ -463,12 +463,12 @@ def delete_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_persistent_resource" not in self._stubs: - self._stubs[ - "delete_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/DeletePersistentResource", - request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/DeletePersistentResource", + request_serializer=persistent_resource_service.DeletePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_persistent_resource"] @@ -494,12 +494,12 @@ def update_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_persistent_resource" not in self._stubs: - self._stubs[ - "update_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/UpdatePersistentResource", - request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["update_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/UpdatePersistentResource", + request_serializer=persistent_resource_service.UpdatePersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["update_persistent_resource"] @@ -525,12 +525,12 @@ def reboot_persistent_resource( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "reboot_persistent_resource" not in self._stubs: - self._stubs[ - "reboot_persistent_resource" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PersistentResourceService/RebootPersistentResource", - request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["reboot_persistent_resource"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PersistentResourceService/RebootPersistentResource", + request_serializer=persistent_resource_service.RebootPersistentResourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["reboot_persistent_resource"] diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest.py index c648737427..89659af53c 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest.py 
+++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest.py @@ -3988,7 +3988,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4133,7 +4132,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4279,7 +4277,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4430,7 +4427,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4581,7 +4577,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4731,7 +4726,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4850,7 +4844,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4969,7 +4962,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5114,7 +5106,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -5259,7 +5250,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest_asyncio.py index edc54b4223..09733b8169 100644 --- a/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/persistent_resource_service/transports/rest_asyncio.py @@ -767,9 +767,9 @@ def __init__( ) self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4129,7 +4129,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4280,7 +4279,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4432,7 +4430,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4589,7 +4586,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -4748,7 +4744,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4904,7 +4899,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5027,7 +5021,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5150,7 +5143,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5301,7 +5293,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5452,7 +5443,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py index 82dabdc686..f014098009 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py @@ -52,7 +52,9 @@ from google.cloud.aiplatform_v1beta1.types import model from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import pipeline_state from google.cloud.aiplatform_v1beta1.types import service_networking @@ -323,21 +325,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.PipelineServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", + "credentialsType": None, + } + ), ) async def create_training_pipeline( diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py index 93a81b3479..9ffde922c8 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py @@ -68,7 +68,9 @@ from google.cloud.aiplatform_v1beta1.types import model from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import pipeline_state from google.cloud.aiplatform_v1beta1.types import service_networking @@ -959,21 +961,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.PipelineServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": 
"google.cloud.aiplatform.v1beta1.PipelineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.PipelineService", + "credentialsType": None, + } + ), ) def create_training_pipeline( diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py index b2a6338353..1f5d55f3a6 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py @@ -29,7 +29,9 @@ import google.protobuf from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import training_pipeline from google.cloud.aiplatform_v1beta1.types import ( @@ -415,13 +417,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git 
a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py index 4a3a51c942..2bfeb826d2 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py @@ -32,7 +32,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import training_pipeline from google.cloud.aiplatform_v1beta1.types import ( @@ -642,12 +644,12 @@ def batch_delete_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_delete_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_delete_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PipelineService/BatchDeletePipelineJobs", - request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_delete_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PipelineService/BatchDeletePipelineJobs", + request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_delete_pipeline_jobs"] @@ -718,12 +720,12 @@ def batch_cancel_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_cancel_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_cancel_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PipelineService/BatchCancelPipelineJobs", - request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_cancel_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PipelineService/BatchCancelPipelineJobs", + request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_cancel_pipeline_jobs"] diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py index 946fe495e0..14d0f062da 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py @@ -35,7 +35,9 @@ from grpc.experimental import aio # type: ignore from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import training_pipeline from google.cloud.aiplatform_v1beta1.types import ( @@ -657,12 +659,12 @@ def batch_delete_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_delete_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_delete_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PipelineService/BatchDeletePipelineJobs", - request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_delete_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PipelineService/BatchDeletePipelineJobs", + request_serializer=pipeline_service.BatchDeletePipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_delete_pipeline_jobs"] @@ -736,12 +738,12 @@ def batch_cancel_pipeline_jobs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_cancel_pipeline_jobs" not in self._stubs: - self._stubs[ - "batch_cancel_pipeline_jobs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.PipelineService/BatchCancelPipelineJobs", - request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["batch_cancel_pipeline_jobs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.PipelineService/BatchCancelPipelineJobs", + request_serializer=pipeline_service.BatchCancelPipelineJobsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["batch_cancel_pipeline_jobs"] diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest.py index a474fd74aa..d49c1686c9 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest.py @@ -38,7 +38,9 @@ from 
google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import training_pipeline from google.cloud.aiplatform_v1beta1.types import ( @@ -5164,7 +5166,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5306,7 +5307,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5449,7 +5449,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5597,7 +5596,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5746,7 +5744,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5895,7 +5892,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6011,7 +6007,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -6127,7 +6122,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6269,7 +6263,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6411,7 +6404,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_asyncio.py index a86ed4458c..2410fe0065 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_asyncio.py @@ -51,7 +51,9 @@ from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import training_pipeline from google.cloud.aiplatform_v1beta1.types import ( @@ -1041,9 +1043,9 @@ def __init__( self._interceptor = interceptor or AsyncPipelineServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ 
-5346,7 +5348,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5499,7 +5500,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5653,7 +5653,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5812,7 +5811,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5971,7 +5969,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6127,7 +6124,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6252,7 +6248,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6377,7 +6372,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6530,7 +6524,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -6683,7 +6676,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_base.py index 58dd5c2167..d3329c2031 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import training_pipeline from google.cloud.aiplatform_v1beta1.types import ( diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py index 11e9083240..b18e5e74c5 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py @@ -297,21 +297,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.PredictionServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - 
if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", + "credentialsType": None, + } + ), ) async def predict( diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py index 2a58d3dc6f..f4a49a3f44 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py @@ -836,21 +836,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.PredictionServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + 
"credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.PredictionService", + "credentialsType": None, + } + ), ) def predict( diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py index a7205a207c..c7da589953 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py @@ -472,13 +472,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py index 6f3f6b0ada..7df86baea8 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py @@ -531,12 +531,12 @@ def stream_direct_raw_predict( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "stream_direct_raw_predict" not in self._stubs: - self._stubs[ - "stream_direct_raw_predict" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1beta1.PredictionService/StreamDirectRawPredict", - request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, - response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + self._stubs["stream_direct_raw_predict"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1beta1.PredictionService/StreamDirectRawPredict", + request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, + response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + ) ) return self._stubs["stream_direct_raw_predict"] diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py index 169c437dbb..788cdb1a09 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py @@ -544,12 +544,12 @@ def stream_direct_raw_predict( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "stream_direct_raw_predict" not in self._stubs: - self._stubs[ - "stream_direct_raw_predict" - ] = self._logged_channel.stream_stream( - "/google.cloud.aiplatform.v1beta1.PredictionService/StreamDirectRawPredict", - request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, - response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + self._stubs["stream_direct_raw_predict"] = ( + self._logged_channel.stream_stream( + "/google.cloud.aiplatform.v1beta1.PredictionService/StreamDirectRawPredict", + request_serializer=prediction_service.StreamDirectRawPredictRequest.serialize, + response_deserializer=prediction_service.StreamDirectRawPredictResponse.deserialize, + ) ) return self._stubs["stream_direct_raw_predict"] diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest.py index 135a592bc7..d4b81203a6 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest.py @@ -3126,7 +3126,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3269,7 +3268,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3412,7 +3410,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3560,7 +3557,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -3709,7 +3705,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -3859,7 +3854,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3976,7 +3970,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4092,7 +4085,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4235,7 +4227,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4378,7 +4369,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest_asyncio.py index f5e2cd4bb5..9ab121c218 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/rest_asyncio.py @@ -3309,7 +3309,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -3462,7 +3461,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -3616,7 +3614,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -3775,7 +3772,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -3934,7 +3930,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4090,7 +4085,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4213,7 +4207,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4336,7 +4329,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4489,7 +4481,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -4642,7 +4633,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/async_client.py index 2cf4c6d666..2d6a85629e 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/async_client.py @@ -48,7 +48,9 @@ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -297,21 +299,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ReasoningEngineExecutionServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", + "credentialsType": None, + } + ), ) async def query_reasoning_engine( diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/client.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/client.py index 124ff4bbfd..693c116b7c 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/client.py @@ -63,7 +63,9 @@ _LOGGER = std_logging.getLogger(__name__) from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -101,14 +103,14 @@ class ReasoningEngineExecutionServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[ReasoningEngineExecutionServiceTransport]] _transport_registry["grpc"] = ReasoningEngineExecutionServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = ReasoningEngineExecutionServiceGrpcAsyncIOTransport + _transport_registry["grpc_asyncio"] = ( + ReasoningEngineExecutionServiceGrpcAsyncIOTransport + ) _transport_registry["rest"] = ReasoningEngineExecutionServiceRestTransport if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncReasoningEngineExecutionServiceRestTransport + _transport_registry["rest_asyncio"] 
= ( + AsyncReasoningEngineExecutionServiceRestTransport + ) def get_transport_class( cls, @@ -764,21 +766,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ReasoningEngineExecutionServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService", + "credentialsType": None, + } + ), ) def query_reasoning_engine( diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/__init__.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/__init__.py index 06c5aa0858..8d2c6f6d5a 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/__init__.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/__init__.py @@ -42,14 +42,14 @@ OrderedDict() ) # type: Dict[str, 
Type[ReasoningEngineExecutionServiceTransport]] _transport_registry["grpc"] = ReasoningEngineExecutionServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = ReasoningEngineExecutionServiceGrpcAsyncIOTransport +_transport_registry["grpc_asyncio"] = ( + ReasoningEngineExecutionServiceGrpcAsyncIOTransport +) _transport_registry["rest"] = ReasoningEngineExecutionServiceRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry[ - "rest_asyncio" - ] = AsyncReasoningEngineExecutionServiceRestTransport + _transport_registry["rest_asyncio"] = ( + AsyncReasoningEngineExecutionServiceRestTransport + ) __all__ = ( "ReasoningEngineExecutionServiceTransport", diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/base.py index 9b0c008799..5d2c7098ae 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/base.py @@ -28,7 +28,9 @@ import google.protobuf from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -254,13 +256,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise 
NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc.py index f0913fe02f..c11b50b41d 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc.py @@ -31,7 +31,9 @@ import proto # type: ignore from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -378,12 +380,12 @@ def stream_query_reasoning_engine( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "stream_query_reasoning_engine" not in self._stubs: - self._stubs[ - "stream_query_reasoning_engine" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", - request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, - response_deserializer=httpbody_pb2.HttpBody.FromString, + self._stubs["stream_query_reasoning_engine"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", + request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, + response_deserializer=httpbody_pb2.HttpBody.FromString, + ) ) return self._stubs["stream_query_reasoning_engine"] diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py index a7b4ff5fcf..d71359bad8 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/grpc_asyncio.py @@ -34,7 +34,9 @@ from grpc.experimental import aio # type: ignore from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -386,12 +388,12 @@ def stream_query_reasoning_engine( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "stream_query_reasoning_engine" not in self._stubs: - self._stubs[ - "stream_query_reasoning_engine" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", - request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, - response_deserializer=httpbody_pb2.HttpBody.FromString, + self._stubs["stream_query_reasoning_engine"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1beta1.ReasoningEngineExecutionService/StreamQueryReasoningEngine", + request_serializer=reasoning_engine_execution_service.StreamQueryReasoningEngineRequest.serialize, + response_deserializer=httpbody_pb2.HttpBody.FromString, + ) ) return self._stubs["stream_query_reasoning_engine"] diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest.py index 8fb379e0ab..6f09efe78b 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest.py @@ -37,7 +37,9 @@ from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -956,7 +958,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1101,7 +1102,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -1245,7 +1245,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1394,7 +1393,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1545,7 +1543,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1695,7 +1692,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1812,7 +1808,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1929,7 +1924,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2072,7 +2066,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2215,7 +2208,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_asyncio.py index 23cba28645..5bc23fb559 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_asyncio.py @@ -49,7 +49,9 @@ from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.longrunning import operations_pb2 # type: ignore @@ -1035,7 +1037,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1188,7 +1189,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1342,7 +1342,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1501,7 +1500,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1660,7 +1658,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -1818,7 +1815,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -1943,7 +1939,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2068,7 +2063,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2221,7 +2215,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2374,7 +2367,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_base.py index 0f4c652851..5ce4bdf1d5 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_execution_service/transports/rest_base.py @@ -28,7 +28,9 @@ from google.api import httpbody_pb2 # type: ignore -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + reasoning_engine_execution_service, +) from google.longrunning import operations_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/async_client.py index f48f4ed1a8..5f373c2746 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import pagers +from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import reasoning_engine @@ -303,21 +305,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ReasoningEngineServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", - "universeDomain": getattr( - 
self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", + "credentialsType": None, + } + ), ) async def create_reasoning_engine( diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/client.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/client.py index da95cb0eee..3c4c5e107b 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import pagers +from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import 
reasoning_engine @@ -797,21 +799,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ReasoningEngineServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ReasoningEngineService", + "credentialsType": None, + } + ), ) def create_reasoning_engine( diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/base.py index e788cd7e2f..76b9505bc9 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/base.py @@ -305,13 +305,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def 
delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest.py index 439f438279..a94e47607a 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest.py @@ -3750,7 +3750,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3893,7 +3892,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4037,7 +4035,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4186,7 +4183,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4335,7 +4331,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4487,7 +4482,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -4606,7 +4600,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4725,7 +4718,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4868,7 +4860,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5013,7 +5004,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest_asyncio.py index 3af4aaf791..c23c36df99 100644 --- a/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/reasoning_engine_service/transports/rest_asyncio.py @@ -707,9 +707,9 @@ def __init__( self._interceptor = interceptor or AsyncReasoningEngineServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3871,7 +3871,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -4022,7 +4021,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4174,7 +4172,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4331,7 +4328,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4488,7 +4484,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4644,7 +4639,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4767,7 +4761,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4890,7 +4883,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5041,7 +5033,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5192,7 +5183,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/schedule_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/schedule_service/async_client.py index bda62e058b..af90b4f682 100644 --- a/google/cloud/aiplatform_v1beta1/services/schedule_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/schedule_service/async_client.py @@ -346,21 +346,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ScheduleServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", + "credentialsType": None, + } + ), ) async def create_schedule( diff --git a/google/cloud/aiplatform_v1beta1/services/schedule_service/client.py b/google/cloud/aiplatform_v1beta1/services/schedule_service/client.py index 04358158b8..ccae8c4ec7 100644 --- a/google/cloud/aiplatform_v1beta1/services/schedule_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/schedule_service/client.py @@ -1108,21 +1108,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.ScheduleServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.ScheduleService", + "credentialsType": None, + } + ), ) def create_schedule( diff --git a/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/base.py index 8b2a7e0d39..d05975f42e 100644 --- a/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/base.py @@ -332,13 +332,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( 
self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest.py index 2becca397b..94312208f9 100644 --- a/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest.py @@ -3993,7 +3993,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4135,7 +4134,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4278,7 +4276,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4426,7 +4423,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4575,7 +4571,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4724,7 +4719,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4840,7 +4834,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -4956,7 +4949,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5098,7 +5090,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5240,7 +5231,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest_asyncio.py index 432a2b14bd..c0e77f544f 100644 --- a/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/schedule_service/transports/rest_asyncio.py @@ -735,9 +735,9 @@ def __init__( self._interceptor = interceptor or AsyncScheduleServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4149,7 +4149,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4302,7 +4301,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -4456,7 +4454,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4615,7 +4612,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4774,7 +4770,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4930,7 +4925,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5055,7 +5049,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5180,7 +5173,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5333,7 +5325,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5486,7 +5477,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/session_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/session_service/async_client.py index 0f370d3232..2b47672c61 100644 --- a/google/cloud/aiplatform_v1beta1/services/session_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/session_service/async_client.py @@ -292,21 +292,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.SessionServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", + "credentialsType": None, + } + ), ) async def create_session( diff --git a/google/cloud/aiplatform_v1beta1/services/session_service/client.py b/google/cloud/aiplatform_v1beta1/services/session_service/client.py index 9e50ba9829..07b6913c8c 100644 --- a/google/cloud/aiplatform_v1beta1/services/session_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/session_service/client.py @@ -795,21 +795,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.SessionServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.SessionService", + "credentialsType": None, + } + ), ) def create_session( diff --git a/google/cloud/aiplatform_v1beta1/services/session_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/session_service/transports/base.py index 9e876b91f6..616be870dc 100644 --- a/google/cloud/aiplatform_v1beta1/services/session_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/session_service/transports/base.py @@ -337,13 +337,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest.py index 41db692c2c..3bb605b010 100644 --- a/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest.py @@ -4147,7 +4147,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4289,7 +4288,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4432,7 +4430,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4580,7 +4577,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4728,7 +4724,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4877,7 +4872,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4993,7 +4987,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -5109,7 +5102,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5251,7 +5243,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5393,7 +5384,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest_asyncio.py index 2832144b86..aec5d8a689 100644 --- a/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/session_service/transports/rest_asyncio.py @@ -812,9 +812,9 @@ def __init__( self._interceptor = interceptor or AsyncSessionServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -4297,7 +4297,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -4449,7 +4448,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -4602,7 +4600,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4760,7 +4757,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4919,7 +4915,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -5075,7 +5070,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -5200,7 +5194,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -5324,7 +5317,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5477,7 +5469,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5629,7 +5620,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py index 1b997207e3..4946259fb1 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py @@ -47,10 +47,14 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import pagers +from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import specialist_pool -from google.cloud.aiplatform_v1beta1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1beta1.types import ( + specialist_pool as gca_specialist_pool, +) from google.cloud.aiplatform_v1beta1.types import specialist_pool_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -297,21 +301,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.SpecialistPoolServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", - "credentialsType": None, - }, + 
extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", + "credentialsType": None, + } + ), ) async def create_specialist_pool( diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py index 4272300086..0ac040a250 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py @@ -63,10 +63,14 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import pagers +from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import specialist_pool -from google.cloud.aiplatform_v1beta1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1beta1.types import ( + specialist_pool as gca_specialist_pool, +) from google.cloud.aiplatform_v1beta1.types import specialist_pool_service from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -755,21 +759,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.SpecialistPoolServiceClient`.", 
- extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.SpecialistPoolService", + "credentialsType": None, + } + ), ) def create_specialist_pool( diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py index cea5555613..7cb7575de1 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py @@ -304,13 +304,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise 
NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest.py index bcd87401c1..09fc1cca1d 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest.py @@ -3762,7 +3762,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -3905,7 +3904,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4049,7 +4047,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4198,7 +4195,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4347,7 +4343,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4499,7 +4494,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4618,7 +4612,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. 
Args: @@ -4737,7 +4730,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -4880,7 +4872,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5023,7 +5014,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest_asyncio.py index 23a1ef19ac..736bf82726 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/rest_asyncio.py @@ -708,9 +708,9 @@ def __init__( self._interceptor = interceptor or AsyncSpecialistPoolServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -3860,7 +3860,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -4011,7 +4010,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -4163,7 +4161,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -4320,7 +4317,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -4477,7 +4473,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -4633,7 +4628,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -4756,7 +4750,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -4879,7 +4872,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -5030,7 +5022,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -5181,7 +5172,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py index e23e123639..e1ed0397c8 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/async_client.py @@ -49,18 +49,24 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.tensorboard_service import pagers +from google.cloud.aiplatform_v1beta1.services.tensorboard_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import tensorboard -from google.cloud.aiplatform_v1beta1.types import tensorboard as gca_tensorboard +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard as gca_tensorboard, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_data from google.cloud.aiplatform_v1beta1.types import tensorboard_experiment from google.cloud.aiplatform_v1beta1.types import ( tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -318,21 +324,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.TensorboardServiceAsyncClient`.", - extra={ - "serviceName": 
"google.cloud.aiplatform.v1beta1.TensorboardService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", + "credentialsType": None, + } + ), ) async def create_tensorboard( diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py index 2315d1235e..98a756e87f 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/client.py @@ -64,18 +64,24 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.tensorboard_service import pagers +from google.cloud.aiplatform_v1beta1.services.tensorboard_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from 
google.cloud.aiplatform_v1beta1.types import tensorboard -from google.cloud.aiplatform_v1beta1.types import tensorboard as gca_tensorboard +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard as gca_tensorboard, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_data from google.cloud.aiplatform_v1beta1.types import tensorboard_experiment from google.cloud.aiplatform_v1beta1.types import ( tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -841,21 +847,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.TensorboardServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + 
"get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.TensorboardService", + "credentialsType": None, + } + ), ) def create_tensorboard( diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/base.py index c40e706729..1dacf58414 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/base.py @@ -34,7 +34,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -727,13 +729,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc.py index 7ae2aaaaee..f0eba0b08f 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc.py @@ -37,7 +37,9 @@ 
tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -574,12 +576,12 @@ def create_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_tensorboard_experiment" not in self._stubs: - self._stubs[ - "create_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardExperiment", - request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["create_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardExperiment", + request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["create_tensorboard_experiment"] @@ -605,12 +607,12 @@ def get_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_tensorboard_experiment" not in self._stubs: - self._stubs[ - "get_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardExperiment", - request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, - response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["get_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardExperiment", + request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, + response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["get_tensorboard_experiment"] @@ -636,12 +638,12 @@ def update_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_tensorboard_experiment" not in self._stubs: - self._stubs[ - "update_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardExperiment", - request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["update_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardExperiment", + request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["update_tensorboard_experiment"] @@ -667,12 +669,12 @@ def list_tensorboard_experiments( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_tensorboard_experiments" not in self._stubs: - self._stubs[ - "list_tensorboard_experiments" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardExperiments", - request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + self._stubs["list_tensorboard_experiments"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardExperiments", + request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + ) ) return self._stubs["list_tensorboard_experiments"] @@ -698,12 +700,12 @@ def delete_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_tensorboard_experiment" not in self._stubs: - self._stubs[ - "delete_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardExperiment", - request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardExperiment", + request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_experiment"] @@ -758,12 +760,12 @@ def batch_create_tensorboard_runs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_create_tensorboard_runs" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_runs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardRuns", - request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + self._stubs["batch_create_tensorboard_runs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardRuns", + request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_runs"] @@ -905,12 +907,12 @@ def batch_create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardTimeSeries", - request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + self._stubs["batch_create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardTimeSeries", + request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_time_series"] @@ -936,12 +938,12 @@ def create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the 
functions for each. if "create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardTimeSeries", - request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardTimeSeries", + request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["create_tensorboard_time_series"] @@ -967,12 +969,12 @@ def get_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_tensorboard_time_series" not in self._stubs: - self._stubs[ - "get_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardTimeSeries", - request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["get_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardTimeSeries", + request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["get_tensorboard_time_series"] @@ -998,12 +1000,12 @@ def update_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_tensorboard_time_series" not in self._stubs: - self._stubs[ - "update_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardTimeSeries", - request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["update_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardTimeSeries", + request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["update_tensorboard_time_series"] @@ -1029,12 +1031,12 @@ def list_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tensorboard_time_series" not in self._stubs: - self._stubs[ - "list_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardTimeSeries", - request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + self._stubs["list_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardTimeSeries", + request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["list_tensorboard_time_series"] @@ -1060,12 +1062,12 @@ def delete_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_tensorboard_time_series" not in self._stubs: - self._stubs[ - "delete_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardTimeSeries", - request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardTimeSeries", + request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_time_series"] @@ -1097,12 +1099,12 @@ def batch_read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "batch_read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["batch_read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["batch_read_tensorboard_time_series_data"] @@ -1133,12 +1135,12 @@ def read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_time_series_data"] @@ -1167,12 +1169,12 @@ def read_tensorboard_blob_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "read_tensorboard_blob_data" not in self._stubs: - self._stubs[ - "read_tensorboard_blob_data" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardBlobData", - request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + self._stubs["read_tensorboard_blob_data"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardBlobData", + request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_blob_data"] @@ -1201,12 +1203,12 @@ def write_tensorboard_experiment_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "write_tensorboard_experiment_data" not in self._stubs: - self._stubs[ - "write_tensorboard_experiment_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardExperimentData", - request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + self._stubs["write_tensorboard_experiment_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardExperimentData", + request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_experiment_data"] @@ -1234,12 +1236,12 @@ def write_tensorboard_run_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_tensorboard_run_data" not in self._stubs: - self._stubs[ - "write_tensorboard_run_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardRunData", - request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + self._stubs["write_tensorboard_run_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardRunData", + request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_run_data"] @@ -1267,12 +1269,12 @@ def export_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "export_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "export_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ExportTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["export_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ExportTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["export_tensorboard_time_series_data"] diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc_asyncio.py index b1b2111e4f..ac7f2cfc2f 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/grpc_asyncio.py @@ -40,7 +40,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -587,12 +589,12 @@ def create_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_tensorboard_experiment" not in self._stubs: - self._stubs[ - "create_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardExperiment", - request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["create_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardExperiment", + request_serializer=tensorboard_service.CreateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["create_tensorboard_experiment"] @@ -618,12 +620,12 @@ def get_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_tensorboard_experiment" not in self._stubs: - self._stubs[ - "get_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardExperiment", - request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, - response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["get_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardExperiment", + request_serializer=tensorboard_service.GetTensorboardExperimentRequest.serialize, + response_deserializer=tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["get_tensorboard_experiment"] @@ -649,12 +651,12 @@ def update_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_tensorboard_experiment" not in self._stubs: - self._stubs[ - "update_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardExperiment", - request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, - response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + self._stubs["update_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardExperiment", + request_serializer=tensorboard_service.UpdateTensorboardExperimentRequest.serialize, + response_deserializer=gca_tensorboard_experiment.TensorboardExperiment.deserialize, + ) ) return self._stubs["update_tensorboard_experiment"] @@ -680,12 +682,12 @@ def list_tensorboard_experiments( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tensorboard_experiments" not in self._stubs: - self._stubs[ - "list_tensorboard_experiments" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardExperiments", - request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + self._stubs["list_tensorboard_experiments"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardExperiments", + request_serializer=tensorboard_service.ListTensorboardExperimentsRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardExperimentsResponse.deserialize, + ) ) return self._stubs["list_tensorboard_experiments"] @@ -711,12 +713,12 @@ def delete_tensorboard_experiment( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_tensorboard_experiment" not in self._stubs: - self._stubs[ - "delete_tensorboard_experiment" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardExperiment", - request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_experiment"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardExperiment", + request_serializer=tensorboard_service.DeleteTensorboardExperimentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_experiment"] @@ -771,12 +773,12 @@ def batch_create_tensorboard_runs( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_create_tensorboard_runs" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_runs" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardRuns", - request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + self._stubs["batch_create_tensorboard_runs"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardRuns", + request_serializer=tensorboard_service.BatchCreateTensorboardRunsRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardRunsResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_runs"] @@ -920,12 +922,12 @@ def batch_create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "batch_create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "batch_create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardTimeSeries", - request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + self._stubs["batch_create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchCreateTensorboardTimeSeries", + request_serializer=tensorboard_service.BatchCreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.BatchCreateTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["batch_create_tensorboard_time_series"] @@ -951,12 +953,12 @@ def create_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_tensorboard_time_series" not in self._stubs: - self._stubs[ - "create_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardTimeSeries", - request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["create_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/CreateTensorboardTimeSeries", + request_serializer=tensorboard_service.CreateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["create_tensorboard_time_series"] @@ -982,12 +984,12 @@ def get_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_tensorboard_time_series" not in self._stubs: - self._stubs[ - "get_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardTimeSeries", - request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["get_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/GetTensorboardTimeSeries", + request_serializer=tensorboard_service.GetTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["get_tensorboard_time_series"] @@ -1013,12 +1015,12 @@ def update_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_tensorboard_time_series" not in self._stubs: - self._stubs[ - "update_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardTimeSeries", - request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, - response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + self._stubs["update_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/UpdateTensorboardTimeSeries", + request_serializer=tensorboard_service.UpdateTensorboardTimeSeriesRequest.serialize, + response_deserializer=gca_tensorboard_time_series.TensorboardTimeSeries.deserialize, + ) ) return self._stubs["update_tensorboard_time_series"] @@ -1044,12 +1046,12 @@ def list_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_tensorboard_time_series" not in self._stubs: - self._stubs[ - "list_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardTimeSeries", - request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, - response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + self._stubs["list_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ListTensorboardTimeSeries", + request_serializer=tensorboard_service.ListTensorboardTimeSeriesRequest.serialize, + response_deserializer=tensorboard_service.ListTensorboardTimeSeriesResponse.deserialize, + ) ) return self._stubs["list_tensorboard_time_series"] @@ -1075,12 +1077,12 @@ def delete_tensorboard_time_series( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_tensorboard_time_series" not in self._stubs: - self._stubs[ - "delete_tensorboard_time_series" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardTimeSeries", - request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["delete_tensorboard_time_series"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/DeleteTensorboardTimeSeries", + request_serializer=tensorboard_service.DeleteTensorboardTimeSeriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["delete_tensorboard_time_series"] @@ -1112,12 +1114,12 @@ def batch_read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "batch_read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["batch_read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/BatchReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.BatchReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["batch_read_tensorboard_time_series_data"] @@ -1148,12 +1150,12 @@ def read_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "read_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "read_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["read_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ReadTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_time_series_data"] @@ -1182,12 +1184,12 @@ def read_tensorboard_blob_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "read_tensorboard_blob_data" not in self._stubs: - self._stubs[ - "read_tensorboard_blob_data" - ] = self._logged_channel.unary_stream( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardBlobData", - request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, - response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + self._stubs["read_tensorboard_blob_data"] = ( + self._logged_channel.unary_stream( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ReadTensorboardBlobData", + request_serializer=tensorboard_service.ReadTensorboardBlobDataRequest.serialize, + response_deserializer=tensorboard_service.ReadTensorboardBlobDataResponse.deserialize, + ) ) return self._stubs["read_tensorboard_blob_data"] @@ -1216,12 +1218,12 @@ def write_tensorboard_experiment_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "write_tensorboard_experiment_data" not in self._stubs: - self._stubs[ - "write_tensorboard_experiment_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardExperimentData", - request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + self._stubs["write_tensorboard_experiment_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardExperimentData", + request_serializer=tensorboard_service.WriteTensorboardExperimentDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardExperimentDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_experiment_data"] @@ -1249,12 +1251,12 @@ def write_tensorboard_run_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "write_tensorboard_run_data" not in self._stubs: - self._stubs[ - "write_tensorboard_run_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardRunData", - request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, - response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + self._stubs["write_tensorboard_run_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/WriteTensorboardRunData", + request_serializer=tensorboard_service.WriteTensorboardRunDataRequest.serialize, + response_deserializer=tensorboard_service.WriteTensorboardRunDataResponse.deserialize, + ) ) return self._stubs["write_tensorboard_run_data"] @@ -1282,12 +1284,12 @@ def export_tensorboard_time_series_data( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "export_tensorboard_time_series_data" not in self._stubs: - self._stubs[ - "export_tensorboard_time_series_data" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.TensorboardService/ExportTensorboardTimeSeriesData", - request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, - response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + self._stubs["export_tensorboard_time_series_data"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.TensorboardService/ExportTensorboardTimeSeriesData", + request_serializer=tensorboard_service.ExportTensorboardTimeSeriesDataRequest.serialize, + response_deserializer=tensorboard_service.ExportTensorboardTimeSeriesDataResponse.deserialize, + ) ) return self._stubs["export_tensorboard_time_series_data"] diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest.py index c0e126cf82..16ed934d49 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest.py @@ -43,7 +43,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -9492,7 +9494,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -9635,7 +9636,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -9779,7 +9779,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -9928,7 +9927,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10077,7 +10075,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10229,7 +10226,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10346,7 +10342,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10463,7 +10458,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -10606,7 +10600,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -10749,7 +10742,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_asyncio.py index ba919006f2..924e6543fc 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_asyncio.py @@ -56,7 +56,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -2199,9 +2201,9 @@ def __init__( self._interceptor = interceptor or AsyncTensorboardServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -9889,7 +9891,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -10042,7 +10043,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. 
Args: @@ -10196,7 +10196,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -10355,7 +10354,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -10514,7 +10512,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -10670,7 +10667,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -10793,7 +10789,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -10916,7 +10911,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -11069,7 +11063,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -11220,7 +11213,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_base.py b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_base.py index 53d784edb9..4c59eecaae 100644 --- a/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_base.py +++ b/google/cloud/aiplatform_v1beta1/services/tensorboard_service/transports/rest_base.py @@ -33,7 +33,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/async_client.py index d48b167100..1bb62f67f9 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/async_client.py @@ -47,7 +47,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import pagers +from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -305,21 +307,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.VertexRagDataServiceAsyncClient`.", 
- extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", + "credentialsType": None, + } + ), ) async def create_rag_corpus( diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/client.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/client.py index c1f2b88f60..0ec5d0e15b 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/client.py @@ -63,7 +63,9 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import pagers +from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from 
google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -859,21 +861,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.VertexRagDataServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagDataService", + "credentialsType": None, + } + ), ) def create_rag_corpus( diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/base.py index aeb3a83b6f..abd52bc21f 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/base.py @@ -408,13 +408,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + 
[operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest.py index 3f6e85ab52..e499bfdf61 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest.py @@ -5281,7 +5281,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5424,7 +5423,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5568,7 +5566,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5717,7 +5714,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -5866,7 +5862,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6018,7 +6013,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -6135,7 +6129,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6252,7 +6245,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6395,7 +6387,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6538,7 +6529,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest_asyncio.py index dcb3f400d3..f8cfa58d07 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/transports/rest_asyncio.py @@ -1110,9 +1110,9 @@ def __init__( self._interceptor = interceptor or AsyncVertexRagDataServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5469,7 +5469,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. 
Args: @@ -5622,7 +5621,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -5774,7 +5772,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -5931,7 +5928,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6088,7 +6084,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6244,7 +6239,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6367,7 +6361,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6490,7 +6483,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6641,7 +6633,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -6792,7 +6783,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/async_client.py index fd655227c7..706e470e49 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/async_client.py @@ -278,21 +278,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.VertexRagServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", + "credentialsType": None, + } + ), ) async def retrieve_contexts( diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/client.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/client.py index 5d2008b2d7..70306999ac 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/client.py @@ -740,21 +740,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.VertexRagServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.VertexRagService", + "credentialsType": None, + } + ), ) def retrieve_contexts( diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/base.py index 118fbe2e25..94a2a7c87f 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/base.py @@ -273,13 +273,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def 
delete_operation( self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest.py index e39ad93b44..892dba8c00 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest.py @@ -1142,7 +1142,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1284,7 +1283,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1427,7 +1425,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1575,7 +1572,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1724,7 +1720,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -1874,7 +1869,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. 
Args: @@ -1991,7 +1985,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2107,7 +2100,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2249,7 +2241,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2391,7 +2382,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest_asyncio.py index 38acef26ce..81328e22d6 100644 --- a/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/vertex_rag_service/transports/rest_asyncio.py @@ -1230,7 +1230,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -1383,7 +1382,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -1537,7 +1535,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -1696,7 +1693,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1855,7 +1851,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -2011,7 +2006,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -2136,7 +2130,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -2261,7 +2254,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -2414,7 +2406,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -2567,7 +2558,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/async_client.py index 48c44c5db3..2a14e3d1ef 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/vizier_service/async_client.py @@ -288,21 +288,23 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.VizierServiceAsyncClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", - "universeDomain": getattr( - self._client._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._client._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", + "credentialsType": None, + } + ), ) async def create_study( diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/client.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/client.py index 4d10956fc0..39bfc64adc 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/client.py +++ 
b/google/cloud/aiplatform_v1beta1/services/vizier_service/client.py @@ -789,21 +789,25 @@ def __init__( ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.aiplatform_v1beta1.VizierServiceClient`.", - extra={ - "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", - "universeDomain": getattr( - self._transport._credentials, "universe_domain", "" - ), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr( - self.transport._credentials, "get_cred_info", lambda: None - )(), - } - if hasattr(self._transport, "_credentials") - else { - "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", - "credentialsType": None, - }, + extra=( + { + "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, + "get_cred_info", + lambda: None, + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.aiplatform.v1beta1.VizierService", + "credentialsType": None, + } + ), ) def create_study( diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/base.py index bdfe764be0..2974a066ca 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/base.py @@ -445,13 +445,19 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: raise NotImplementedError() @property def delete_operation( self, - ) -> 
Callable[[operations_pb2.DeleteOperationRequest], None,]: + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: raise NotImplementedError() @property diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc.py index af589dd0bd..f22a3a128a 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc.py @@ -695,12 +695,12 @@ def check_trial_early_stopping_state( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "check_trial_early_stopping_state" not in self._stubs: - self._stubs[ - "check_trial_early_stopping_state" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.VizierService/CheckTrialEarlyStoppingState", - request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["check_trial_early_stopping_state"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.VizierService/CheckTrialEarlyStoppingState", + request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["check_trial_early_stopping_state"] diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc_asyncio.py index ffd61a1601..472a7d3f49 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc_asyncio.py @@ -711,12 +711,12 @@ def check_trial_early_stopping_state( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for 
each. if "check_trial_early_stopping_state" not in self._stubs: - self._stubs[ - "check_trial_early_stopping_state" - ] = self._logged_channel.unary_unary( - "/google.cloud.aiplatform.v1beta1.VizierService/CheckTrialEarlyStoppingState", - request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + self._stubs["check_trial_early_stopping_state"] = ( + self._logged_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.VizierService/CheckTrialEarlyStoppingState", + request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) return self._stubs["check_trial_early_stopping_state"] diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest.py index 2ab8ec2707..43c4be18b9 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest.py +++ b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest.py @@ -5752,7 +5752,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -5896,7 +5895,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6039,7 +6037,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6187,7 +6184,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -6335,7 +6331,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: @@ -6484,7 +6479,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6600,7 +6594,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -6716,7 +6709,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -6858,7 +6850,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7000,7 +6991,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest_asyncio.py index a5ec9aee7f..4f0ff724ba 100644 --- a/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/rest_asyncio.py @@ -1160,9 +1160,9 @@ def __init__( self._interceptor = interceptor or AsyncVizierServiceRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[ - operations_v1.AsyncOperationsRestClient - ] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -5977,7 +5977,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: - r"""Call the get location method over HTTP. Args: @@ -6129,7 +6128,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: - r"""Call the list locations method over HTTP. Args: @@ -6282,7 +6280,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -6440,7 +6437,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -6599,7 +6595,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. 
Args: @@ -6756,7 +6751,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -6880,7 +6874,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -7004,7 +6997,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -7156,7 +7148,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -7308,7 +7299,6 @@ async def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py index d2693f6c30..ee6a121999 100644 --- a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py +++ b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py @@ -72,6 +72,7 @@ class AcceleratorType(proto.Enum): TPU_V5_LITEPOD (12): TPU v5. """ + ACCELERATOR_TYPE_UNSPECIFIED = 0 NVIDIA_TESLA_K80 = 1 NVIDIA_TESLA_P100 = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/artifact.py b/google/cloud/aiplatform_v1beta1/types/artifact.py index 09a243f294..baf36b2528 100644 --- a/google/cloud/aiplatform_v1beta1/types/artifact.py +++ b/google/cloud/aiplatform_v1beta1/types/artifact.py @@ -112,6 +112,7 @@ class State(proto.Enum): exist, unless something external to the system deletes it. 
""" + STATE_UNSPECIFIED = 0 PENDING = 1 LIVE = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py index 6964e64157..8b25f30ae3 100644 --- a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py @@ -22,7 +22,9 @@ from google.cloud.aiplatform_v1beta1.types import ( completion_stats as gca_completion_stats, ) -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import job_state @@ -30,7 +32,9 @@ from google.cloud.aiplatform_v1beta1.types import ( manual_batch_tuning_parameters as gca_manual_batch_tuning_parameters, ) -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import model_monitoring from google.cloud.aiplatform_v1beta1.types import ( unmanaged_container_model as gca_unmanaged_container_model, @@ -629,7 +633,9 @@ class OutputInfo(proto.Message): proto.STRING, number=29, ) - manual_batch_tuning_parameters: gca_manual_batch_tuning_parameters.ManualBatchTuningParameters = proto.Field( + manual_batch_tuning_parameters: ( + gca_manual_batch_tuning_parameters.ManualBatchTuningParameters + ) = proto.Field( proto.MESSAGE, number=8, message=gca_manual_batch_tuning_parameters.ManualBatchTuningParameters, diff --git a/google/cloud/aiplatform_v1beta1/types/cached_content.py b/google/cloud/aiplatform_v1beta1/types/cached_content.py index a202168461..69de9502d6 100644 --- a/google/cloud/aiplatform_v1beta1/types/cached_content.py +++ 
b/google/cloud/aiplatform_v1beta1/types/cached_content.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import content -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import tool from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/content.py b/google/cloud/aiplatform_v1beta1/types/content.py index 4697755957..f0c8974f2d 100644 --- a/google/cloud/aiplatform_v1beta1/types/content.py +++ b/google/cloud/aiplatform_v1beta1/types/content.py @@ -80,6 +80,7 @@ class HarmCategory(proto.Enum): Deprecated: Election filter is not longer supported. The harm category is civic integrity. """ + HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_HATE_SPEECH = 1 HARM_CATEGORY_DANGEROUS_CONTENT = 2 @@ -105,6 +106,7 @@ class Modality(proto.Enum): DOCUMENT (5): Document, e.g. PDF. """ + MODALITY_UNSPECIFIED = 0 TEXT = 1 IMAGE = 2 @@ -551,6 +553,7 @@ class Modality(proto.Enum): AUDIO (3): Audio modality. """ + MODALITY_UNSPECIFIED = 0 TEXT = 1 IMAGE = 2 @@ -570,6 +573,7 @@ class MediaResolution(proto.Enum): Media resolution set to high (zoomed reframing with 256 tokens). """ + MEDIA_RESOLUTION_UNSPECIFIED = 0 MEDIA_RESOLUTION_LOW = 1 MEDIA_RESOLUTION_MEDIUM = 2 @@ -625,12 +629,15 @@ class ModelRoutingPreference(proto.Enum): PRIORITIZE_COST (3): Prefer lower cost over higher quality. 
""" + UNKNOWN = 0 PRIORITIZE_QUALITY = 1 BALANCED = 2 PRIORITIZE_COST = 3 - model_routing_preference: "GenerationConfig.RoutingConfig.AutoRoutingMode.ModelRoutingPreference" = proto.Field( + model_routing_preference: ( + "GenerationConfig.RoutingConfig.AutoRoutingMode.ModelRoutingPreference" + ) = proto.Field( proto.ENUM, number=1, optional=True, @@ -722,12 +729,15 @@ class FeatureSelectionPreference(proto.Enum): PRIORITIZE_COST (3): Prefer lower cost over higher quality. """ + FEATURE_SELECTION_PREFERENCE_UNSPECIFIED = 0 PRIORITIZE_QUALITY = 1 BALANCED = 2 PRIORITIZE_COST = 3 - feature_selection_preference: "GenerationConfig.ModelConfig.FeatureSelectionPreference" = proto.Field( + feature_selection_preference: ( + "GenerationConfig.ModelConfig.FeatureSelectionPreference" + ) = proto.Field( proto.ENUM, number=1, enum="GenerationConfig.ModelConfig.FeatureSelectionPreference", @@ -876,6 +886,7 @@ class HarmBlockThreshold(proto.Enum): OFF (5): Turn off the safety filter. """ + HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 @@ -896,6 +907,7 @@ class HarmBlockMethod(proto.Enum): The harm block method uses the probability score. """ + HARM_BLOCK_METHOD_UNSPECIFIED = 0 SEVERITY = 1 PROBABILITY = 2 @@ -953,6 +965,7 @@ class HarmProbability(proto.Enum): HIGH (4): High level of harm. """ + HARM_PROBABILITY_UNSPECIFIED = 0 NEGLIGIBLE = 1 LOW = 2 @@ -974,6 +987,7 @@ class HarmSeverity(proto.Enum): HARM_SEVERITY_HIGH (4): High level of harm severity. """ + HARM_SEVERITY_UNSPECIFIED = 0 HARM_SEVERITY_NEGLIGIBLE = 1 HARM_SEVERITY_LOW = 2 @@ -1153,6 +1167,7 @@ class FinishReason(proto.Enum): The model response was blocked by Model Armor. """ + FINISH_REASON_UNSPECIFIED = 0 STOP = 1 MAX_TOKENS = 2 @@ -1251,6 +1266,7 @@ class UrlRetrievalStatus(proto.Enum): URL_RETRIEVAL_STATUS_ERROR (2): Url retrieval is failed due to error. 
""" + URL_RETRIEVAL_STATUS_UNSPECIFIED = 0 URL_RETRIEVAL_STATUS_SUCCESS = 1 URL_RETRIEVAL_STATUS_ERROR = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/custom_job.py b/google/cloud/aiplatform_v1beta1/types/custom_job.py index 0724b61af2..1bfb9d94d9 100644 --- a/google/cloud/aiplatform_v1beta1/types/custom_job.py +++ b/google/cloud/aiplatform_v1beta1/types/custom_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import env_var from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import job_state @@ -601,6 +603,7 @@ class Strategy(proto.Enum): Flex Start strategy uses DWS to queue for resources. """ + STRATEGY_UNSPECIFIED = 0 ON_DEMAND = 1 LOW_COST = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py index 6423968905..354bfce02a 100644 --- a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py +++ b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import job_state from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -320,6 +322,7 @@ class SampleStrategy(proto.Enum): UNCERTAINTY (1): Sample the most uncertain data to label. 
""" + SAMPLE_STRATEGY_UNSPECIFIED = 0 UNCERTAINTY = 1 diff --git a/google/cloud/aiplatform_v1beta1/types/dataset.py b/google/cloud/aiplatform_v1beta1/types/dataset.py index eed3016c3d..98a78082e8 100644 --- a/google/cloud/aiplatform_v1beta1/types/dataset.py +++ b/google/cloud/aiplatform_v1beta1/types/dataset.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import saved_query from google.protobuf import struct_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/dataset_service.py b/google/cloud/aiplatform_v1beta1/types/dataset_service.py index da09bfdd35..0ab8823d95 100644 --- a/google/cloud/aiplatform_v1beta1/types/dataset_service.py +++ b/google/cloud/aiplatform_v1beta1/types/dataset_service.py @@ -23,9 +23,13 @@ from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import data_item as gca_data_item from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1beta1.types import operation -from google.cloud.aiplatform_v1beta1.types import saved_query as gca_saved_query +from google.cloud.aiplatform_v1beta1.types import ( + saved_query as gca_saved_query, +) from google.cloud.aiplatform_v1beta1.types import tool from google.protobuf import field_mask_pb2 # type: ignore @@ -576,12 +580,12 @@ class ListDatasetVersionsResponse(proto.Message): def raw_page(self): return self - dataset_versions: MutableSequence[ - gca_dataset_version.DatasetVersion - ] = proto.RepeatedField( - proto.MESSAGE, - 
number=1, - message=gca_dataset_version.DatasetVersion, + dataset_versions: MutableSequence[gca_dataset_version.DatasetVersion] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_dataset_version.DatasetVersion, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -1196,6 +1200,7 @@ class DatasetUsage(proto.Enum): SFT_VALIDATION (2): Supervised fine-tuning validation dataset. """ + DATASET_USAGE_UNSPECIFIED = 0 SFT_TRAINING = 1 SFT_VALIDATION = 2 @@ -1204,7 +1209,9 @@ class DatasetUsage(proto.Enum): proto.STRING, number=1, ) - dataset_usage: "AssessDataRequest.TuningValidationAssessmentConfig.DatasetUsage" = proto.Field( + dataset_usage: ( + "AssessDataRequest.TuningValidationAssessmentConfig.DatasetUsage" + ) = proto.Field( proto.ENUM, number=2, enum="AssessDataRequest.TuningValidationAssessmentConfig.DatasetUsage", @@ -1267,13 +1274,17 @@ class BatchPredictionResourceUsageAssessmentConfig(proto.Message): message=TuningResourceUsageAssessmentConfig, ) ) - batch_prediction_validation_assessment_config: BatchPredictionValidationAssessmentConfig = proto.Field( + batch_prediction_validation_assessment_config: ( + BatchPredictionValidationAssessmentConfig + ) = proto.Field( proto.MESSAGE, number=6, oneof="assessment_config", message=BatchPredictionValidationAssessmentConfig, ) - batch_prediction_resource_usage_assessment_config: BatchPredictionResourceUsageAssessmentConfig = proto.Field( + batch_prediction_resource_usage_assessment_config: ( + BatchPredictionResourceUsageAssessmentConfig + ) = proto.Field( proto.MESSAGE, number=7, oneof="assessment_config", @@ -1396,13 +1407,17 @@ class BatchPredictionResourceUsageAssessmentResult(proto.Message): message=TuningResourceUsageAssessmentResult, ) ) - batch_prediction_validation_assessment_result: BatchPredictionValidationAssessmentResult = proto.Field( + batch_prediction_validation_assessment_result: ( + BatchPredictionValidationAssessmentResult + ) = proto.Field( proto.MESSAGE, number=3, 
oneof="assessment_result", message=BatchPredictionValidationAssessmentResult, ) - batch_prediction_resource_usage_assessment_result: BatchPredictionResourceUsageAssessmentResult = proto.Field( + batch_prediction_resource_usage_assessment_result: ( + BatchPredictionResourceUsageAssessmentResult + ) = proto.Field( proto.MESSAGE, number=4, oneof="assessment_result", diff --git a/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py index 3e9ef44148..e8b78d9c35 100644 --- a/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py +++ b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import machine_resources from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py index aa7d65e35f..972ebc621e 100644 --- a/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py +++ b/google/cloud/aiplatform_v1beta1/types/deployment_resource_pool_service.py @@ -310,12 +310,12 @@ def raw_page(self): proto.STRING, number=2, ) - deployed_model_refs: MutableSequence[ - deployed_model_ref.DeployedModelRef - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=deployed_model_ref.DeployedModelRef, + deployed_model_refs: MutableSequence[deployed_model_ref.DeployedModelRef] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=deployed_model_ref.DeployedModelRef, + ) ) total_deployed_model_count: int = proto.Field( proto.INT32, diff --git a/google/cloud/aiplatform_v1beta1/types/deployment_stage.py 
b/google/cloud/aiplatform_v1beta1/types/deployment_stage.py index c7a073ea50..08790976ea 100644 --- a/google/cloud/aiplatform_v1beta1/types/deployment_stage.py +++ b/google/cloud/aiplatform_v1beta1/types/deployment_stage.py @@ -56,6 +56,7 @@ class DeploymentStage(proto.Enum): DEPLOYMENT_TERMINATED (10): The deployment has terminated. """ + DEPLOYMENT_STAGE_UNSPECIFIED = 0 STARTING_DEPLOYMENT = 5 PREPARING_MODEL = 6 diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint.py b/google/cloud/aiplatform_v1beta1/types/endpoint.py index bb45f97db5..1c6f507307 100644 --- a/google/cloud/aiplatform_v1beta1/types/endpoint.py +++ b/google/cloud/aiplatform_v1beta1/types/endpoint.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import machine_resources diff --git a/google/cloud/aiplatform_v1beta1/types/evaluated_annotation.py b/google/cloud/aiplatform_v1beta1/types/evaluated_annotation.py index 768170ccb5..020f58e1f2 100644 --- a/google/cloud/aiplatform_v1beta1/types/evaluated_annotation.py +++ b/google/cloud/aiplatform_v1beta1/types/evaluated_annotation.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import explanation as gca_explanation +from google.cloud.aiplatform_v1beta1.types import ( + explanation as gca_explanation, +) from google.protobuf import struct_pb2 # type: ignore @@ -126,6 +128,7 @@ class EvaluatedAnnotationType(proto.Enum): has a ground truth annotation which is not matched by any of the model created predictions. 
""" + EVALUATED_ANNOTATION_TYPE_UNSPECIFIED = 0 TRUE_POSITIVE = 1 FALSE_POSITIVE = 2 @@ -155,19 +158,19 @@ class EvaluatedAnnotationType(proto.Enum): proto.STRING, number=6, ) - explanations: MutableSequence[ - "EvaluatedAnnotationExplanation" - ] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message="EvaluatedAnnotationExplanation", + explanations: MutableSequence["EvaluatedAnnotationExplanation"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=8, + message="EvaluatedAnnotationExplanation", + ) ) - error_analysis_annotations: MutableSequence[ - "ErrorAnalysisAnnotation" - ] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message="ErrorAnalysisAnnotation", + error_analysis_annotations: MutableSequence["ErrorAnalysisAnnotation"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=9, + message="ErrorAnalysisAnnotation", + ) ) @@ -234,6 +237,7 @@ class QueryType(proto.Enum): Query dissimilar samples from the same class of the input sample. """ + QUERY_TYPE_UNSPECIFIED = 0 ALL_SIMILAR = 1 SAME_CLASS_SIMILAR = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/evaluation_service.py b/google/cloud/aiplatform_v1beta1/types/evaluation_service.py index 518d188b9b..a07bfcaaac 100644 --- a/google/cloud/aiplatform_v1beta1/types/evaluation_service.py +++ b/google/cloud/aiplatform_v1beta1/types/evaluation_service.py @@ -205,6 +205,7 @@ class PairwiseChoice(proto.Enum): TIE (3): Winner cannot be determined """ + PAIRWISE_CHOICE_UNSPECIFIED = 0 BASELINE = 1 CANDIDATE = 2 @@ -509,6 +510,7 @@ class AggregationMetric(proto.Enum): 99th percentile aggregation metric. Not supported for pairwise metric. 
""" + AGGREGATION_METRIC_UNSPECIFIED = 0 AVERAGE = 1 MODE = 2 @@ -887,7 +889,9 @@ class EvaluateInstancesRequest(proto.Message): oneof="metric_inputs", message="QuestionAnsweringQualityInput", ) - pairwise_question_answering_quality_input: "PairwiseQuestionAnsweringQualityInput" = proto.Field( + pairwise_question_answering_quality_input: ( + "PairwiseQuestionAnsweringQualityInput" + ) = proto.Field( proto.MESSAGE, number=24, oneof="metric_inputs", @@ -1255,7 +1259,9 @@ class EvaluateInstancesResponse(proto.Message): oneof="evaluation_results", message="QuestionAnsweringQualityResult", ) - pairwise_question_answering_quality_result: "PairwiseQuestionAnsweringQualityResult" = proto.Field( + pairwise_question_answering_quality_result: ( + "PairwiseQuestionAnsweringQualityResult" + ) = proto.Field( proto.MESSAGE, number=23, oneof="evaluation_results", @@ -1369,7 +1375,9 @@ class EvaluateInstancesResponse(proto.Message): oneof="evaluation_results", message="TrajectorySingleToolUseResults", ) - rubric_based_instruction_following_result: "RubricBasedInstructionFollowingResult" = proto.Field( + rubric_based_instruction_following_result: ( + "RubricBasedInstructionFollowingResult" + ) = proto.Field( proto.MESSAGE, number=38, oneof="evaluation_results", @@ -1443,12 +1451,12 @@ class ExactMatchResults(proto.Message): Output only. Exact match metric values. """ - exact_match_metric_values: MutableSequence[ - "ExactMatchMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="ExactMatchMetricValue", + exact_match_metric_values: MutableSequence["ExactMatchMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ExactMatchMetricValue", + ) ) @@ -3752,12 +3760,12 @@ class ToolCallValidResults(proto.Message): Output only. Tool call valid metric values. 
""" - tool_call_valid_metric_values: MutableSequence[ - "ToolCallValidMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="ToolCallValidMetricValue", + tool_call_valid_metric_values: MutableSequence["ToolCallValidMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ToolCallValidMetricValue", + ) ) @@ -3843,12 +3851,12 @@ class ToolNameMatchResults(proto.Message): Output only. Tool name match metric values. """ - tool_name_match_metric_values: MutableSequence[ - "ToolNameMatchMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="ToolNameMatchMetricValue", + tool_name_match_metric_values: MutableSequence["ToolNameMatchMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ToolNameMatchMetricValue", + ) ) @@ -4121,6 +4129,7 @@ class CometVersion(proto.Enum): Comet 22 for translation + source + reference (source-reference-combined). """ + COMET_VERSION_UNSPECIFIED = 0 COMET_22_SRC_REF = 2 @@ -4258,6 +4267,7 @@ class MetricxVersion(proto.Enum): MetricX 2024 (2.6) for translation + source + reference (source-reference-combined). """ + METRICX_VERSION_UNSPECIFIED = 0 METRICX_24_REF = 1 METRICX_24_SRC = 2 @@ -4418,12 +4428,12 @@ class RubricBasedInstructionFollowingResult(proto.Message): number=1, optional=True, ) - rubric_critique_results: MutableSequence[ - "RubricCritiqueResult" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="RubricCritiqueResult", + rubric_critique_results: MutableSequence["RubricCritiqueResult"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RubricCritiqueResult", + ) ) @@ -4920,12 +4930,12 @@ class TrajectoryRecallResults(proto.Message): Output only. TrajectoryRecall metric values. 
""" - trajectory_recall_metric_values: MutableSequence[ - "TrajectoryRecallMetricValue" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="TrajectoryRecallMetricValue", + trajectory_recall_metric_values: MutableSequence["TrajectoryRecallMetricValue"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TrajectoryRecallMetricValue", + ) ) diff --git a/google/cloud/aiplatform_v1beta1/types/event.py b/google/cloud/aiplatform_v1beta1/types/event.py index da4430e5da..27e3145268 100644 --- a/google/cloud/aiplatform_v1beta1/types/event.py +++ b/google/cloud/aiplatform_v1beta1/types/event.py @@ -75,6 +75,7 @@ class Type(proto.Enum): OUTPUT (2): An output of the Execution. """ + TYPE_UNSPECIFIED = 0 INPUT = 1 OUTPUT = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/example_store.py b/google/cloud/aiplatform_v1beta1/types/example_store.py index 398af28308..79e11ff7ee 100644 --- a/google/cloud/aiplatform_v1beta1/types/example_store.py +++ b/google/cloud/aiplatform_v1beta1/types/example_store.py @@ -178,7 +178,9 @@ class ContentSearchKey(proto.Message): number=1, message=content.Content, ) - search_key_generation_method: example.StoredContentsExample.SearchKeyGenerationMethod = proto.Field( + search_key_generation_method: ( + example.StoredContentsExample.SearchKeyGenerationMethod + ) = proto.Field( proto.MESSAGE, number=2, message=example.StoredContentsExample.SearchKeyGenerationMethod, @@ -229,6 +231,7 @@ class ArrayOperator(proto.Enum): The metadata array field in the example must contain all of the values. 
""" + ARRAY_OPERATOR_UNSPECIFIED = 0 CONTAINS_ANY = 1 CONTAINS_ALL = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/example_store_service.py b/google/cloud/aiplatform_v1beta1/types/example_store_service.py index 498f4e5057..c2312fae4b 100644 --- a/google/cloud/aiplatform_v1beta1/types/example_store_service.py +++ b/google/cloud/aiplatform_v1beta1/types/example_store_service.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import example as gca_example -from google.cloud.aiplatform_v1beta1.types import example_store as gca_example_store +from google.cloud.aiplatform_v1beta1.types import ( + example_store as gca_example_store, +) from google.cloud.aiplatform_v1beta1.types import operation from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -243,12 +245,12 @@ class ListExampleStoresResponse(proto.Message): def raw_page(self): return self - example_stores: MutableSequence[ - gca_example_store.ExampleStore - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_example_store.ExampleStore, + example_stores: MutableSequence[gca_example_store.ExampleStore] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_example_store.ExampleStore, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -468,7 +470,9 @@ class SearchExamplesRequest(proto.Message): return. 
""" - stored_contents_example_parameters: gca_example_store.StoredContentsExampleParameters = proto.Field( + stored_contents_example_parameters: ( + gca_example_store.StoredContentsExampleParameters + ) = proto.Field( proto.MESSAGE, number=6, oneof="parameters", diff --git a/google/cloud/aiplatform_v1beta1/types/execution.py b/google/cloud/aiplatform_v1beta1/types/execution.py index 937238d3eb..89c9264224 100644 --- a/google/cloud/aiplatform_v1beta1/types/execution.py +++ b/google/cloud/aiplatform_v1beta1/types/execution.py @@ -111,6 +111,7 @@ class State(proto.Enum): CANCELLED (6): The Execution was cancelled. """ + STATE_UNSPECIFIED = 0 NEW = 1 RUNNING = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/explanation.py b/google/cloud/aiplatform_v1beta1/types/explanation.py index 432fa58d7a..f2214ef8a0 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation.py @@ -778,6 +778,7 @@ class DataFormat(proto.Enum): JSONL (1): Examples are stored in JSONL files. """ + DATA_FORMAT_UNSPECIFIED = 0 JSONL = 1 @@ -854,6 +855,7 @@ class Query(proto.Enum): Faster response as a trade-off against less precise neighbors. """ + PRECISE = 0 FAST = 1 @@ -871,6 +873,7 @@ class Modality(proto.Enum): TABULAR (3): TABULAR modality """ + MODALITY_UNSPECIFIED = 0 IMAGE = 1 TEXT = 2 @@ -1005,6 +1008,7 @@ class DataFormat(proto.Enum): EMBEDDINGS (2): Provided data is a set of embeddings. 
""" + DATA_FORMAT_UNSPECIFIED = 0 INSTANCES = 1 EMBEDDINGS = 2 @@ -1017,12 +1021,12 @@ class DataFormat(proto.Enum): proto.INT32, number=2, ) - restrictions: MutableSequence[ - "ExamplesRestrictionsNamespace" - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message="ExamplesRestrictionsNamespace", + restrictions: MutableSequence["ExamplesRestrictionsNamespace"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message="ExamplesRestrictionsNamespace", + ) ) return_embeddings: bool = proto.Field( proto.BOOL, diff --git a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py index bfd83ccb9f..6824252d68 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py @@ -234,6 +234,7 @@ class Encoding(proto.Enum): [0.5, 0.3, 0.1, 0.2, 0.4], [0.4, 0.3, 0.2, 0.5, 0.1]] """ + ENCODING_UNSPECIFIED = 0 IDENTITY = 1 BAG_OF_FEATURES = 2 @@ -346,6 +347,7 @@ class Type(proto.Enum): Shows which region contributed to the image prediction by outlining the region. """ + TYPE_UNSPECIFIED = 0 PIXELS = 1 OUTLINES = 2 @@ -368,6 +370,7 @@ class Polarity(proto.Enum): Shows both positive and negative attributions. """ + POLARITY_UNSPECIFIED = 0 POSITIVE = 1 NEGATIVE = 2 @@ -396,6 +399,7 @@ class ColorMap(proto.Enum): PINK_WHITE_GREEN (5): PiYG palette. """ + COLOR_MAP_UNSPECIFIED = 0 PINK_GREEN = 1 VIRIDIS = 2 @@ -423,6 +427,7 @@ class OverlayType(proto.Enum): predictive parts of the image and hide the un-predictive parts. 
""" + OVERLAY_TYPE_UNSPECIFIED = 0 NONE = 1 ORIGINAL = 2 @@ -456,7 +461,9 @@ class OverlayType(proto.Enum): proto.FLOAT, number=5, ) - overlay_type: "ExplanationMetadata.InputMetadata.Visualization.OverlayType" = proto.Field( + overlay_type: ( + "ExplanationMetadata.InputMetadata.Visualization.OverlayType" + ) = proto.Field( proto.ENUM, number=6, enum="ExplanationMetadata.InputMetadata.Visualization.OverlayType", diff --git a/google/cloud/aiplatform_v1beta1/types/extension.py b/google/cloud/aiplatform_v1beta1/types/extension.py index 9ac178c5e8..802b86b140 100644 --- a/google/cloud/aiplatform_v1beta1/types/extension.py +++ b/google/cloud/aiplatform_v1beta1/types/extension.py @@ -56,6 +56,7 @@ class HttpElementLocation(proto.Enum): HTTP_IN_COOKIE (5): Element is in the HTTP request cookie. """ + HTTP_IN_UNSPECIFIED = 0 HTTP_IN_QUERY = 1 HTTP_IN_HEADER = 2 @@ -83,6 +84,7 @@ class AuthType(proto.Enum): OIDC_AUTH (8): OpenID Connect (OIDC) Auth. """ + AUTH_TYPE_UNSPECIFIED = 0 NO_AUTH = 1 API_KEY_AUTH = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/feature.py b/google/cloud/aiplatform_v1beta1/types/feature.py index 4758418929..8ac5f7afd3 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature.py +++ b/google/cloud/aiplatform_v1beta1/types/feature.py @@ -159,6 +159,7 @@ class ValueType(proto.Enum): STRUCT (14): Used for Feature that is struct. """ + VALUE_TYPE_UNSPECIFIED = 0 BOOL = 1 BOOL_ARRAY = 2 @@ -204,6 +205,7 @@ class Objective(proto.Enum): SNAPSHOT_ANALYSIS (2): Stats are generated by Snapshot Analysis. 
""" + OBJECTIVE_UNSPECIFIED = 0 IMPORT_FEATURE_ANALYSIS = 1 SNAPSHOT_ANALYSIS = 2 @@ -264,19 +266,19 @@ class Objective(proto.Enum): proto.BOOL, number=12, ) - monitoring_stats: MutableSequence[ - feature_monitoring_stats.FeatureStatsAnomaly - ] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=feature_monitoring_stats.FeatureStatsAnomaly, + monitoring_stats: MutableSequence[feature_monitoring_stats.FeatureStatsAnomaly] = ( + proto.RepeatedField( + proto.MESSAGE, + number=10, + message=feature_monitoring_stats.FeatureStatsAnomaly, + ) ) - monitoring_stats_anomalies: MutableSequence[ - MonitoringStatsAnomaly - ] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=MonitoringStatsAnomaly, + monitoring_stats_anomalies: MutableSequence[MonitoringStatsAnomaly] = ( + proto.RepeatedField( + proto.MESSAGE, + number=11, + message=MonitoringStatsAnomaly, + ) ) feature_stats_and_anomaly: MutableSequence[ feature_monitor.FeatureStatsAndAnomaly diff --git a/google/cloud/aiplatform_v1beta1/types/feature_group.py b/google/cloud/aiplatform_v1beta1/types/feature_group.py index a94d2d95d8..72d0d2b39d 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_group.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_group.py @@ -110,6 +110,7 @@ class ServiceAgentType(proto.Enum): This service account will be used to read from the source BigQuery table during jobs under a FeatureGroup. """ + SERVICE_AGENT_TYPE_UNSPECIFIED = 0 SERVICE_AGENT_TYPE_PROJECT = 1 SERVICE_AGENT_TYPE_FEATURE_GROUP = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/feature_monitor_job.py b/google/cloud/aiplatform_v1beta1/types/feature_monitor_job.py index f59d9b31a8..399492cf36 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_monitor_job.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_monitor_job.py @@ -95,6 +95,7 @@ class FeatureMonitorJobTrigger(proto.Enum): Triggered on demand by CreateFeatureMonitorJob request. 
""" + FEATURE_MONITOR_JOB_TRIGGER_UNSPECIFIED = 0 FEATURE_MONITOR_JOB_TRIGGER_PERIODIC = 1 FEATURE_MONITOR_JOB_TRIGGER_ON_DEMAND = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/feature_online_store.py b/google/cloud/aiplatform_v1beta1/types/feature_online_store.py index 232047bfca..e08bd6e896 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_online_store.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_online_store.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import service_networking from google.protobuf import timestamp_pb2 # type: ignore @@ -133,6 +135,7 @@ class State(proto.Enum): featureOnlineStore is still usable in this state. """ + STATE_UNSPECIFIED = 0 STABLE = 1 UPDATING = 2 @@ -219,7 +222,9 @@ class DedicatedServingEndpoint(proto.Message): proto.STRING, number=2, ) - private_service_connect_config: service_networking.PrivateServiceConnectConfig = proto.Field( + private_service_connect_config: ( + service_networking.PrivateServiceConnectConfig + ) = proto.Field( proto.MESSAGE, number=3, message=service_networking.PrivateServiceConnectConfig, diff --git a/google/cloud/aiplatform_v1beta1/types/feature_online_store_admin_service.py b/google/cloud/aiplatform_v1beta1/types/feature_online_store_admin_service.py index cc4bf3fd34..8084d83f8f 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_online_store_admin_service.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_online_store_admin_service.py @@ -22,7 +22,9 @@ from google.cloud.aiplatform_v1beta1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1beta1.types import feature_view as gca_feature_view +from google.cloud.aiplatform_v1beta1.types import ( + feature_view as gca_feature_view, +) from 
google.cloud.aiplatform_v1beta1.types import ( feature_view_sync as gca_feature_view_sync, ) @@ -707,12 +709,12 @@ class ListFeatureViewSyncsResponse(proto.Message): def raw_page(self): return self - feature_view_syncs: MutableSequence[ - gca_feature_view_sync.FeatureViewSync - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_feature_view_sync.FeatureViewSync, + feature_view_syncs: MutableSequence[gca_feature_view_sync.FeatureViewSync] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_feature_view_sync.FeatureViewSync, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/feature_online_store_service.py b/google/cloud/aiplatform_v1beta1/types/feature_online_store_service.py index a2adad853c..a1ea511583 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_online_store_service.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_online_store_service.py @@ -56,6 +56,7 @@ class FeatureViewDataFormat(proto.Enum): PROTO_STRUCT (2): Return response data in proto Struct format. """ + FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED = 0 KEY_VALUE = 1 PROTO_STRUCT = 2 @@ -155,6 +156,7 @@ class Format(proto.Enum): PROTO_STRUCT (2): Return response data in proto Struct format. """ + _pb_options = {"deprecated": True} FORMAT_UNSPECIFIED = 0 KEY_VALUE = 1 @@ -498,6 +500,7 @@ class Operator(proto.Enum): Entities are eligible if their value is != the query's. 
""" + OPERATOR_UNSPECIFIED = 0 LESS = 1 LESS_EQUAL = 2 @@ -797,12 +800,12 @@ class FeatureValueAndTimestamp(proto.Message): proto.STRING, number=1, ) - data_key_and_feature_values: MutableSequence[ - DataKeyAndFeatureValues - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=DataKeyAndFeatureValues, + data_key_and_feature_values: MutableSequence[DataKeyAndFeatureValues] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=DataKeyAndFeatureValues, + ) ) diff --git a/google/cloud/aiplatform_v1beta1/types/feature_registry_service.py b/google/cloud/aiplatform_v1beta1/types/feature_registry_service.py index c68bae124e..ed3e957aa7 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_registry_service.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_registry_service.py @@ -19,8 +19,12 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import feature_group as gca_feature_group -from google.cloud.aiplatform_v1beta1.types import feature_monitor as gca_feature_monitor +from google.cloud.aiplatform_v1beta1.types import ( + feature_group as gca_feature_group, +) +from google.cloud.aiplatform_v1beta1.types import ( + feature_monitor as gca_feature_monitor, +) from google.cloud.aiplatform_v1beta1.types import ( feature_monitor_job as gca_feature_monitor_job, ) @@ -200,12 +204,12 @@ class ListFeatureGroupsResponse(proto.Message): def raw_page(self): return self - feature_groups: MutableSequence[ - gca_feature_group.FeatureGroup - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_feature_group.FeatureGroup, + feature_groups: MutableSequence[gca_feature_group.FeatureGroup] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_feature_group.FeatureGroup, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -475,12 +479,12 @@ class ListFeatureMonitorsResponse(proto.Message): def raw_page(self): return self - feature_monitors: MutableSequence[ - gca_feature_monitor.FeatureMonitor - ] = 
proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_feature_monitor.FeatureMonitor, + feature_monitors: MutableSequence[gca_feature_monitor.FeatureMonitor] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_feature_monitor.FeatureMonitor, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -709,12 +713,12 @@ class ListFeatureMonitorJobsResponse(proto.Message): def raw_page(self): return self - feature_monitor_jobs: MutableSequence[ - gca_feature_monitor_job.FeatureMonitorJob - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_feature_monitor_job.FeatureMonitorJob, + feature_monitor_jobs: MutableSequence[gca_feature_monitor_job.FeatureMonitorJob] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_feature_monitor_job.FeatureMonitorJob, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/feature_view.py b/google/cloud/aiplatform_v1beta1/types/feature_view.py index d4140b647d..48e6ef3dfd 100644 --- a/google/cloud/aiplatform_v1beta1/types/feature_view.py +++ b/google/cloud/aiplatform_v1beta1/types/feature_view.py @@ -146,6 +146,7 @@ class ServiceAgentType(proto.Enum): service account will be used to read from the source BigQuery table during sync. """ + SERVICE_AGENT_TYPE_UNSPECIFIED = 0 SERVICE_AGENT_TYPE_PROJECT = 1 SERVICE_AGENT_TYPE_FEATURE_VIEW = 2 @@ -269,6 +270,7 @@ class DistanceMeasureType(proto.Enum): Dot Product Distance. Defined as a negative of the dot product. """ + DISTANCE_MEASURE_TYPE_UNSPECIFIED = 0 SQUARED_L2_DISTANCE = 1 COSINE_DISTANCE = 2 @@ -412,6 +414,7 @@ class DistanceMeasureType(proto.Enum): Dot Product Distance. Defined as a negative of the dot product. 
""" + DISTANCE_MEASURE_TYPE_UNSPECIFIED = 0 SQUARED_L2_DISTANCE = 1 COSINE_DISTANCE = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/featurestore.py b/google/cloud/aiplatform_v1beta1/types/featurestore.py index cf3c4062d2..f47c193f62 100644 --- a/google/cloud/aiplatform_v1beta1/types/featurestore.py +++ b/google/cloud/aiplatform_v1beta1/types/featurestore.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import timestamp_pb2 # type: ignore @@ -120,6 +122,7 @@ class State(proto.Enum): still be the original value of ``fixed_node_count``. The featurestore is still usable in this state. """ + STATE_UNSPECIFIED = 0 STABLE = 1 UPDATING = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/featurestore_monitoring.py b/google/cloud/aiplatform_v1beta1/types/featurestore_monitoring.py index c7fee86beb..c13f230760 100644 --- a/google/cloud/aiplatform_v1beta1/types/featurestore_monitoring.py +++ b/google/cloud/aiplatform_v1beta1/types/featurestore_monitoring.py @@ -155,6 +155,7 @@ class State(proto.Enum): features analysis regardless of the EntityType-level config. """ + STATE_UNSPECIFIED = 0 DEFAULT = 1 ENABLED = 2 @@ -182,6 +183,7 @@ class Baseline(proto.Enum): Use the statistics generated by the previous import features analysis if exists. 
""" + BASELINE_UNSPECIFIED = 0 LATEST_STATS = 1 MOST_RECENT_SNAPSHOT_STATS = 2 @@ -194,7 +196,9 @@ class Baseline(proto.Enum): enum="FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.State", ) ) - anomaly_detection_baseline: "FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.Baseline" = proto.Field( + anomaly_detection_baseline: ( + "FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.Baseline" + ) = proto.Field( proto.ENUM, number=2, enum="FeaturestoreMonitoringConfig.ImportFeaturesAnalysis.Baseline", diff --git a/google/cloud/aiplatform_v1beta1/types/featurestore_online_service.py b/google/cloud/aiplatform_v1beta1/types/featurestore_online_service.py index eea5066b8a..97a8ddcd58 100644 --- a/google/cloud/aiplatform_v1beta1/types/featurestore_online_service.py +++ b/google/cloud/aiplatform_v1beta1/types/featurestore_online_service.py @@ -252,12 +252,12 @@ class Data(proto.Message): proto.STRING, number=1, ) - data: MutableSequence[ - "ReadFeatureValuesResponse.EntityView.Data" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="ReadFeatureValuesResponse.EntityView.Data", + data: MutableSequence["ReadFeatureValuesResponse.EntityView.Data"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ReadFeatureValuesResponse.EntityView.Data", + ) ) header: Header = proto.Field( diff --git a/google/cloud/aiplatform_v1beta1/types/featurestore_service.py b/google/cloud/aiplatform_v1beta1/types/featurestore_service.py index 261f61d931..500d79b2dd 100644 --- a/google/cloud/aiplatform_v1beta1/types/featurestore_service.py +++ b/google/cloud/aiplatform_v1beta1/types/featurestore_service.py @@ -19,13 +19,17 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_monitor from 
google.cloud.aiplatform_v1beta1.types import ( feature_selector as gca_feature_selector, ) -from google.cloud.aiplatform_v1beta1.types import featurestore as gca_featurestore +from google.cloud.aiplatform_v1beta1.types import ( + featurestore as gca_featurestore, +) from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import operation from google.protobuf import field_mask_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/gen_ai_cache_service.py b/google/cloud/aiplatform_v1beta1/types/gen_ai_cache_service.py index d1acc31bda..e2c669e6d0 100644 --- a/google/cloud/aiplatform_v1beta1/types/gen_ai_cache_service.py +++ b/google/cloud/aiplatform_v1beta1/types/gen_ai_cache_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.protobuf import field_mask_pb2 # type: ignore @@ -169,12 +171,12 @@ class ListCachedContentsResponse(proto.Message): def raw_page(self): return self - cached_contents: MutableSequence[ - gca_cached_content.CachedContent - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_cached_content.CachedContent, + cached_contents: MutableSequence[gca_cached_content.CachedContent] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_cached_content.CachedContent, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/genai_tuning_service.py b/google/cloud/aiplatform_v1beta1/types/genai_tuning_service.py index 1adc3daedd..f2241d81c8 100644 --- a/google/cloud/aiplatform_v1beta1/types/genai_tuning_service.py +++ b/google/cloud/aiplatform_v1beta1/types/genai_tuning_service.py @@ -21,7 +21,9 @@ from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import operation -from 
google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) __protobuf__ = proto.module( diff --git a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py index 50eefe529a..127126b1b8 100644 --- a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py +++ b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import study from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/index.py b/google/cloud/aiplatform_v1beta1/types/index.py index b6af850069..07a0a07038 100644 --- a/google/cloud/aiplatform_v1beta1/types/index.py +++ b/google/cloud/aiplatform_v1beta1/types/index.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import deployed_index_ref -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -130,6 +132,7 @@ class IndexUpdateMethod(proto.Enum): corresponding DeployedIndexes in nearly real-time. 
""" + INDEX_UPDATE_METHOD_UNSPECIFIED = 0 BATCH_UPDATE = 1 STREAM_UPDATE = 2 @@ -155,12 +158,12 @@ class IndexUpdateMethod(proto.Enum): number=6, message=struct_pb2.Value, ) - deployed_indexes: MutableSequence[ - deployed_index_ref.DeployedIndexRef - ] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=deployed_index_ref.DeployedIndexRef, + deployed_indexes: MutableSequence[deployed_index_ref.DeployedIndexRef] = ( + proto.RepeatedField( + proto.MESSAGE, + number=7, + message=deployed_index_ref.DeployedIndexRef, + ) ) etag: str = proto.Field( proto.STRING, @@ -355,6 +358,7 @@ class Operator(proto.Enum): Datapoints are eligible iff their value is != the query's. """ + OPERATOR_UNSPECIFIED = 0 LESS = 1 LESS_EQUAL = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/index_endpoint.py b/google/cloud/aiplatform_v1beta1/types/index_endpoint.py index db2f174824..c59aed3118 100644 --- a/google/cloud/aiplatform_v1beta1/types/index_endpoint.py +++ b/google/cloud/aiplatform_v1beta1/types/index_endpoint.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import service_networking from google.protobuf import timestamp_pb2 # type: ignore @@ -407,12 +409,12 @@ class DeployedIndex(proto.Message): proto.STRING, number=11, ) - psc_automation_configs: MutableSequence[ - service_networking.PSCAutomationConfig - ] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message=service_networking.PSCAutomationConfig, + psc_automation_configs: MutableSequence[service_networking.PSCAutomationConfig] = ( + proto.RepeatedField( + proto.MESSAGE, + number=19, + message=service_networking.PSCAutomationConfig, + ) ) diff --git a/google/cloud/aiplatform_v1beta1/types/index_endpoint_service.py 
b/google/cloud/aiplatform_v1beta1/types/index_endpoint_service.py index 1919ed1e9b..ae6327b7e1 100644 --- a/google/cloud/aiplatform_v1beta1/types/index_endpoint_service.py +++ b/google/cloud/aiplatform_v1beta1/types/index_endpoint_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -190,12 +192,12 @@ class ListIndexEndpointsResponse(proto.Message): def raw_page(self): return self - index_endpoints: MutableSequence[ - gca_index_endpoint.IndexEndpoint - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_index_endpoint.IndexEndpoint, + index_endpoints: MutableSequence[gca_index_endpoint.IndexEndpoint] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_index_endpoint.IndexEndpoint, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/index_service.py b/google/cloud/aiplatform_v1beta1/types/index_service.py index 322e736045..9f09c0e3bf 100644 --- a/google/cloud/aiplatform_v1beta1/types/index_service.py +++ b/google/cloud/aiplatform_v1beta1/types/index_service.py @@ -87,7 +87,9 @@ class CreateIndexOperationMetadata(proto.Message): number=1, message=operation.GenericOperationMetadata, ) - nearest_neighbor_search_operation_metadata: "NearestNeighborSearchOperationMetadata" = proto.Field( + nearest_neighbor_search_operation_metadata: ( + "NearestNeighborSearchOperationMetadata" + ) = proto.Field( proto.MESSAGE, number=2, message="NearestNeighborSearchOperationMetadata", @@ -227,7 +229,9 @@ class UpdateIndexOperationMetadata(proto.Message): number=1, message=operation.GenericOperationMetadata, ) - nearest_neighbor_search_operation_metadata: "NearestNeighborSearchOperationMetadata" = 
proto.Field( + nearest_neighbor_search_operation_metadata: ( + "NearestNeighborSearchOperationMetadata" + ) = proto.Field( proto.MESSAGE, number=2, message="NearestNeighborSearchOperationMetadata", @@ -343,6 +347,7 @@ class ValueType(proto.Enum): DOUBLE (3): Represents 64 bit float. """ + VALUE_TYPE_UNSPECIFIED = 0 INT = 1 FLOAT = 2 @@ -406,13 +411,17 @@ class BigQuerySourceConfig(proto.Message): proto.STRING, number=1, ) - datapoint_field_mapping: "ImportIndexRequest.ConnectorConfig.DatapointFieldMapping" = proto.Field( + datapoint_field_mapping: ( + "ImportIndexRequest.ConnectorConfig.DatapointFieldMapping" + ) = proto.Field( proto.MESSAGE, number=2, message="ImportIndexRequest.ConnectorConfig.DatapointFieldMapping", ) - big_query_source_config: "ImportIndexRequest.ConnectorConfig.BigQuerySourceConfig" = proto.Field( + big_query_source_config: ( + "ImportIndexRequest.ConnectorConfig.BigQuerySourceConfig" + ) = proto.Field( proto.MESSAGE, number=1, oneof="source", @@ -632,6 +641,7 @@ class RecordErrorType(proto.Enum): EMBEDDING_METADATA_EXCEEDS_SIZE_LIMIT (19): Embedding metadata exceeds size limit. 
""" + ERROR_TYPE_UNSPECIFIED = 0 EMPTY_LINE = 1 INVALID_JSON_SYNTAX = 2 @@ -653,7 +663,9 @@ class RecordErrorType(proto.Enum): INVALID_EMBEDDING_METADATA = 18 EMBEDDING_METADATA_EXCEEDS_SIZE_LIMIT = 19 - error_type: "NearestNeighborSearchOperationMetadata.RecordError.RecordErrorType" = proto.Field( + error_type: ( + "NearestNeighborSearchOperationMetadata.RecordError.RecordErrorType" + ) = proto.Field( proto.ENUM, number=1, enum="NearestNeighborSearchOperationMetadata.RecordError.RecordErrorType", @@ -729,12 +741,12 @@ class ContentValidationStats(proto.Message): number=6, ) - content_validation_stats: MutableSequence[ - ContentValidationStats - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=ContentValidationStats, + content_validation_stats: MutableSequence[ContentValidationStats] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=ContentValidationStats, + ) ) data_bytes_count: int = proto.Field( proto.INT64, diff --git a/google/cloud/aiplatform_v1beta1/types/io.py b/google/cloud/aiplatform_v1beta1/types/io.py index 93e761fdfe..654a4158b5 100644 --- a/google/cloud/aiplatform_v1beta1/types/io.py +++ b/google/cloud/aiplatform_v1beta1/types/io.py @@ -238,6 +238,7 @@ class ResourceType(proto.Enum): RESOURCE_TYPE_FOLDER (2): Folder resource type. 
""" + RESOURCE_TYPE_UNSPECIFIED = 0 RESOURCE_TYPE_FILE = 1 RESOURCE_TYPE_FOLDER = 2 @@ -320,12 +321,12 @@ class SlackChannel(proto.Message): message=timestamp_pb2.Timestamp, ) - channels: MutableSequence[ - "SlackSource.SlackChannels.SlackChannel" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="SlackSource.SlackChannels.SlackChannel", + channels: MutableSequence["SlackSource.SlackChannels.SlackChannel"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SlackSource.SlackChannels.SlackChannel", + ) ) api_key_config: api_auth.ApiAuth.ApiKeyConfig = proto.Field( proto.MESSAGE, diff --git a/google/cloud/aiplatform_v1beta1/types/job_service.py b/google/cloud/aiplatform_v1beta1/types/job_service.py index 656229283f..c1900d495c 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_service.py +++ b/google/cloud/aiplatform_v1beta1/types/job_service.py @@ -22,7 +22,9 @@ from google.cloud.aiplatform_v1beta1.types import ( batch_prediction_job as gca_batch_prediction_job, ) -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, ) @@ -386,12 +388,12 @@ class ListDataLabelingJobsResponse(proto.Message): def raw_page(self): return self - data_labeling_jobs: MutableSequence[ - gca_data_labeling_job.DataLabelingJob - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_data_labeling_job.DataLabelingJob, + data_labeling_jobs: MutableSequence[gca_data_labeling_job.DataLabelingJob] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_data_labeling_job.DataLabelingJob, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -844,12 +846,12 @@ class ListNasTrialDetailsResponse(proto.Message): def raw_page(self): return self - nas_trial_details: MutableSequence[ - gca_nas_job.NasTrialDetail - ] = 
proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_nas_job.NasTrialDetail, + nas_trial_details: MutableSequence[gca_nas_job.NasTrialDetail] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_nas_job.NasTrialDetail, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -1049,7 +1051,9 @@ class CreateModelDeploymentMonitoringJobRequest(proto.Message): proto.STRING, number=1, ) - model_deployment_monitoring_job: gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob = proto.Field( + model_deployment_monitoring_job: ( + gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob + ) = proto.Field( proto.MESSAGE, number=2, message=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob, @@ -1109,7 +1113,9 @@ class StatsAnomaliesObjective(proto.Message): latest monitoring run. """ - type_: gca_model_deployment_monitoring_job.ModelDeploymentMonitoringObjectiveType = proto.Field( + type_: ( + gca_model_deployment_monitoring_job.ModelDeploymentMonitoringObjectiveType + ) = proto.Field( proto.ENUM, number=1, enum=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringObjectiveType, @@ -1334,7 +1340,9 @@ class UpdateModelDeploymentMonitoringJobRequest(proto.Message): - ``model_deployment_monitoring_objective_configs.objective_config.prediction_drift_detection_config`` """ - model_deployment_monitoring_job: gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob = proto.Field( + model_deployment_monitoring_job: ( + gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob + ) = proto.Field( proto.MESSAGE, number=1, message=gca_model_deployment_monitoring_job.ModelDeploymentMonitoringJob, diff --git a/google/cloud/aiplatform_v1beta1/types/job_state.py b/google/cloud/aiplatform_v1beta1/types/job_state.py index daaf4ff41f..d47eb5e7b8 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_state.py +++ b/google/cloud/aiplatform_v1beta1/types/job_state.py @@ -63,6 +63,7 @@ class JobState(proto.Enum): 
The job is partially succeeded, some results may be missing due to errors. """ + JOB_STATE_UNSPECIFIED = 0 JOB_STATE_QUEUED = 1 JOB_STATE_PENDING = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/machine_resources.py b/google/cloud/aiplatform_v1beta1/types/machine_resources.py index 7a79c0c1ee..8c248f110f 100644 --- a/google/cloud/aiplatform_v1beta1/types/machine_resources.py +++ b/google/cloud/aiplatform_v1beta1/types/machine_resources.py @@ -233,12 +233,12 @@ class DedicatedResources(proto.Message): proto.INT32, number=9, ) - autoscaling_metric_specs: MutableSequence[ - "AutoscalingMetricSpec" - ] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="AutoscalingMetricSpec", + autoscaling_metric_specs: MutableSequence["AutoscalingMetricSpec"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AutoscalingMetricSpec", + ) ) spot: bool = proto.Field( proto.BOOL, diff --git a/google/cloud/aiplatform_v1beta1/types/match_service.py b/google/cloud/aiplatform_v1beta1/types/match_service.py index f2b235b566..841054cda6 100644 --- a/google/cloud/aiplatform_v1beta1/types/match_service.py +++ b/google/cloud/aiplatform_v1beta1/types/match_service.py @@ -222,12 +222,12 @@ class NearestNeighbors(proto.Message): proto.STRING, number=1, ) - neighbors: MutableSequence[ - "FindNeighborsResponse.Neighbor" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="FindNeighborsResponse.Neighbor", + neighbors: MutableSequence["FindNeighborsResponse.Neighbor"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="FindNeighborsResponse.Neighbor", + ) ) nearest_neighbors: MutableSequence[NearestNeighbors] = proto.RepeatedField( diff --git a/google/cloud/aiplatform_v1beta1/types/memory_bank_service.py b/google/cloud/aiplatform_v1beta1/types/memory_bank_service.py index 5ba5fad007..b3b2771ea0 100644 --- a/google/cloud/aiplatform_v1beta1/types/memory_bank_service.py +++ b/google/cloud/aiplatform_v1beta1/types/memory_bank_service.py @@ -470,6 
+470,7 @@ class Action(proto.Enum): DELETED (3): The memory was deleted. """ + ACTION_UNSPECIFIED = 0 CREATED = 1 UPDATED = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/metadata_schema.py b/google/cloud/aiplatform_v1beta1/types/metadata_schema.py index c87a321ff8..4ef173d69c 100644 --- a/google/cloud/aiplatform_v1beta1/types/metadata_schema.py +++ b/google/cloud/aiplatform_v1beta1/types/metadata_schema.py @@ -79,6 +79,7 @@ class MetadataSchemaType(proto.Enum): A state indicating that the MetadataSchema will be used by Contexts. """ + METADATA_SCHEMA_TYPE_UNSPECIFIED = 0 ARTIFACT_TYPE = 1 EXECUTION_TYPE = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/metadata_service.py b/google/cloud/aiplatform_v1beta1/types/metadata_service.py index 417da16bdf..f686f6b720 100644 --- a/google/cloud/aiplatform_v1beta1/types/metadata_service.py +++ b/google/cloud/aiplatform_v1beta1/types/metadata_service.py @@ -23,8 +23,12 @@ from google.cloud.aiplatform_v1beta1.types import context as gca_context from google.cloud.aiplatform_v1beta1.types import event from google.cloud.aiplatform_v1beta1.types import execution as gca_execution -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) +from google.cloud.aiplatform_v1beta1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1beta1.types import operation from google.protobuf import field_mask_pb2 # type: ignore @@ -213,12 +217,12 @@ class ListMetadataStoresResponse(proto.Message): def raw_page(self): return self - metadata_stores: MutableSequence[ - gca_metadata_store.MetadataStore - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_metadata_store.MetadataStore, + metadata_stores: MutableSequence[gca_metadata_store.MetadataStore] = ( + 
proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_metadata_store.MetadataStore, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -1492,12 +1496,12 @@ class ListMetadataSchemasResponse(proto.Message): def raw_page(self): return self - metadata_schemas: MutableSequence[ - gca_metadata_schema.MetadataSchema - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_metadata_schema.MetadataSchema, + metadata_schemas: MutableSequence[gca_metadata_schema.MetadataSchema] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_metadata_schema.MetadataSchema, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/metadata_store.py b/google/cloud/aiplatform_v1beta1/types/metadata_store.py index c9b49214a4..6f1ffe7924 100644 --- a/google/cloud/aiplatform_v1beta1/types/metadata_store.py +++ b/google/cloud/aiplatform_v1beta1/types/metadata_store.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/migration_service.py b/google/cloud/aiplatform_v1beta1/types/migration_service.py index 39646f1895..77aa401549 100644 --- a/google/cloud/aiplatform_v1beta1/types/migration_service.py +++ b/google/cloud/aiplatform_v1beta1/types/migration_service.py @@ -147,12 +147,12 @@ class BatchMigrateResourcesRequest(proto.Message): proto.STRING, number=1, ) - migrate_resource_requests: MutableSequence[ - "MigrateResourceRequest" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="MigrateResourceRequest", + migrate_resource_requests: MutableSequence["MigrateResourceRequest"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="MigrateResourceRequest", + ) ) @@ -367,12 +367,12 @@ class 
BatchMigrateResourcesResponse(proto.Message): Successfully migrated resources. """ - migrate_resource_responses: MutableSequence[ - "MigrateResourceResponse" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="MigrateResourceResponse", + migrate_resource_responses: MutableSequence["MigrateResourceResponse"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MigrateResourceResponse", + ) ) diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index 23718d8834..6e7513187d 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import deployed_model_ref -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import env_var from google.cloud.aiplatform_v1beta1.types import explanation from google.protobuf import duration_pb2 # type: ignore @@ -340,6 +342,7 @@ class DeploymentResourcesType(proto.Enum): [DeploymentResourcePool][google.cloud.aiplatform.v1beta1.DeploymentResourcePool] is required. """ + DEPLOYMENT_RESOURCES_TYPE_UNSPECIFIED = 0 DEDICATED_RESOURCES = 1 AUTOMATIC_RESOURCES = 2 @@ -395,6 +398,7 @@ class ExportableContent(proto.Enum): [ExportModelRequest.output_config][google.cloud.aiplatform.v1beta1.ExportModelRequest.output_config] object. 
""" + EXPORTABLE_CONTENT_UNSPECIFIED = 0 ARTIFACT = 1 IMAGE = 2 @@ -403,12 +407,12 @@ class ExportableContent(proto.Enum): proto.STRING, number=1, ) - exportable_contents: MutableSequence[ - "Model.ExportFormat.ExportableContent" - ] = proto.RepeatedField( - proto.ENUM, - number=2, - enum="Model.ExportFormat.ExportableContent", + exportable_contents: MutableSequence["Model.ExportFormat.ExportableContent"] = ( + proto.RepeatedField( + proto.ENUM, + number=2, + enum="Model.ExportFormat.ExportableContent", + ) ) class OriginalModelInfo(proto.Message): @@ -534,12 +538,12 @@ class BaseModelSource(proto.Message): proto.STRING, number=26, ) - supported_deployment_resources_types: MutableSequence[ - DeploymentResourcesType - ] = proto.RepeatedField( - proto.ENUM, - number=10, - enum=DeploymentResourcesType, + supported_deployment_resources_types: MutableSequence[DeploymentResourcesType] = ( + proto.RepeatedField( + proto.ENUM, + number=10, + enum=DeploymentResourcesType, + ) ) supported_input_storage_formats: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -559,12 +563,12 @@ class BaseModelSource(proto.Message): number=14, message=timestamp_pb2.Timestamp, ) - deployed_models: MutableSequence[ - deployed_model_ref.DeployedModelRef - ] = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=deployed_model_ref.DeployedModelRef, + deployed_models: MutableSequence[deployed_model_ref.DeployedModelRef] = ( + proto.RepeatedField( + proto.MESSAGE, + number=15, + message=deployed_model_ref.DeployedModelRef, + ) ) explanation_spec: explanation.ExplanationSpec = proto.Field( proto.MESSAGE, @@ -1149,6 +1153,7 @@ class ModelSourceType(proto.Enum): MARKETPLACE (7): The Model is saved or tuned from Marketplace. 
""" + MODEL_SOURCE_TYPE_UNSPECIFIED = 0 AUTOML = 1 CUSTOM = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/model_deployment_monitoring_job.py b/google/cloud/aiplatform_v1beta1/types/model_deployment_monitoring_job.py index f261aefc74..8890f70aca 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_deployment_monitoring_job.py +++ b/google/cloud/aiplatform_v1beta1/types/model_deployment_monitoring_job.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import feature_monitoring_stats from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import job_state @@ -63,6 +65,7 @@ class ModelDeploymentMonitoringObjectiveType(proto.Enum): between Prediction datasets collected within different time windows. """ + MODEL_DEPLOYMENT_MONITORING_OBJECTIVE_TYPE_UNSPECIFIED = 0 RAW_FEATURE_SKEW = 1 RAW_FEATURE_DRIFT = 2 @@ -218,6 +221,7 @@ class MonitoringScheduleState(proto.Enum): RUNNING (3): The pipeline is running. 
""" + MONITORING_SCHEDULE_STATE_UNSPECIFIED = 0 PENDING = 1 OFFLINE = 2 @@ -280,7 +284,9 @@ class LatestMonitoringPipelineMetadata(proto.Message): number=6, message="ModelDeploymentMonitoringObjectiveConfig", ) - model_deployment_monitoring_schedule_config: "ModelDeploymentMonitoringScheduleConfig" = proto.Field( + model_deployment_monitoring_schedule_config: ( + "ModelDeploymentMonitoringScheduleConfig" + ) = proto.Field( proto.MESSAGE, number=7, message="ModelDeploymentMonitoringScheduleConfig", @@ -310,12 +316,12 @@ class LatestMonitoringPipelineMetadata(proto.Message): proto.STRING, number=16, ) - bigquery_tables: MutableSequence[ - "ModelDeploymentMonitoringBigQueryTable" - ] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="ModelDeploymentMonitoringBigQueryTable", + bigquery_tables: MutableSequence["ModelDeploymentMonitoringBigQueryTable"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=10, + message="ModelDeploymentMonitoringBigQueryTable", + ) ) log_ttl: duration_pb2.Duration = proto.Field( proto.MESSAGE, @@ -402,6 +408,7 @@ class LogSource(proto.Enum): SERVING (2): Logs coming from Serving traffic. """ + LOG_SOURCE_UNSPECIFIED = 0 TRAINING = 1 SERVING = 2 @@ -417,6 +424,7 @@ class LogType(proto.Enum): EXPLAIN (2): Explain logs. 
""" + LOG_TYPE_UNSPECIFIED = 0 PREDICT = 1 EXPLAIN = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py index ad91a51f73..558cc7bf00 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py +++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py @@ -195,12 +195,12 @@ class BiasConfig(proto.Message): number=8, message=explanation.ModelExplanation, ) - explanation_specs: MutableSequence[ - ModelEvaluationExplanationSpec - ] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=ModelEvaluationExplanationSpec, + explanation_specs: MutableSequence[ModelEvaluationExplanationSpec] = ( + proto.RepeatedField( + proto.MESSAGE, + number=9, + message=ModelEvaluationExplanationSpec, + ) ) metadata: struct_pb2.Value = proto.Field( proto.MESSAGE, diff --git a/google/cloud/aiplatform_v1beta1/types/model_garden_service.py b/google/cloud/aiplatform_v1beta1/types/model_garden_service.py index 3bb033057d..8ae4fbadfa 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_garden_service.py +++ b/google/cloud/aiplatform_v1beta1/types/model_garden_service.py @@ -23,7 +23,9 @@ from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import model as gca_model from google.cloud.aiplatform_v1beta1.types import operation -from google.cloud.aiplatform_v1beta1.types import publisher_model as gca_publisher_model +from google.cloud.aiplatform_v1beta1.types import ( + publisher_model as gca_publisher_model, +) __protobuf__ = proto.module( @@ -65,6 +67,7 @@ class PublisherModelView(proto.Enum): Include: VersionId, ModelVersionExternalName, and SupportedActions. 
""" + PUBLISHER_MODEL_VIEW_UNSPECIFIED = 0 PUBLISHER_MODEL_VIEW_BASIC = 1 PUBLISHER_MODEL_VIEW_FULL = 2 @@ -213,12 +216,12 @@ class ListPublisherModelsResponse(proto.Message): def raw_page(self): return self - publisher_models: MutableSequence[ - gca_publisher_model.PublisherModel - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_publisher_model.PublisherModel, + publisher_models: MutableSequence[gca_publisher_model.PublisherModel] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_publisher_model.PublisherModel, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/model_monitor.py b/google/cloud/aiplatform_v1beta1/types/model_monitor.py index ed33b24075..00df78680f 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_monitor.py +++ b/google/cloud/aiplatform_v1beta1/types/model_monitor.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import model_monitoring_spec from google.protobuf import timestamp_pb2 # type: ignore @@ -152,7 +154,9 @@ class VertexModelSource(proto.Message): ) ) - tabular_objective: model_monitoring_spec.ModelMonitoringObjectiveSpec.TabularObjective = proto.Field( + tabular_objective: ( + model_monitoring_spec.ModelMonitoringObjectiveSpec.TabularObjective + ) = proto.Field( proto.MESSAGE, number=11, oneof="default_objective", diff --git a/google/cloud/aiplatform_v1beta1/types/model_monitoring.py b/google/cloud/aiplatform_v1beta1/types/model_monitoring.py index 026fe69329..7cd07145c1 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_monitoring.py +++ b/google/cloud/aiplatform_v1beta1/types/model_monitoring.py @@ -63,12 +63,12 @@ class 
ModelMonitoringConfig(proto.Message): and anomalies. """ - objective_configs: MutableSequence[ - "ModelMonitoringObjectiveConfig" - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message="ModelMonitoringObjectiveConfig", + objective_configs: MutableSequence["ModelMonitoringObjectiveConfig"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message="ModelMonitoringObjectiveConfig", + ) ) alert_config: "ModelMonitoringAlertConfig" = proto.Field( proto.MESSAGE, @@ -217,13 +217,13 @@ class TrainingPredictionSkewDetectionConfig(proto.Message): number=1, message="ThresholdConfig", ) - attribution_score_skew_thresholds: MutableMapping[ - str, "ThresholdConfig" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message="ThresholdConfig", + attribution_score_skew_thresholds: MutableMapping[str, "ThresholdConfig"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="ThresholdConfig", + ) ) default_skew_threshold: "ThresholdConfig" = proto.Field( proto.MESSAGE, @@ -260,13 +260,13 @@ class PredictionDriftDetectionConfig(proto.Message): number=1, message="ThresholdConfig", ) - attribution_score_drift_thresholds: MutableMapping[ - str, "ThresholdConfig" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message="ThresholdConfig", + attribution_score_drift_thresholds: MutableMapping[str, "ThresholdConfig"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="ThresholdConfig", + ) ) default_drift_threshold: "ThresholdConfig" = proto.Field( proto.MESSAGE, @@ -330,6 +330,7 @@ class PredictionFormat(proto.Enum): BIGQUERY (3): Predictions are in BigQuery. 
""" + PREDICTION_FORMAT_UNSPECIFIED = 0 JSONL = 2 BIGQUERY = 3 @@ -356,7 +357,9 @@ class PredictionFormat(proto.Enum): proto.BOOL, number=1, ) - explanation_baseline: "ModelMonitoringObjectiveConfig.ExplanationConfig.ExplanationBaseline" = proto.Field( + explanation_baseline: ( + "ModelMonitoringObjectiveConfig.ExplanationConfig.ExplanationBaseline" + ) = proto.Field( proto.MESSAGE, number=2, message="ModelMonitoringObjectiveConfig.ExplanationConfig.ExplanationBaseline", diff --git a/google/cloud/aiplatform_v1beta1/types/model_monitoring_service.py b/google/cloud/aiplatform_v1beta1/types/model_monitoring_service.py index 0f18edf5ab..9668d07b19 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_monitoring_service.py +++ b/google/cloud/aiplatform_v1beta1/types/model_monitoring_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import model_monitor as gca_model_monitor +from google.cloud.aiplatform_v1beta1.types import ( + model_monitor as gca_model_monitor, +) from google.cloud.aiplatform_v1beta1.types import model_monitoring_alert from google.cloud.aiplatform_v1beta1.types import ( model_monitoring_job as gca_model_monitoring_job, @@ -222,12 +224,12 @@ class ListModelMonitorsResponse(proto.Message): def raw_page(self): return self - model_monitors: MutableSequence[ - gca_model_monitor.ModelMonitor - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model_monitor.ModelMonitor, + model_monitors: MutableSequence[gca_model_monitor.ModelMonitor] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_model_monitor.ModelMonitor, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -460,12 +462,12 @@ class SearchModelMonitoringStatsResponse(proto.Message): def raw_page(self): return self - monitoring_stats: MutableSequence[ - model_monitoring_stats.ModelMonitoringStats - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=model_monitoring_stats.ModelMonitoringStats, + 
monitoring_stats: MutableSequence[model_monitoring_stats.ModelMonitoringStats] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=model_monitoring_stats.ModelMonitoringStats, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/model_monitoring_spec.py b/google/cloud/aiplatform_v1beta1/types/model_monitoring_spec.py index fcab13f678..e11607220c 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_monitoring_spec.py +++ b/google/cloud/aiplatform_v1beta1/types/model_monitoring_spec.py @@ -141,12 +141,16 @@ class DataDriftSpec(proto.Message): proto.STRING, number=3, ) - default_categorical_alert_condition: model_monitoring_alert.ModelMonitoringAlertCondition = proto.Field( + default_categorical_alert_condition: ( + model_monitoring_alert.ModelMonitoringAlertCondition + ) = proto.Field( proto.MESSAGE, number=4, message=model_monitoring_alert.ModelMonitoringAlertCondition, ) - default_numeric_alert_condition: model_monitoring_alert.ModelMonitoringAlertCondition = proto.Field( + default_numeric_alert_condition: ( + model_monitoring_alert.ModelMonitoringAlertCondition + ) = proto.Field( proto.MESSAGE, number=5, message=model_monitoring_alert.ModelMonitoringAlertCondition, @@ -185,7 +189,9 @@ class FeatureAttributionSpec(proto.Message): proto.STRING, number=1, ) - default_alert_condition: model_monitoring_alert.ModelMonitoringAlertCondition = proto.Field( + default_alert_condition: ( + model_monitoring_alert.ModelMonitoringAlertCondition + ) = proto.Field( proto.MESSAGE, number=2, message=model_monitoring_alert.ModelMonitoringAlertCondition, @@ -198,7 +204,9 @@ class FeatureAttributionSpec(proto.Message): number=3, message=model_monitoring_alert.ModelMonitoringAlertCondition, ) - batch_explanation_dedicated_resources: machine_resources.BatchDedicatedResources = proto.Field( + batch_explanation_dedicated_resources: ( + machine_resources.BatchDedicatedResources + ) = proto.Field( proto.MESSAGE, 
number=4, message=machine_resources.BatchDedicatedResources, @@ -230,7 +238,9 @@ class TabularObjective(proto.Message): message="ModelMonitoringObjectiveSpec.DataDriftSpec", ) ) - feature_attribution_spec: "ModelMonitoringObjectiveSpec.FeatureAttributionSpec" = proto.Field( + feature_attribution_spec: ( + "ModelMonitoringObjectiveSpec.FeatureAttributionSpec" + ) = proto.Field( proto.MESSAGE, number=12, message="ModelMonitoringObjectiveSpec.FeatureAttributionSpec", @@ -368,6 +378,7 @@ class DataFormat(proto.Enum): JSONL (3): JsonL files. """ + DATA_FORMAT_UNSPECIFIED = 0 CSV = 1 TF_RECORD = 2 @@ -424,13 +435,17 @@ class ModelMonitoringBigQuerySource(proto.Message): number=1, oneof="data_location", ) - gcs_source: "ModelMonitoringInput.ModelMonitoringDataset.ModelMonitoringGcsSource" = proto.Field( + gcs_source: ( + "ModelMonitoringInput.ModelMonitoringDataset.ModelMonitoringGcsSource" + ) = proto.Field( proto.MESSAGE, number=2, oneof="data_location", message="ModelMonitoringInput.ModelMonitoringDataset.ModelMonitoringGcsSource", ) - bigquery_source: "ModelMonitoringInput.ModelMonitoringDataset.ModelMonitoringBigQuerySource" = proto.Field( + bigquery_source: ( + "ModelMonitoringInput.ModelMonitoringDataset.ModelMonitoringBigQuerySource" + ) = proto.Field( proto.MESSAGE, number=6, oneof="data_location", @@ -588,12 +603,12 @@ class NotificationChannelConfig(proto.Message): proto.BOOL, number=2, ) - notification_channel_configs: MutableSequence[ - NotificationChannelConfig - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=NotificationChannelConfig, + notification_channel_configs: MutableSequence[NotificationChannelConfig] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=NotificationChannelConfig, + ) ) diff --git a/google/cloud/aiplatform_v1beta1/types/model_monitoring_stats.py b/google/cloud/aiplatform_v1beta1/types/model_monitoring_stats.py index 501b5826e7..8914306c3a 100644 --- 
a/google/cloud/aiplatform_v1beta1/types/model_monitoring_stats.py +++ b/google/cloud/aiplatform_v1beta1/types/model_monitoring_stats.py @@ -131,7 +131,9 @@ class DistributionDataValue(proto.Message): number=1, oneof="value", ) - distribution_value: "ModelMonitoringStatsDataPoint.TypedValue.DistributionDataValue" = proto.Field( + distribution_value: ( + "ModelMonitoringStatsDataPoint.TypedValue.DistributionDataValue" + ) = proto.Field( proto.MESSAGE, number=2, oneof="value", diff --git a/google/cloud/aiplatform_v1beta1/types/model_service.py b/google/cloud/aiplatform_v1beta1/types/model_service.py index 15123c2f63..b3969651ab 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_service.py +++ b/google/cloud/aiplatform_v1beta1/types/model_service.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import evaluated_annotation from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import io @@ -984,12 +986,12 @@ class BatchImportEvaluatedAnnotationsRequest(proto.Message): proto.STRING, number=1, ) - evaluated_annotations: MutableSequence[ - evaluated_annotation.EvaluatedAnnotation - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=evaluated_annotation.EvaluatedAnnotation, + evaluated_annotations: MutableSequence[evaluated_annotation.EvaluatedAnnotation] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=evaluated_annotation.EvaluatedAnnotation, + ) ) @@ -1089,12 +1091,12 @@ class ListModelEvaluationsResponse(proto.Message): def raw_page(self): return self - model_evaluations: MutableSequence[ - gca_model_evaluation.ModelEvaluation - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model_evaluation.ModelEvaluation, + model_evaluations: 
MutableSequence[gca_model_evaluation.ModelEvaluation] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_model_evaluation.ModelEvaluation, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -1311,6 +1313,7 @@ class QuotaState(proto.Enum): User does not have enough accelerator quota for the machine type. """ + QUOTA_STATE_UNSPECIFIED = 0 QUOTA_STATE_USER_HAS_QUOTA = 1 QUOTA_STATE_NO_USER_QUOTA = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/nas_job.py b/google/cloud/aiplatform_v1beta1/types/nas_job.py index 05f6ca8bc3..d06658bf0e 100644 --- a/google/cloud/aiplatform_v1beta1/types/nas_job.py +++ b/google/cloud/aiplatform_v1beta1/types/nas_job.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import study from google.protobuf import timestamp_pb2 # type: ignore @@ -263,6 +265,7 @@ class MultiTrialAlgorithm(proto.Enum): The Grid Search Algorithm for Multi-trial Neural Architecture Search (NAS). """ + MULTI_TRIAL_ALGORITHM_UNSPECIFIED = 0 REINFORCEMENT_LEARNING = 1 GRID_SEARCH = 2 @@ -290,6 +293,7 @@ class GoalType(proto.Enum): MINIMIZE (2): Minimize the goal metric. 
""" + GOAL_TYPE_UNSPECIFIED = 0 MAXIMIZE = 1 MINIMIZE = 2 @@ -376,7 +380,9 @@ class TrainTrialSpec(proto.Message): number=3, ) - multi_trial_algorithm: "NasJobSpec.MultiTrialAlgorithmSpec.MultiTrialAlgorithm" = proto.Field( + multi_trial_algorithm: ( + "NasJobSpec.MultiTrialAlgorithmSpec.MultiTrialAlgorithm" + ) = proto.Field( proto.ENUM, number=1, enum="NasJobSpec.MultiTrialAlgorithmSpec.MultiTrialAlgorithm", @@ -507,6 +513,7 @@ class State(proto.Enum): The service will set a NasTrial to INFEASIBLE when it's done but missing the final_measurement. """ + STATE_UNSPECIFIED = 0 REQUESTED = 1 ACTIVE = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/notebook_execution_job.py b/google/cloud/aiplatform_v1beta1/types/notebook_execution_job.py index 9471547ce0..096dc44912 100644 --- a/google/cloud/aiplatform_v1beta1/types/notebook_execution_job.py +++ b/google/cloud/aiplatform_v1beta1/types/notebook_execution_job.py @@ -19,10 +19,14 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import job_state as gca_job_state from google.cloud.aiplatform_v1beta1.types import machine_resources -from google.cloud.aiplatform_v1beta1.types import network_spec as gca_network_spec +from google.cloud.aiplatform_v1beta1.types import ( + network_spec as gca_network_spec, +) from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/notebook_runtime.py b/google/cloud/aiplatform_v1beta1/types/notebook_runtime.py index 6b3833ff45..d641d4b18d 100644 --- a/google/cloud/aiplatform_v1beta1/types/notebook_runtime.py +++ b/google/cloud/aiplatform_v1beta1/types/notebook_runtime.py @@ -19,11 +19,17 @@ import proto # type: ignore -from 
google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import machine_resources -from google.cloud.aiplatform_v1beta1.types import network_spec as gca_network_spec +from google.cloud.aiplatform_v1beta1.types import ( + network_spec as gca_network_spec, +) from google.cloud.aiplatform_v1beta1.types import notebook_euc_config -from google.cloud.aiplatform_v1beta1.types import notebook_idle_shutdown_config +from google.cloud.aiplatform_v1beta1.types import ( + notebook_idle_shutdown_config, +) from google.cloud.aiplatform_v1beta1.types import ( notebook_runtime_template_ref as gca_notebook_runtime_template_ref, ) @@ -55,6 +61,7 @@ class NotebookRuntimeType(proto.Enum): runtime or template with system defined configurations. """ + NOTEBOOK_RUNTIME_TYPE_UNSPECIFIED = 0 USER_DEFINED = 1 ONE_CLICK = 2 @@ -387,6 +394,7 @@ class HealthState(proto.Enum): NotebookRuntime is in unhealthy state. Applies to ACTIVE state. """ + HEALTH_STATE_UNSPECIFIED = 0 HEALTHY = 1 UNHEALTHY = 2 @@ -419,6 +427,7 @@ class RuntimeState(proto.Enum): NotebookRuntime is in invalid state. Cannot be recovered. 
""" + RUNTIME_STATE_UNSPECIFIED = 0 RUNNING = 1 BEING_STARTED = 2 @@ -436,7 +445,9 @@ class RuntimeState(proto.Enum): proto.STRING, number=2, ) - notebook_runtime_template_ref: gca_notebook_runtime_template_ref.NotebookRuntimeTemplateRef = proto.Field( + notebook_runtime_template_ref: ( + gca_notebook_runtime_template_ref.NotebookRuntimeTemplateRef + ) = proto.Field( proto.MESSAGE, number=3, message=gca_notebook_runtime_template_ref.NotebookRuntimeTemplateRef, diff --git a/google/cloud/aiplatform_v1beta1/types/notebook_service.py b/google/cloud/aiplatform_v1beta1/types/notebook_service.py index 2e80e1ab07..f6c6a80bdd 100644 --- a/google/cloud/aiplatform_v1beta1/types/notebook_service.py +++ b/google/cloud/aiplatform_v1beta1/types/notebook_service.py @@ -78,6 +78,7 @@ class NotebookExecutionJobView(proto.Enum): NOTEBOOK_EXECUTION_JOB_VIEW_FULL (2): Includes all fields. """ + NOTEBOOK_EXECUTION_JOB_VIEW_UNSPECIFIED = 0 NOTEBOOK_EXECUTION_JOB_VIEW_BASIC = 1 NOTEBOOK_EXECUTION_JOB_VIEW_FULL = 2 @@ -530,12 +531,12 @@ class ListNotebookRuntimesResponse(proto.Message): def raw_page(self): return self - notebook_runtimes: MutableSequence[ - gca_notebook_runtime.NotebookRuntime - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_notebook_runtime.NotebookRuntime, + notebook_runtimes: MutableSequence[gca_notebook_runtime.NotebookRuntime] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_notebook_runtime.NotebookRuntime, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/notebook_software_config.py b/google/cloud/aiplatform_v1beta1/types/notebook_software_config.py index 5fbac61dec..06df2335f5 100644 --- a/google/cloud/aiplatform_v1beta1/types/notebook_software_config.py +++ b/google/cloud/aiplatform_v1beta1/types/notebook_software_config.py @@ -57,6 +57,7 @@ class PostStartupScriptBehavior(proto.Enum): DOWNLOAD_AND_RUN_EVERY_START (3): No description available. 
""" + POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED = 0 RUN_ONCE = 1 RUN_EVERY_START = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/openapi.py b/google/cloud/aiplatform_v1beta1/types/openapi.py index ba0c005e8f..d74c8faccd 100644 --- a/google/cloud/aiplatform_v1beta1/types/openapi.py +++ b/google/cloud/aiplatform_v1beta1/types/openapi.py @@ -51,6 +51,7 @@ class Type(proto.Enum): OBJECT (6): OpenAPI object type """ + TYPE_UNSPECIFIED = 0 STRING = 1 NUMBER = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/persistent_resource.py b/google/cloud/aiplatform_v1beta1/types/persistent_resource.py index d35679367c..8596b7bd05 100644 --- a/google/cloud/aiplatform_v1beta1/types/persistent_resource.py +++ b/google/cloud/aiplatform_v1beta1/types/persistent_resource.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import service_networking from google.protobuf import timestamp_pb2 # type: ignore @@ -159,6 +161,7 @@ class State(proto.Enum): The UPDATING state indicates the persistent resource is being updated. """ + STATE_UNSPECIFIED = 0 PROVISIONING = 1 RUNNING = 3 diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_failure_policy.py b/google/cloud/aiplatform_v1beta1/types/pipeline_failure_policy.py index 871255fa89..2902a02c0a 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_failure_policy.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_failure_policy.py @@ -49,6 +49,7 @@ class PipelineFailurePolicy(proto.Enum): Indicates that the pipeline should stop scheduling new tasks after a task has failed. 
""" + PIPELINE_FAILURE_POLICY_UNSPECIFIED = 0 PIPELINE_FAILURE_POLICY_FAIL_SLOW = 1 PIPELINE_FAILURE_POLICY_FAIL_FAST = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_job.py b/google/cloud/aiplatform_v1beta1/types/pipeline_job.py index c43d2d93b2..c774364e72 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_job.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_job.py @@ -21,7 +21,9 @@ from google.cloud.aiplatform_v1beta1.types import artifact from google.cloud.aiplatform_v1beta1.types import context -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import pipeline_failure_policy from google.cloud.aiplatform_v1beta1.types import pipeline_state @@ -287,6 +289,7 @@ class TaskResourceUnavailableTimeoutBehavior(proto.Enum): Fall back to on-demand execution if the timeout is reached. """ + TASK_RESOURCE_UNAVAILABLE_TIMEOUT_BEHAVIOR_UNSPECIFIED = 0 FAIL = 1 FALL_BACK_TO_ON_DEMAND = 2 @@ -317,7 +320,9 @@ class DefaultRuntime(proto.Message): This field is a member of `oneof`_ ``runtime_detail``. 
""" - persistent_resource_runtime_detail: "PipelineJob.RuntimeConfig.PersistentResourceRuntimeDetail" = proto.Field( + persistent_resource_runtime_detail: ( + "PipelineJob.RuntimeConfig.PersistentResourceRuntimeDetail" + ) = proto.Field( proto.MESSAGE, number=1, oneof="runtime_detail", @@ -468,12 +473,12 @@ class DefaultRuntime(proto.Message): proto.INT64, number=29, ) - pipeline_task_rerun_configs: MutableSequence[ - "PipelineTaskRerunConfig" - ] = proto.RepeatedField( - proto.MESSAGE, - number=30, - message="PipelineTaskRerunConfig", + pipeline_task_rerun_configs: MutableSequence["PipelineTaskRerunConfig"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=30, + message="PipelineTaskRerunConfig", + ) ) @@ -613,6 +618,7 @@ class State(proto.Enum): specified in the ``condition`` field of [PipelineJob.pipeline_spec][google.cloud.aiplatform.v1beta1.PipelineJob.pipeline_spec]. """ + STATE_UNSPECIFIED = 0 PENDING = 1 RUNNING = 2 @@ -882,12 +888,12 @@ class ArtifactList(proto.Message): Optional. A list of artifact metadata. """ - artifacts: MutableSequence[ - ui_pipeline_spec.RuntimeArtifact - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=ui_pipeline_spec.RuntimeArtifact, + artifacts: MutableSequence[ui_pipeline_spec.RuntimeArtifact] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=ui_pipeline_spec.RuntimeArtifact, + ) ) class Inputs(proto.Message): @@ -900,13 +906,13 @@ class Inputs(proto.Message): Optional. Input parameters. 
""" - artifacts: MutableMapping[ - str, "PipelineTaskRerunConfig.ArtifactList" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message="PipelineTaskRerunConfig.ArtifactList", + artifacts: MutableMapping[str, "PipelineTaskRerunConfig.ArtifactList"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="PipelineTaskRerunConfig.ArtifactList", + ) ) parameter_values: MutableMapping[str, struct_pb2.Value] = proto.MapField( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py index 35a0ebcc9e..0803e9f684 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import operation -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import ( training_pipeline as gca_training_pipeline, ) @@ -193,12 +195,12 @@ class ListTrainingPipelinesResponse(proto.Message): def raw_page(self): return self - training_pipelines: MutableSequence[ - gca_training_pipeline.TrainingPipeline - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_training_pipeline.TrainingPipeline, + training_pipelines: MutableSequence[gca_training_pipeline.TrainingPipeline] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_training_pipeline.TrainingPipeline, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py index 77054a8fd0..5ef7a3bb60 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py @@ -55,6 +55,7 @@ class 
PipelineState(proto.Enum): The pipeline has been stopped, and can be resumed. """ + PIPELINE_STATE_UNSPECIFIED = 0 PIPELINE_STATE_QUEUED = 1 PIPELINE_STATE_PENDING = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/prediction_service.py b/google/cloud/aiplatform_v1beta1/types/prediction_service.py index b761cc9531..ddc6172966 100644 --- a/google/cloud/aiplatform_v1beta1/types/prediction_service.py +++ b/google/cloud/aiplatform_v1beta1/types/prediction_service.py @@ -711,13 +711,13 @@ class ConcurrentExplanation(proto.Message): number=1, message=explanation.Explanation, ) - concurrent_explanations: MutableMapping[ - str, ConcurrentExplanation - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=4, - message=ConcurrentExplanation, + concurrent_explanations: MutableMapping[str, ConcurrentExplanation] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message=ConcurrentExplanation, + ) ) deployed_model_id: str = proto.Field( proto.STRING, @@ -834,12 +834,12 @@ class CountTokensResponse(proto.Message): proto.INT32, number=2, ) - prompt_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=content.ModalityTokenCount, + prompt_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=content.ModalityTokenCount, + ) ) @@ -1017,6 +1017,7 @@ class BlockedReason(proto.Enum): MODEL_ARMOR (5): The user prompt was blocked by Model Armor. 
""" + BLOCKED_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 @@ -1091,26 +1092,26 @@ class UsageMetadata(proto.Message): proto.INT32, number=5, ) - prompt_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=content.ModalityTokenCount, + prompt_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=9, + message=content.ModalityTokenCount, + ) ) - cache_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=content.ModalityTokenCount, + cache_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=10, + message=content.ModalityTokenCount, + ) ) - candidates_tokens_details: MutableSequence[ - content.ModalityTokenCount - ] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=content.ModalityTokenCount, + candidates_tokens_details: MutableSequence[content.ModalityTokenCount] = ( + proto.RepeatedField( + proto.MESSAGE, + number=11, + message=content.ModalityTokenCount, + ) ) candidates: MutableSequence[content.Candidate] = proto.RepeatedField( diff --git a/google/cloud/aiplatform_v1beta1/types/publisher_model.py b/google/cloud/aiplatform_v1beta1/types/publisher_model.py index 9b2dbcfea6..333fe4c304 100644 --- a/google/cloud/aiplatform_v1beta1/types/publisher_model.py +++ b/google/cloud/aiplatform_v1beta1/types/publisher_model.py @@ -100,6 +100,7 @@ class OpenSourceCategory(proto.Enum): Used to indicate the PublisherModel is a 3p-owned pure open source model. """ + OPEN_SOURCE_CATEGORY_UNSPECIFIED = 0 PROPRIETARY = 1 GOOGLE_OWNED_OSS_WITH_GOOGLE_CHECKPOINT = 2 @@ -134,6 +135,7 @@ class LaunchStage(proto.Enum): launch stage, available to all customers and ready for production workload. 
""" + LAUNCH_STAGE_UNSPECIFIED = 0 EXPERIMENTAL = 1 PRIVATE_PREVIEW = 2 @@ -151,6 +153,7 @@ class VersionState(proto.Enum): VERSION_STATE_UNSTABLE (2): Used to indicate the version is unstable. """ + VERSION_STATE_UNSPECIFIED = 0 VERSION_STATE_STABLE = 1 VERSION_STATE_UNSTABLE = 2 @@ -327,13 +330,13 @@ class RegionalResourceReferences(proto.Message): This field is a member of `oneof`_ ``_resource_description``. """ - references: MutableMapping[ - str, "PublisherModel.ResourceReference" - ] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message="PublisherModel.ResourceReference", + references: MutableMapping[str, "PublisherModel.ResourceReference"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="PublisherModel.ResourceReference", + ) ) title: str = proto.Field( proto.STRING, @@ -365,12 +368,12 @@ class ViewRestApi(proto.Message): Required. The title of the view rest API. """ - documentations: MutableSequence[ - "PublisherModel.Documentation" - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="PublisherModel.Documentation", + documentations: MutableSequence["PublisherModel.Documentation"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PublisherModel.Documentation", + ) ) title: str = proto.Field( proto.STRING, @@ -609,18 +612,24 @@ class DeployGke(proto.Message): message="PublisherModel.CallToAction.RegionalResourceReferences", ) ) - open_fine_tuning_pipeline: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_fine_tuning_pipeline: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=4, message="PublisherModel.CallToAction.RegionalResourceReferences", ) - open_fine_tuning_pipelines: "PublisherModel.CallToAction.OpenFineTuningPipelines" = proto.Field( + open_fine_tuning_pipelines: ( + "PublisherModel.CallToAction.OpenFineTuningPipelines" + ) = proto.Field( proto.MESSAGE, number=13, optional=True, 
message="PublisherModel.CallToAction.OpenFineTuningPipelines", ) - open_prompt_tuning_pipeline: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_prompt_tuning_pipeline: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=5, message="PublisherModel.CallToAction.RegionalResourceReferences", @@ -647,7 +656,9 @@ class DeployGke(proto.Message): number=14, message="PublisherModel.CallToAction.DeployGke", ) - open_generation_ai_studio: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_generation_ai_studio: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=8, message="PublisherModel.CallToAction.RegionalResourceReferences", @@ -659,7 +670,9 @@ class DeployGke(proto.Message): message="PublisherModel.CallToAction.RegionalResourceReferences", ) ) - open_evaluation_pipeline: "PublisherModel.CallToAction.RegionalResourceReferences" = proto.Field( + open_evaluation_pipeline: ( + "PublisherModel.CallToAction.RegionalResourceReferences" + ) = proto.Field( proto.MESSAGE, number=11, message="PublisherModel.CallToAction.RegionalResourceReferences", diff --git a/google/cloud/aiplatform_v1beta1/types/reasoning_engine.py b/google/cloud/aiplatform_v1beta1/types/reasoning_engine.py index a5e4604288..e0b797ff57 100644 --- a/google/cloud/aiplatform_v1beta1/types/reasoning_engine.py +++ b/google/cloud/aiplatform_v1beta1/types/reasoning_engine.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import env_var from google.cloud.aiplatform_v1beta1.types import service_networking from google.protobuf import duration_pb2 # type: ignore @@ -435,12 +437,16 @@ class SimilaritySearchConfig(proto.Message): number=1, ) - 
generation_config: "ReasoningEngineContextSpec.MemoryBankConfig.GenerationConfig" = proto.Field( + generation_config: ( + "ReasoningEngineContextSpec.MemoryBankConfig.GenerationConfig" + ) = proto.Field( proto.MESSAGE, number=1, message="ReasoningEngineContextSpec.MemoryBankConfig.GenerationConfig", ) - similarity_search_config: "ReasoningEngineContextSpec.MemoryBankConfig.SimilaritySearchConfig" = proto.Field( + similarity_search_config: ( + "ReasoningEngineContextSpec.MemoryBankConfig.SimilaritySearchConfig" + ) = proto.Field( proto.MESSAGE, number=2, message="ReasoningEngineContextSpec.MemoryBankConfig.SimilaritySearchConfig", diff --git a/google/cloud/aiplatform_v1beta1/types/reasoning_engine_service.py b/google/cloud/aiplatform_v1beta1/types/reasoning_engine_service.py index cc568926ea..7d24957b1a 100644 --- a/google/cloud/aiplatform_v1beta1/types/reasoning_engine_service.py +++ b/google/cloud/aiplatform_v1beta1/types/reasoning_engine_service.py @@ -194,12 +194,12 @@ class ListReasoningEnginesResponse(proto.Message): def raw_page(self): return self - reasoning_engines: MutableSequence[ - gca_reasoning_engine.ReasoningEngine - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_reasoning_engine.ReasoningEngine, + reasoning_engines: MutableSequence[gca_reasoning_engine.ReasoningEngine] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_reasoning_engine.ReasoningEngine, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/reservation_affinity.py b/google/cloud/aiplatform_v1beta1/types/reservation_affinity.py index acf3efa1cd..20f972a36a 100644 --- a/google/cloud/aiplatform_v1beta1/types/reservation_affinity.py +++ b/google/cloud/aiplatform_v1beta1/types/reservation_affinity.py @@ -66,6 +66,7 @@ class Type(proto.Enum): reservation must be identified via the ``key`` and ``values`` fields. 
""" + TYPE_UNSPECIFIED = 0 NO_RESERVATION = 1 ANY_RESERVATION = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/schedule.py b/google/cloud/aiplatform_v1beta1/types/schedule.py index 5b61dc8dfb..1f8c2a41cc 100644 --- a/google/cloud/aiplatform_v1beta1/types/schedule.py +++ b/google/cloud/aiplatform_v1beta1/types/schedule.py @@ -163,6 +163,7 @@ class State(proto.Enum): allowed to complete. Schedules in completed state cannot be paused or resumed. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 PAUSED = 2 @@ -202,13 +203,17 @@ class RunResponse(proto.Message): message=pipeline_service.CreatePipelineJobRequest, ) ) - create_model_monitoring_job_request: model_monitoring_service.CreateModelMonitoringJobRequest = proto.Field( + create_model_monitoring_job_request: ( + model_monitoring_service.CreateModelMonitoringJobRequest + ) = proto.Field( proto.MESSAGE, number=15, oneof="request", message=model_monitoring_service.CreateModelMonitoringJobRequest, ) - create_notebook_execution_job_request: notebook_service.CreateNotebookExecutionJobRequest = proto.Field( + create_notebook_execution_job_request: ( + notebook_service.CreateNotebookExecutionJobRequest + ) = proto.Field( proto.MESSAGE, number=20, oneof="request", diff --git a/google/cloud/aiplatform_v1beta1/types/service_networking.py b/google/cloud/aiplatform_v1beta1/types/service_networking.py index 486cde054d..55003b03ac 100644 --- a/google/cloud/aiplatform_v1beta1/types/service_networking.py +++ b/google/cloud/aiplatform_v1beta1/types/service_networking.py @@ -44,6 +44,7 @@ class PSCAutomationState(proto.Enum): PSC_AUTOMATION_STATE_FAILED (2): The PSC service automation has failed. 
""" + PSC_AUTOMATION_STATE_UNSPECIFIED = 0 PSC_AUTOMATION_STATE_SUCCESSFUL = 1 PSC_AUTOMATION_STATE_FAILED = 2 @@ -138,12 +139,12 @@ class PrivateServiceConnectConfig(proto.Message): proto.STRING, number=2, ) - psc_automation_configs: MutableSequence[ - "PSCAutomationConfig" - ] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message="PSCAutomationConfig", + psc_automation_configs: MutableSequence["PSCAutomationConfig"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message="PSCAutomationConfig", + ) ) enable_secure_private_service_connect: bool = proto.Field( proto.BOOL, diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py index d98b140fbe..846de86eb0 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import operation -from google.cloud.aiplatform_v1beta1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1beta1.types import ( + specialist_pool as gca_specialist_pool, +) from google.protobuf import field_mask_pb2 # type: ignore @@ -152,12 +154,12 @@ class ListSpecialistPoolsResponse(proto.Message): def raw_page(self): return self - specialist_pools: MutableSequence[ - gca_specialist_pool.SpecialistPool - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_specialist_pool.SpecialistPool, + specialist_pools: MutableSequence[gca_specialist_pool.SpecialistPool] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_specialist_pool.SpecialistPool, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/study.py b/google/cloud/aiplatform_v1beta1/types/study.py index b3f69b89de..ee9ca8f791 100644 --- a/google/cloud/aiplatform_v1beta1/types/study.py +++ 
b/google/cloud/aiplatform_v1beta1/types/study.py @@ -77,6 +77,7 @@ class State(proto.Enum): The study is done when the service exhausts the parameter search space or max_trial_count is reached. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 INACTIVE = 2 @@ -201,6 +202,7 @@ class State(proto.Enum): service will set a Trial to INFEASIBLE when it's done but missing the final_measurement. """ + STATE_UNSPECIFIED = 0 REQUESTED = 1 ACTIVE = 2 @@ -435,6 +437,7 @@ class Algorithm(proto.Enum): Simple random search within the feasible space. """ + ALGORITHM_UNSPECIFIED = 0 GRID_SEARCH = 2 RANDOM_SEARCH = 3 @@ -457,6 +460,7 @@ class ObservationNoise(proto.Enum): in metric evaluations, it may repeat the same Trial parameters more than once. """ + OBSERVATION_NOISE_UNSPECIFIED = 0 LOW = 1 HIGH = 2 @@ -483,6 +487,7 @@ class MeasurementSelectionType(proto.Enum): BEST_MEASUREMENT (2): Use the best measurement reported. """ + MEASUREMENT_SELECTION_TYPE_UNSPECIFIED = 0 LAST_MEASUREMENT = 1 BEST_MEASUREMENT = 2 @@ -519,6 +524,7 @@ class GoalType(proto.Enum): MINIMIZE (2): Minimize the goal metric. """ + GOAL_TYPE_UNSPECIFIED = 0 MAXIMIZE = 1 MINIMIZE = 2 @@ -638,6 +644,7 @@ class ScaleType(proto.Enum): The entire feasible space must be strictly positive. 
""" + SCALE_TYPE_UNSPECIFIED = 0 UNIT_LINEAR_SCALE = 1 UNIT_LOG_SCALE = 2 @@ -873,7 +880,9 @@ class CategoricalValueCondition(proto.Message): oneof="parent_value_condition", message="StudySpec.ParameterSpec.ConditionalParameterSpec.DiscreteValueCondition", ) - parent_int_values: "StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition" = proto.Field( + parent_int_values: ( + "StudySpec.ParameterSpec.ConditionalParameterSpec.IntValueCondition" + ) = proto.Field( proto.MESSAGE, number=3, oneof="parent_value_condition", diff --git a/google/cloud/aiplatform_v1beta1/types/tensorboard.py b/google/cloud/aiplatform_v1beta1/types/tensorboard.py index 2d2083a4fd..f617671f1c 100644 --- a/google/cloud/aiplatform_v1beta1/types/tensorboard.py +++ b/google/cloud/aiplatform_v1beta1/types/tensorboard.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/aiplatform_v1beta1/types/tensorboard_service.py b/google/cloud/aiplatform_v1beta1/types/tensorboard_service.py index 721fcad918..2028484604 100644 --- a/google/cloud/aiplatform_v1beta1/types/tensorboard_service.py +++ b/google/cloud/aiplatform_v1beta1/types/tensorboard_service.py @@ -20,12 +20,16 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import operation -from google.cloud.aiplatform_v1beta1.types import tensorboard as gca_tensorboard +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard as gca_tensorboard, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_data from google.cloud.aiplatform_v1beta1.types import ( tensorboard_experiment as gca_tensorboard_experiment, ) -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types 
import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import ( tensorboard_time_series as gca_tensorboard_time_series, ) @@ -593,12 +597,12 @@ class BatchCreateTensorboardRunsResponse(proto.Message): The created TensorboardRuns. """ - tensorboard_runs: MutableSequence[ - gca_tensorboard_run.TensorboardRun - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_tensorboard_run.TensorboardRun, + tensorboard_runs: MutableSequence[gca_tensorboard_run.TensorboardRun] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_tensorboard_run.TensorboardRun, + ) ) @@ -769,12 +773,12 @@ class ListTensorboardRunsResponse(proto.Message): def raw_page(self): return self - tensorboard_runs: MutableSequence[ - gca_tensorboard_run.TensorboardRun - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_tensorboard_run.TensorboardRun, + tensorboard_runs: MutableSequence[gca_tensorboard_run.TensorboardRun] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_tensorboard_run.TensorboardRun, + ) ) next_page_token: str = proto.Field( proto.STRING, @@ -852,12 +856,12 @@ class BatchCreateTensorboardTimeSeriesRequest(proto.Message): proto.STRING, number=1, ) - requests: MutableSequence[ - "CreateTensorboardTimeSeriesRequest" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="CreateTensorboardTimeSeriesRequest", + requests: MutableSequence["CreateTensorboardTimeSeriesRequest"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateTensorboardTimeSeriesRequest", + ) ) @@ -1111,12 +1115,12 @@ class BatchReadTensorboardTimeSeriesDataResponse(proto.Message): The returned time series data. 
""" - time_series_data: MutableSequence[ - tensorboard_data.TimeSeriesData - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=tensorboard_data.TimeSeriesData, + time_series_data: MutableSequence[tensorboard_data.TimeSeriesData] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=tensorboard_data.TimeSeriesData, + ) ) @@ -1187,12 +1191,12 @@ class WriteTensorboardExperimentDataRequest(proto.Message): proto.STRING, number=1, ) - write_run_data_requests: MutableSequence[ - "WriteTensorboardRunDataRequest" - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="WriteTensorboardRunDataRequest", + write_run_data_requests: MutableSequence["WriteTensorboardRunDataRequest"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="WriteTensorboardRunDataRequest", + ) ) @@ -1226,12 +1230,12 @@ class WriteTensorboardRunDataRequest(proto.Message): proto.STRING, number=1, ) - time_series_data: MutableSequence[ - tensorboard_data.TimeSeriesData - ] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=tensorboard_data.TimeSeriesData, + time_series_data: MutableSequence[tensorboard_data.TimeSeriesData] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=tensorboard_data.TimeSeriesData, + ) ) @@ -1313,12 +1317,12 @@ class ExportTensorboardTimeSeriesDataResponse(proto.Message): def raw_page(self): return self - time_series_data_points: MutableSequence[ - tensorboard_data.TimeSeriesDataPoint - ] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=tensorboard_data.TimeSeriesDataPoint, + time_series_data_points: MutableSequence[tensorboard_data.TimeSeriesDataPoint] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=tensorboard_data.TimeSeriesDataPoint, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/tensorboard_time_series.py b/google/cloud/aiplatform_v1beta1/types/tensorboard_time_series.py index ee4978cfaa..41fa832c29 100644 --- 
a/google/cloud/aiplatform_v1beta1/types/tensorboard_time_series.py +++ b/google/cloud/aiplatform_v1beta1/types/tensorboard_time_series.py @@ -90,6 +90,7 @@ class ValueType(proto.Enum): of blob sequences. E.g. set of sample images with labels over epochs/time. """ + VALUE_TYPE_UNSPECIFIED = 0 SCALAR = 1 TENSOR = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/tool.py b/google/cloud/aiplatform_v1beta1/types/tool.py index 2a2ef50044..5ff52d65e9 100644 --- a/google/cloud/aiplatform_v1beta1/types/tool.py +++ b/google/cloud/aiplatform_v1beta1/types/tool.py @@ -150,6 +150,7 @@ class Environment(proto.Enum): ENVIRONMENT_BROWSER (1): Operates in a web browser. """ + ENVIRONMENT_UNSPECIFIED = 0 ENVIRONMENT_BROWSER = 1 @@ -501,6 +502,7 @@ class Language(proto.Enum): Python >= 3.10, with numpy and simpy available. """ + LANGUAGE_UNSPECIFIED = 0 PYTHON = 1 @@ -546,6 +548,7 @@ class Outcome(proto.Enum): cancelled. There may or may not be a partial output present. """ + OUTCOME_UNSPECIFIED = 0 OUTCOME_OK = 1 OUTCOME_FAILED = 2 @@ -844,6 +847,7 @@ class Mode(proto.Enum): Run retrieval only when system decides it is necessary. """ + MODE_UNSPECIFIED = 0 MODE_DYNAMIC = 1 @@ -917,6 +921,7 @@ class Mode(proto.Enum): Model behavior is same as when not passing any function declarations. 
""" + MODE_UNSPECIFIED = 0 AUTO = 1 ANY = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py index afe377e94e..f4bbc335a3 100644 --- a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py +++ b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py @@ -19,7 +19,9 @@ import proto # type: ignore -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import model from google.cloud.aiplatform_v1beta1.types import pipeline_state diff --git a/google/cloud/aiplatform_v1beta1/types/tuning_job.py b/google/cloud/aiplatform_v1beta1/types/tuning_job.py index 3afa7fe522..cc44d01089 100644 --- a/google/cloud/aiplatform_v1beta1/types/tuning_job.py +++ b/google/cloud/aiplatform_v1beta1/types/tuning_job.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import content -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import evaluation_service from google.cloud.aiplatform_v1beta1.types import job_state from google.protobuf import struct_pb2 # type: ignore @@ -796,6 +798,7 @@ class AdapterSize(proto.Enum): ADAPTER_SIZE_THIRTY_TWO (5): Adapter size 32. """ + ADAPTER_SIZE_UNSPECIFIED = 0 ADAPTER_SIZE_ONE = 1 ADAPTER_SIZE_TWO = 6 @@ -866,6 +869,7 @@ class TuningMode(proto.Enum): TUNING_MODE_PEFT_ADAPTER (2): PEFT adapter tuning mode. """ + TUNING_MODE_UNSPECIFIED = 0 TUNING_MODE_FULL = 1 TUNING_MODE_PEFT_ADAPTER = 2 @@ -1120,6 +1124,7 @@ class TuningTask(proto.Enum): TUNING_TASK_T2V (2): Tuning task for text to video. 
""" + TUNING_TASK_UNSPECIFIED = 0 TUNING_TASK_I2V = 1 TUNING_TASK_T2V = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/types.py b/google/cloud/aiplatform_v1beta1/types/types.py index 82c00cfe3f..90c3ba7f52 100644 --- a/google/cloud/aiplatform_v1beta1/types/types.py +++ b/google/cloud/aiplatform_v1beta1/types/types.py @@ -164,6 +164,7 @@ class DataType(proto.Enum): UINT64 (12): No description available. """ + DATA_TYPE_UNSPECIFIED = 0 BOOL = 1 STRING = 2 diff --git a/google/cloud/aiplatform_v1beta1/types/vertex_rag_data.py b/google/cloud/aiplatform_v1beta1/types/vertex_rag_data.py index 3906996e5b..60743323f2 100644 --- a/google/cloud/aiplatform_v1beta1/types/vertex_rag_data.py +++ b/google/cloud/aiplatform_v1beta1/types/vertex_rag_data.py @@ -20,7 +20,9 @@ import proto # type: ignore from google.cloud.aiplatform_v1beta1.types import api_auth as gca_api_auth -from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform_v1beta1.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform_v1beta1.types import io from google.protobuf import timestamp_pb2 # type: ignore @@ -189,7 +191,9 @@ class HybridSearchConfig(proto.Message): message="RagEmbeddingModelConfig.SparseEmbeddingConfig", ) ) - dense_embedding_model_prediction_endpoint: "RagEmbeddingModelConfig.VertexPredictionEndpoint" = proto.Field( + dense_embedding_model_prediction_endpoint: ( + "RagEmbeddingModelConfig.VertexPredictionEndpoint" + ) = proto.Field( proto.MESSAGE, number=2, message="RagEmbeddingModelConfig.VertexPredictionEndpoint", @@ -463,6 +467,7 @@ class State(proto.Enum): RagFile resource is in a problematic state. See ``error_message`` field for details. """ + STATE_UNSPECIFIED = 0 ACTIVE = 1 ERROR = 2 @@ -522,6 +527,7 @@ class State(proto.Enum): RagCorpus is in a problematic situation. See ``error_message`` field for details. 
""" + UNKNOWN = 0 INITIALIZED = 1 ACTIVE = 2 @@ -793,6 +799,7 @@ class RagFileType(proto.Enum): RAG_FILE_TYPE_PDF (2): RagFile type is PDF. """ + RAG_FILE_TYPE_UNSPECIFIED = 0 RAG_FILE_TYPE_TXT = 1 RAG_FILE_TYPE_PDF = 2 diff --git a/noxfile.py b/noxfile.py index f2890692ad..cd1ed86bf1 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,7 +26,7 @@ import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" +BLACK_VERSION = "black==25.1.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "google", "vertexai", "tests", "noxfile.py", "setup.py"] diff --git a/pypi/_vertex_ai_placeholder/version.py b/pypi/_vertex_ai_placeholder/version.py index e3a53fd965..b9b9d0522b 100644 --- a/pypi/_vertex_ai_placeholder/version.py +++ b/pypi/_vertex_ai_placeholder/version.py @@ -15,4 +15,4 @@ # limitations under the License. # -__version__ = "1.111.0" +__version__ = "1.112.0" diff --git a/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1.json index 1f3ada56e2..bf172171bb 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-aiplatform", - "version": "1.111.0" + "version": "1.112.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1beta1.json b/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1beta1.json index 7700b77e69..864052f112 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.aiplatform.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-aiplatform", - "version": "1.111.0" + "version": "1.112.0" }, "snippets": [ { diff --git a/tests/system/aiplatform/e2e_base.py 
b/tests/system/aiplatform/e2e_base.py index 4674421061..cb8e44152b 100644 --- a/tests/system/aiplatform/e2e_base.py +++ b/tests/system/aiplatform/e2e_base.py @@ -182,11 +182,13 @@ def tear_down_resources(self, shared_state: Dict[str, Any]): # Bring all Endpoints to the front of the list # Ensures Models are undeployed first before we attempt deletion shared_state["resources"].sort( - key=lambda r: 1 - if isinstance(r, aiplatform.Endpoint) - or isinstance(r, aiplatform.MatchingEngineIndexEndpoint) - or isinstance(r, aiplatform.Experiment) - else 2 + key=lambda r: ( + 1 + if isinstance(r, aiplatform.Endpoint) + or isinstance(r, aiplatform.MatchingEngineIndexEndpoint) + or isinstance(r, aiplatform.Experiment) + else 2 + ) ) for resource in shared_state["resources"]: diff --git a/tests/system/aiplatform/test_custom_job.py b/tests/system/aiplatform/test_custom_job.py index f48bd9e29a..6969607b05 100644 --- a/tests/system/aiplatform/test_custom_job.py +++ b/tests/system/aiplatform/test_custom_job.py @@ -40,18 +40,22 @@ @mock.patch.object( constants, "AIPLATFORM_DEPENDENCY_PATH", - "google-cloud-aiplatform @ git+https://github.com/googleapis/" - f"python-aiplatform.git@{os.environ['KOKORO_GIT_COMMIT']}#egg=google-cloud-aiplatform" - if os.environ.get("KOKORO_GIT_COMMIT") - else constants.AIPLATFORM_DEPENDENCY_PATH, + ( + "google-cloud-aiplatform @ git+https://github.com/googleapis/" + f"python-aiplatform.git@{os.environ['KOKORO_GIT_COMMIT']}#egg=google-cloud-aiplatform" + if os.environ.get("KOKORO_GIT_COMMIT") + else constants.AIPLATFORM_DEPENDENCY_PATH + ), ) @mock.patch.object( constants, "AIPLATFORM_AUTOLOG_DEPENDENCY_PATH", - "google-cloud-aiplatform[autologging] @ git+https://github.com/googleapis/" - f"python-aiplatform.git@{os.environ['KOKORO_GIT_COMMIT']}#egg=google-cloud-aiplatform" - if os.environ.get("KOKORO_GIT_COMMIT") - else constants.AIPLATFORM_AUTOLOG_DEPENDENCY_PATH, + ( + "google-cloud-aiplatform[autologging] @ git+https://github.com/googleapis/" + 
f"python-aiplatform.git@{os.environ['KOKORO_GIT_COMMIT']}#egg=google-cloud-aiplatform" + if os.environ.get("KOKORO_GIT_COMMIT") + else constants.AIPLATFORM_AUTOLOG_DEPENDENCY_PATH + ), ) @pytest.mark.usefixtures( "prepare_staging_bucket", "delete_staging_bucket", "tear_down_resources" diff --git a/tests/system/aiplatform/test_featurestore.py b/tests/system/aiplatform/test_featurestore.py index 56013aef6b..8aa1bd6f2f 100644 --- a/tests/system/aiplatform/test_featurestore.py +++ b/tests/system/aiplatform/test_featurestore.py @@ -163,9 +163,9 @@ def test_create_get_list_features(self, shared_state): value_type="STRING", entity_type_name=user_entity_type_name, ) - shared_state[ - "user_gender_feature_resource_name" - ] = user_gender_feature.resource_name + shared_state["user_gender_feature_resource_name"] = ( + user_gender_feature.resource_name + ) get_user_gender_feature = aiplatform.Feature( feature_name=user_gender_feature.resource_name @@ -178,9 +178,9 @@ def test_create_get_list_features(self, shared_state): feature_id=_TEST_USER_LIKED_GENRES_FEATURE_ID, value_type="STRING_ARRAY", ) - shared_state[ - "user_liked_genres_feature_resource_name" - ] = user_liked_genres_feature.resource_name + shared_state["user_liked_genres_feature_resource_name"] = ( + user_liked_genres_feature.resource_name + ) get_user_liked_genres_feature = aiplatform.Feature( feature_name=user_liked_genres_feature.resource_name diff --git a/tests/unit/aiplatform/conftest.py b/tests/unit/aiplatform/conftest.py index 7976049498..6ce8bbcd54 100644 --- a/tests/unit/aiplatform/conftest.py +++ b/tests/unit/aiplatform/conftest.py @@ -411,11 +411,13 @@ def make_training_pipeline(state, add_training_task_metadata=True): training_task_inputs={ "tensorboard": test_constants.TrainingJobConstants._TEST_TENSORBOARD_RESOURCE_NAME }, - training_task_metadata={ - "backingCustomJob": test_constants.TrainingJobConstants._TEST_CUSTOM_JOB_RESOURCE_NAME - } - if add_training_task_metadata - else None, + 
training_task_metadata=( + { + "backingCustomJob": test_constants.TrainingJobConstants._TEST_CUSTOM_JOB_RESOURCE_NAME + } + if add_training_task_metadata + else None + ), ) diff --git a/tests/unit/aiplatform/test_automl_tabular_training_jobs.py b/tests/unit/aiplatform/test_automl_tabular_training_jobs.py index b5eac2d6d2..b3cd3cc609 100644 --- a/tests/unit/aiplatform/test_automl_tabular_training_jobs.py +++ b/tests/unit/aiplatform/test_automl_tabular_training_jobs.py @@ -912,15 +912,15 @@ def test_run_call_pipeline_service_create_with_column_specs_not_auto( dataset=mock_dataset_tabular_alternative, target_column=_TEST_TRAINING_TARGET_COLUMN, ) - column_specs[ - _TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[0] - ] = training_jobs.AutoMLTabularTrainingJob.column_data_types.NUMERIC - column_specs[ - _TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[1] - ] = training_jobs.AutoMLTabularTrainingJob.column_data_types.CATEGORICAL - column_specs[ - _TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[2] - ] = training_jobs.AutoMLTabularTrainingJob.column_data_types.TEXT + column_specs[_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[0]] = ( + training_jobs.AutoMLTabularTrainingJob.column_data_types.NUMERIC + ) + column_specs[_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[1]] = ( + training_jobs.AutoMLTabularTrainingJob.column_data_types.CATEGORICAL + ) + column_specs[_TEST_TRAINING_COLUMN_NAMES_ALTERNATIVE[2]] = ( + training_jobs.AutoMLTabularTrainingJob.column_data_types.TEXT + ) job = training_jobs.AutoMLTabularTrainingJob( display_name=_TEST_DISPLAY_NAME, diff --git a/tests/unit/aiplatform/test_custom_job.py b/tests/unit/aiplatform/test_custom_job.py index 75db082ea3..f51ec9c948 100644 --- a/tests/unit/aiplatform/test_custom_job.py +++ b/tests/unit/aiplatform/test_custom_job.py @@ -570,9 +570,9 @@ def create_custom_job_mock_with_psc_interface_config(): _EXPERIMENT_MOCK = copy.deepcopy(_EXPERIMENT_MOCK) -_EXPERIMENT_MOCK.metadata[ - constants._BACKING_TENSORBOARD_RESOURCE_KEY -] = _TEST_TENSORBOARD_NAME 
+_EXPERIMENT_MOCK.metadata[constants._BACKING_TENSORBOARD_RESOURCE_KEY] = ( + _TEST_TENSORBOARD_NAME +) _EXPERIMENT_RUN_MOCK = copy.deepcopy(_EXPERIMENT_RUN_MOCK) diff --git a/tests/unit/aiplatform/test_docker_utils.py b/tests/unit/aiplatform/test_docker_utils.py index ac8cfd124b..93de801292 100644 --- a/tests/unit/aiplatform/test_docker_utils.py +++ b/tests/unit/aiplatform/test_docker_utils.py @@ -155,9 +155,9 @@ def test_run_prediction_container_with_all_parameters( environment[prediction.AIP_HEALTH_ROUTE] = serving_container_health_route environment[prediction.AIP_PREDICT_ROUTE] = serving_container_predict_route environment[prediction.AIP_STORAGE_URI] = artifact_uri - environment[ - run._ADC_ENVIRONMENT_VARIABLE - ] = run._DEFAULT_CONTAINER_CRED_KEY_PATH + environment[run._ADC_ENVIRONMENT_VARIABLE] = ( + run._DEFAULT_CONTAINER_CRED_KEY_PATH + ) volumes = [f"{credential_path}:{run._DEFAULT_CONTAINER_CRED_KEY_PATH}"] run.run_prediction_container( @@ -390,9 +390,9 @@ def test_run_prediction_container_artifact_uri_is_local_path_default_workdir( environment[prediction.AIP_HEALTH_ROUTE] = None environment[prediction.AIP_PREDICT_ROUTE] = None environment[prediction.AIP_STORAGE_URI] = utils.DEFAULT_MOUNTED_MODEL_DIRECTORY - environment[ - run._ADC_ENVIRONMENT_VARIABLE - ] = run._DEFAULT_CONTAINER_CRED_KEY_PATH + environment[run._ADC_ENVIRONMENT_VARIABLE] = ( + run._DEFAULT_CONTAINER_CRED_KEY_PATH + ) credential_path = tmp_path / "key.json" credential_path.write_text("") volumes = [ @@ -428,9 +428,9 @@ def test_run_prediction_container_artifact_uri_is_local_path_but_not_exists( environment[prediction.AIP_HEALTH_ROUTE] = None environment[prediction.AIP_PREDICT_ROUTE] = None environment[prediction.AIP_STORAGE_URI] = utils.DEFAULT_WORKDIR - environment[ - run._ADC_ENVIRONMENT_VARIABLE - ] = run._DEFAULT_CONTAINER_CRED_KEY_PATH + environment[run._ADC_ENVIRONMENT_VARIABLE] = ( + run._DEFAULT_CONTAINER_CRED_KEY_PATH + ) credential_path = tmp_path / "key.json" 
credential_path.write_text("") expected_message = ( diff --git a/tests/unit/aiplatform/test_end_to_end.py b/tests/unit/aiplatform/test_end_to_end.py index 7a5ccef347..85f127a1f6 100644 --- a/tests/unit/aiplatform/test_end_to_end.py +++ b/tests/unit/aiplatform/test_end_to_end.py @@ -95,11 +95,13 @@ def make_training_pipeline(state, add_training_task_metadata=True): training_task_inputs={ "tensorboard": test_constants.TrainingJobConstants._TEST_TENSORBOARD_RESOURCE_NAME }, - training_task_metadata={ - "backingCustomJob": test_constants.TrainingJobConstants._TEST_CUSTOM_JOB_RESOURCE_NAME - } - if add_training_task_metadata - else None, + training_task_metadata=( + { + "backingCustomJob": test_constants.TrainingJobConstants._TEST_CUSTOM_JOB_RESOURCE_NAME + } + if add_training_task_metadata + else None + ), ) diff --git a/tests/unit/aiplatform/test_endpoints.py b/tests/unit/aiplatform/test_endpoints.py index 372d00cd26..60a8726857 100644 --- a/tests/unit/aiplatform/test_endpoints.py +++ b/tests/unit/aiplatform/test_endpoints.py @@ -269,9 +269,9 @@ _TEST_LABELS = {"my_key": "my_value"} _TEST_REQUEST_RESPONSE_LOGGING_SAMPLING_RATE = 0.1 -_TEST_REQUEST_RESPONSE_LOGGING_BQ_DEST = ( - output_uri -) = f"bq://{_TEST_PROJECT}/test_dataset/test_table" +_TEST_REQUEST_RESPONSE_LOGGING_BQ_DEST = output_uri = ( + f"bq://{_TEST_PROJECT}/test_dataset/test_table" +) _TEST_REQUEST_RESPONSE_LOGGING_CONFIG = ( gca_endpoint.PredictRequestResponseLoggingConfig( enabled=True, diff --git a/tests/unit/aiplatform/test_explain_lit.py b/tests/unit/aiplatform/test_explain_lit.py index 68f8f3e65b..a2d727a120 100644 --- a/tests/unit/aiplatform/test_explain_lit.py +++ b/tests/unit/aiplatform/test_explain_lit.py @@ -19,6 +19,7 @@ import os import pandas as pd import pytest +import sys import tensorflow as tf from google.auth import credentials as auth_credentials @@ -296,6 +297,7 @@ def predict_client_explain_list_mock(): class TestExplainLit: + def setup_method(self): reload(initializer) 
reload(aiplatform) @@ -336,6 +338,10 @@ def test_create_lit_model_from_tensorflow_returns_model(self, set_up_sequential) assert item.keys() == {"label"} assert len(item.values()) == 1 + @pytest.mark.skipif( + sys.version_info < (3, 11), + reason=("temporarily skipped due to failures in python 3.9 and 3.10"), + ) @mock.patch.dict(os.environ, {"LIT_PROXY_URL": "auto"}) @pytest.mark.usefixtures( "sampled_shapley_explainer_mock", "load_model_from_local_path_mock" @@ -568,7 +574,10 @@ def test_create_lit_model_from_list_endpoint_name_with_xai_returns_model( @pytest.mark.usefixtures("init_lit_widget_mock") def test_open_lit( - self, set_up_sequential, set_up_pandas_dataframe_and_columns, widget_render_mock + self, + set_up_sequential, + set_up_pandas_dataframe_and_columns, + widget_render_mock, ): pd_dataset, lit_columns = set_up_pandas_dataframe_and_columns lit_dataset = create_lit_dataset(pd_dataset, lit_columns) @@ -580,7 +589,10 @@ def test_open_lit( @pytest.mark.usefixtures("init_lit_widget_mock") def test_set_up_and_open_lit( - self, set_up_sequential, set_up_pandas_dataframe_and_columns, widget_render_mock + self, + set_up_sequential, + set_up_pandas_dataframe_and_columns, + widget_render_mock, ): pd_dataset, lit_columns = set_up_pandas_dataframe_and_columns feature_types, label_types, saved_model_path = set_up_sequential @@ -608,13 +620,20 @@ def test_set_up_and_open_lit( widget_render_mock.assert_called_once() + @pytest.mark.skipif( + sys.version_info < (3, 11), + reason=("temporarily skipped due to failures in python 3.9 and 3.10"), + ) @pytest.mark.usefixtures("init_lit_widget_mock") @mock.patch.dict(os.environ, {"LIT_PROXY_URL": "auto"}) @pytest.mark.usefixtures( "sampled_shapley_explainer_mock", "load_model_from_local_path_mock" ) def test_set_up_and_open_lit_with_xai( - self, set_up_sequential, set_up_pandas_dataframe_and_columns, widget_render_mock + self, + set_up_sequential, + set_up_pandas_dataframe_and_columns, + widget_render_mock, ): pd_dataset, 
lit_columns = set_up_pandas_dataframe_and_columns feature_types, label_types, saved_model_path = set_up_sequential diff --git a/tests/unit/aiplatform/test_matching_engine_index_endpoint.py b/tests/unit/aiplatform/test_matching_engine_index_endpoint.py index 60d845c7bd..61db76ba18 100644 --- a/tests/unit/aiplatform/test_matching_engine_index_endpoint.py +++ b/tests/unit/aiplatform/test_matching_engine_index_endpoint.py @@ -1490,11 +1490,13 @@ def test_private_service_access_hybrid_search_match_queries( i ].sparse_embedding_dimensions, ), - rrf=match_service_pb2.MatchRequest.RRF( - alpha=_TEST_HYBRID_QUERIES[i].rrf_ranking_alpha, - ) - if _TEST_HYBRID_QUERIES[i].rrf_ranking_alpha - else None, + rrf=( + match_service_pb2.MatchRequest.RRF( + alpha=_TEST_HYBRID_QUERIES[i].rrf_ranking_alpha, + ) + if _TEST_HYBRID_QUERIES[i].rrf_ranking_alpha + else None + ), ) for i in range(len(_TEST_HYBRID_QUERIES)) ], diff --git a/tests/unit/aiplatform/test_metadata.py b/tests/unit/aiplatform/test_metadata.py index 9fcdcf5151..0df0aba8f9 100644 --- a/tests/unit/aiplatform/test_metadata.py +++ b/tests/unit/aiplatform/test_metadata.py @@ -2011,9 +2011,9 @@ def test_end_run( aiplatform.end_run() _TRUE_CONTEXT = copy.deepcopy(_EXPERIMENT_RUN_MOCK) - _TRUE_CONTEXT.metadata[ - constants._STATE_KEY - ] = gca_execution.Execution.State.COMPLETE.name + _TRUE_CONTEXT.metadata[constants._STATE_KEY] = ( + gca_execution.Execution.State.COMPLETE.name + ) update_context_mock.assert_called_once_with(context=_TRUE_CONTEXT) diff --git a/tests/unit/aiplatform/test_model_monitoring.py b/tests/unit/aiplatform/test_model_monitoring.py index d74c012a19..c80bc7ed4e 100644 --- a/tests/unit/aiplatform/test_model_monitoring.py +++ b/tests/unit/aiplatform/test_model_monitoring.py @@ -73,11 +73,11 @@ def test_skew_config_proto_value(self, data_source, data_format, skew_thresholds ) else: expected_gapic_proto = gca_model_monitoring.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig( - 
default_skew_threshold=gca_model_monitoring.ThresholdConfig( - value=skew_thresholds - ) - if skew_thresholds is not None - else None, + default_skew_threshold=( + gca_model_monitoring.ThresholdConfig(value=skew_thresholds) + if skew_thresholds is not None + else None + ), attribution_score_skew_thresholds={ key: gca_model_monitoring.ThresholdConfig(value=val) for key, val in attribute_skew_thresholds.items() diff --git a/tests/unit/aiplatform/test_models.py b/tests/unit/aiplatform/test_models.py index a19ffdf6f6..c8c7142476 100644 --- a/tests/unit/aiplatform/test_models.py +++ b/tests/unit/aiplatform/test_models.py @@ -1605,7 +1605,7 @@ def test_upload_raises_with_impartial_explanation_spec(self): display_name=_TEST_MODEL_NAME, artifact_uri=_TEST_ARTIFACT_URI, serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE, - explanation_metadata=_TEST_EXPLANATION_METADATA + explanation_metadata=_TEST_EXPLANATION_METADATA, # Missing the required explanations_parameters field ) diff --git a/tests/unit/aiplatform/test_uploader.py b/tests/unit/aiplatform/test_uploader.py index c925384fcd..aa46b49f71 100644 --- a/tests/unit/aiplatform/test_uploader.py +++ b/tests/unit/aiplatform/test_uploader.py @@ -31,12 +31,16 @@ from google.api_core import datetime_helpers from google.cloud import storage -from google.cloud.aiplatform.compat.services import tensorboard_service_client +from google.cloud.aiplatform.compat.services import ( + tensorboard_service_client, +) from google.cloud.aiplatform.compat.types import tensorboard_data from google.cloud.aiplatform.compat.types import ( tensorboard_experiment as tensorboard_experiment_type, ) -from google.cloud.aiplatform.compat.types import tensorboard_run as tensorboard_run_type +from google.cloud.aiplatform.compat.types import ( + tensorboard_run as tensorboard_run_type, +) from google.cloud.aiplatform.compat.types import tensorboard_service from google.cloud.aiplatform.compat.types import ( tensorboard_time_series as 
tensorboard_time_series_type, @@ -49,7 +53,9 @@ from google.cloud.aiplatform.tensorboard import uploader as uploader_lib from google.cloud.aiplatform.tensorboard import uploader_constants from google.cloud.aiplatform.tensorboard import uploader_utils -from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader +from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import ( + profile_uploader, +) from google.cloud.aiplatform_v1.services.tensorboard_service.transports import ( grpc as transports_grpc, ) @@ -1359,7 +1365,7 @@ def test_filter_graphs( bytes_2 = b"\x0a\x7fbogus" logdir = self.get_temp_dir() - for (i, b) in enumerate([bytes_0, bytes_1, bytes_2]): + for i, b in enumerate([bytes_0, bytes_1, bytes_2]): run_dir = os.path.join(logdir, "run_%04d" % i) event = event_pb2.Event(step=0, wall_time=123 * i, graph_def=b) with FileWriter(run_dir) as writer: diff --git a/tests/unit/gapic/aiplatform_v1/test_data_foundry_service.py b/tests/unit/gapic/aiplatform_v1/test_data_foundry_service.py index b35c74ef85..9a109f767e 100644 --- a/tests/unit/gapic/aiplatform_v1/test_data_foundry_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_data_foundry_service.py @@ -68,7 +68,9 @@ from google.cloud.aiplatform_v1.services.data_foundry_service import ( DataFoundryServiceClient, ) -from google.cloud.aiplatform_v1.services.data_foundry_service import transports +from google.cloud.aiplatform_v1.services.data_foundry_service import ( + transports, +) from google.cloud.aiplatform_v1.types import content from google.cloud.aiplatform_v1.types import data_foundry_service from google.cloud.aiplatform_v1.types import tool @@ -1812,9 +1814,9 @@ def test_generate_synthetic_data_rest_call_success(request_type): def test_generate_synthetic_data_rest_interceptors(null_interceptor): transport = transports.DataFoundryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.DataFoundryServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DataFoundryServiceRestInterceptor() + ), ) client = DataFoundryServiceClient(transport=transport) @@ -2625,9 +2627,11 @@ async def test_generate_synthetic_data_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncDataFoundryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDataFoundryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDataFoundryServiceRestInterceptor() + ), ) client = DataFoundryServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_dataset_service.py b/tests/unit/gapic/aiplatform_v1/test_dataset_service.py index b11afe6e4a..d916d3b153 100644 --- a/tests/unit/gapic/aiplatform_v1/test_dataset_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_dataset_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1.services.dataset_service import ( DatasetServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.dataset_service import DatasetServiceClient +from google.cloud.aiplatform_v1.services.dataset_service import ( + DatasetServiceClient, +) from google.cloud.aiplatform_v1.services.dataset_service import pagers from google.cloud.aiplatform_v1.services.dataset_service import transports from google.cloud.aiplatform_v1.types import annotation @@ -79,7 +81,9 @@ from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import dataset_version -from google.cloud.aiplatform_v1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1.types import ( + dataset_version as gca_dataset_version, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types 
import operation as gca_operation @@ -3849,9 +3853,9 @@ def test_create_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_dataset_version] = ( + mock_rpc + ) request = {} client.create_dataset_version(request) @@ -4217,9 +4221,9 @@ def test_update_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_dataset_version] = ( + mock_rpc + ) request = {} client.update_dataset_version(request) @@ -4579,9 +4583,9 @@ def test_delete_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_dataset_version] = ( + mock_rpc + ) request = {} client.delete_dataset_version(request) @@ -4940,9 +4944,9 @@ def test_get_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_dataset_version] = ( + mock_rpc + ) request = {} client.get_dataset_version(request) @@ -5301,9 +5305,9 @@ def test_list_dataset_versions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_dataset_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_dataset_versions] = ( + mock_rpc + ) request = {} client.list_dataset_versions(request) @@ -6727,9 +6731,9 @@ def test_search_data_items_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.search_data_items - ] = mock_rpc + client._transport._wrapped_methods[client._transport.search_data_items] = ( + mock_rpc + ) request = {} client.search_data_items(request) @@ -7190,9 +7194,9 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_saved_queries - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_saved_queries] = ( + mock_rpc + ) request = {} client.list_saved_queries(request) @@ -7731,9 +7735,9 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_saved_query - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_saved_query] = ( + mock_rpc + ) request = {} client.delete_saved_query(request) @@ -8084,9 +8088,9 @@ def test_get_annotation_spec_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_annotation_spec - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_annotation_spec] = ( + mock_rpc + ) request = {} client.get_annotation_spec(request) @@ -8430,9 +8434,9 @@ def test_list_annotations_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_annotations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_annotations] = ( + mock_rpc + ) request = {} client.list_annotations(request) @@ -10267,9 +10271,9 @@ def test_create_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_dataset_version] = ( + mock_rpc + ) request = {} client.create_dataset_version(request) @@ -10462,9 +10466,9 @@ def test_update_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_dataset_version] = ( + mock_rpc + ) request = {} client.update_dataset_version(request) @@ -10657,9 +10661,9 @@ def test_delete_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_dataset_version] = ( + mock_rpc + ) request = {} client.delete_dataset_version(request) @@ -10840,9 +10844,9 @@ def test_get_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_dataset_version] = ( + mock_rpc + ) request = {} client.get_dataset_version(request) @@ -11027,9 +11031,9 @@ def test_list_dataset_versions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_dataset_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_dataset_versions] = ( + mock_rpc + ) request = {} client.list_dataset_versions(request) @@ -11745,9 +11749,9 @@ def test_search_data_items_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.search_data_items - ] = mock_rpc + client._transport._wrapped_methods[client._transport.search_data_items] = ( + mock_rpc + ) request = {} client.search_data_items(request) @@ -11969,9 +11973,9 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_saved_queries - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_saved_queries] = ( + mock_rpc + ) request = {} client.list_saved_queries(request) @@ -12239,9 +12243,9 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_saved_query - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_saved_query] = ( + mock_rpc + ) request = {} client.delete_saved_query(request) @@ -12422,9 +12426,9 @@ def test_get_annotation_spec_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_annotation_spec - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_annotation_spec] = ( + mock_rpc + ) request = {} client.get_annotation_spec(request) @@ -12606,9 +12610,9 @@ def test_list_annotations_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_annotations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_annotations] = ( + mock_rpc + ) request = {} client.list_annotations(request) @@ -14116,9 +14120,9 @@ def get_message_fields(field): def test_create_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -14261,9 +14265,9 @@ def test_get_dataset_rest_call_success(request_type): def test_get_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -14515,9 +14519,9 @@ def get_message_fields(field): def test_update_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -14642,9 +14646,9 @@ def test_list_datasets_rest_call_success(request_type): def test_list_datasets_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -14768,9 +14772,9 @@ def test_delete_dataset_rest_call_success(request_type): def test_delete_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -14889,9 +14893,9 @@ def test_import_data_rest_call_success(request_type): def test_import_data_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15010,9 +15014,9 @@ def test_export_data_rest_call_success(request_type): def test_export_data_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15221,9 +15225,9 @@ def get_message_fields(field): def test_create_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -15459,9 +15463,9 @@ def get_message_fields(field): def test_update_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15587,9 +15591,9 @@ def test_delete_dataset_version_rest_call_success(request_type): def test_delete_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15733,9 +15737,9 @@ def test_get_dataset_version_rest_call_success(request_type): def test_get_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15863,9 +15867,9 @@ def test_list_dataset_versions_rest_call_success(request_type): def test_list_dataset_versions_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -15994,9 +15998,9 @@ def test_restore_dataset_version_rest_call_success(request_type): def test_restore_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16124,9 +16128,9 @@ def test_list_data_items_rest_call_success(request_type): def test_list_data_items_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16256,9 +16260,9 @@ def test_search_data_items_rest_call_success(request_type): def test_search_data_items_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16388,9 +16392,9 @@ def test_list_saved_queries_rest_call_success(request_type): def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -16519,9 +16523,9 @@ def test_delete_saved_query_rest_call_success(request_type): def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16657,9 +16661,9 @@ def test_get_annotation_spec_rest_call_success(request_type): def test_get_annotation_spec_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16791,9 +16795,9 @@ def test_list_annotations_rest_call_success(request_type): def test_list_annotations_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -18098,9 +18102,11 @@ async def test_create_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -18262,9 
+18268,11 @@ async def test_get_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -18532,9 +18540,11 @@ async def test_update_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -18676,9 +18686,11 @@ async def test_list_datasets_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -18819,9 +18831,11 @@ async def test_delete_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -18959,9 +18973,11 @@ async def test_import_data_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19098,9 +19114,11 @@ async def test_export_data_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19325,9 +19343,11 @@ async def test_create_dataset_version_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19579,9 +19599,11 @@ async def test_update_dataset_version_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19723,9 +19745,11 @@ async def test_delete_dataset_version_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19885,9 +19909,11 @@ async def test_get_dataset_version_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20031,9 +20057,11 @@ async def test_list_dataset_versions_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20178,9 +20206,11 @@ async def test_restore_dataset_version_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20324,9 +20354,11 @@ async def test_list_data_items_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20473,9 +20505,11 @@ async def test_search_data_items_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20622,9 +20656,11 @@ async def test_list_saved_queries_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20769,9 +20805,11 @@ async def test_delete_saved_query_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20923,9 +20961,11 @@ async def test_get_annotation_spec_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21073,9 +21113,11 @@ 
async def test_list_annotations_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_deployment_resource_pool_service.py b/tests/unit/gapic/aiplatform_v1/test_deployment_resource_pool_service.py index c8297cd456..e3a20d2892 100644 --- a/tests/unit/gapic/aiplatform_v1/test_deployment_resource_pool_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_deployment_resource_pool_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import ( DeploymentResourcePoolServiceClient, ) -from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import pagers +from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import ( + pagers, +) from google.cloud.aiplatform_v1.services.deployment_resource_pool_service import ( transports, ) @@ -3361,9 +3363,9 @@ def test_query_deployed_models_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.query_deployed_models - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_deployed_models] = ( + mock_rpc + ) request = {} client.query_deployed_models(request) @@ -4915,9 +4917,9 @@ def test_query_deployed_models_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.query_deployed_models - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_deployed_models] = ( + mock_rpc + ) request = {} client.query_deployed_models(request) @@ -5691,9 +5693,11 @@ def test_create_deployment_resource_pool_rest_call_success(request_type): def test_create_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -5837,9 +5841,11 @@ def test_get_deployment_resource_pool_rest_call_success(request_type): def test_get_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -5980,9 +5986,11 @@ def test_list_deployment_resource_pools_rest_call_success(request_type): def test_list_deployment_resource_pools_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -6221,9 
+6229,11 @@ def get_message_fields(field): def test_update_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -6353,9 +6363,11 @@ def test_delete_deployment_resource_pool_rest_call_success(request_type): def test_delete_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -6497,9 +6509,11 @@ def test_query_deployed_models_rest_call_success(request_type): def test_query_deployed_models_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -7450,9 +7464,11 @@ async def test_create_deployment_resource_pool_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -7612,9 +7628,11 @@ async def test_get_deployment_resource_pool_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -7773,9 +7791,11 @@ async def test_list_deployment_resource_pools_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -8032,9 +8052,11 @@ async def test_update_deployment_resource_pool_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -8182,9 +8204,11 @@ async def test_delete_deployment_resource_pool_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -8342,9 +8366,11 @@ async def test_query_deployed_models_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py index f33e2e5c45..723e8fd066 100644 --- a/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1.services.endpoint_service import ( EndpointServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.endpoint_service import EndpointServiceClient +from google.cloud.aiplatform_v1.services.endpoint_service import ( + EndpointServiceClient, +) from google.cloud.aiplatform_v1.services.endpoint_service import pagers from google.cloud.aiplatform_v1.services.endpoint_service import transports from google.cloud.aiplatform_v1.types import accelerator_type @@ -4275,9 +4277,9 @@ def test_mutate_deployed_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_model] = ( + mock_rpc + ) request = {} client.mutate_deployed_model(request) @@ -6190,9 +6192,9 @@ def test_mutate_deployed_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_model] = ( + mock_rpc + ) request = {} client.mutate_deployed_model(request) @@ -7230,9 +7232,9 @@ def get_message_fields(field): def test_create_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -7382,9 +7384,9 @@ def test_get_endpoint_rest_call_success(request_type): def test_get_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -7509,9 +7511,9 @@ def test_list_endpoints_rest_call_success(request_type): def test_list_endpoints_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -7884,9 +7886,9 @@ def get_message_fields(field): def test_update_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8009,9 +8011,9 @@ def test_update_endpoint_long_running_rest_call_success(request_type): def test_update_endpoint_long_running_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8133,9 +8135,9 @@ def test_delete_endpoint_rest_call_success(request_type): def test_delete_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8256,9 +8258,9 @@ def test_deploy_model_rest_call_success(request_type): def test_deploy_model_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8379,9 +8381,9 @@ def test_undeploy_model_rest_call_success(request_type): def test_undeploy_model_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8502,9 +8504,9 @@ def test_mutate_deployed_model_rest_call_success(request_type): def test_mutate_deployed_model_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9702,9 +9704,11 @@ async def test_create_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -9871,9 +9875,11 @@ async def test_get_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = 
EndpointServiceAsyncClient(transport=transport) @@ -10015,9 +10021,11 @@ async def test_list_endpoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -10407,9 +10415,11 @@ async def test_update_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -10549,9 +10559,11 @@ async def test_update_endpoint_long_running_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -10691,9 +10703,11 @@ async def test_delete_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -10831,9 +10845,11 @@ async def 
test_deploy_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -10971,9 +10987,11 @@ async def test_undeploy_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -11111,9 +11129,11 @@ async def test_mutate_deployed_model_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_evaluation_service.py b/tests/unit/gapic/aiplatform_v1/test_evaluation_service.py index 312ce41cab..7ce073681d 100644 --- a/tests/unit/gapic/aiplatform_v1/test_evaluation_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_evaluation_service.py @@ -1255,9 +1255,9 @@ def test_evaluate_instances_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.evaluate_instances - ] = mock_rpc + client._transport._wrapped_methods[client._transport.evaluate_instances] = ( + mock_rpc + ) request = {} client.evaluate_instances(request) @@ -1440,9 +1440,9 @@ def test_evaluate_instances_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.evaluate_instances - ] = mock_rpc + client._transport._wrapped_methods[client._transport.evaluate_instances] = ( + mock_rpc + ) request = {} client.evaluate_instances(request) @@ -1785,9 +1785,9 @@ def test_evaluate_instances_rest_call_success(request_type): def test_evaluate_instances_rest_interceptors(null_interceptor): transport = transports.EvaluationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EvaluationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EvaluationServiceRestInterceptor() + ), ) client = EvaluationServiceClient(transport=transport) @@ -2596,9 +2596,11 @@ async def test_evaluate_instances_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEvaluationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEvaluationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEvaluationServiceRestInterceptor() + ), ) client = EvaluationServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_feature_online_store_admin_service.py b/tests/unit/gapic/aiplatform_v1/test_feature_online_store_admin_service.py index 06ba2e37e5..106937ddb1 100644 --- a/tests/unit/gapic/aiplatform_v1/test_feature_online_store_admin_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_feature_online_store_admin_service.py @@ 
-83,7 +83,9 @@ from google.cloud.aiplatform_v1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1.types import feature_view from google.cloud.aiplatform_v1.types import feature_view as gca_feature_view from google.cloud.aiplatform_v1.types import feature_view_sync @@ -3419,9 +3421,9 @@ def test_create_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_view] = ( + mock_rpc + ) request = {} client.create_feature_view(request) @@ -3819,9 +3821,9 @@ def test_get_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view] = ( + mock_rpc + ) request = {} client.get_feature_view(request) @@ -4170,9 +4172,9 @@ def test_list_feature_views_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_views - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_views] = ( + mock_rpc + ) request = {} client.list_feature_views(request) @@ -4713,9 +4715,9 @@ def test_update_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_view] = ( + mock_rpc + ) request = {} client.update_feature_view(request) @@ -5093,9 +5095,9 @@ def test_delete_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_view] = ( + mock_rpc + ) request = {} client.delete_feature_view(request) @@ -5442,9 +5444,9 @@ def test_sync_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.sync_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.sync_feature_view] = ( + mock_rpc + ) request = {} client.sync_feature_view(request) @@ -5791,9 +5793,9 @@ def test_get_feature_view_sync_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_view_sync - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view_sync] = ( + mock_rpc + ) request = {} client.get_feature_view_sync(request) @@ -7707,9 +7709,9 @@ def test_create_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_view] = ( + mock_rpc + ) request = {} client.create_feature_view(request) @@ -7935,9 +7937,9 @@ def test_get_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view] = ( + mock_rpc + ) request = {} client.get_feature_view(request) @@ -8119,9 +8121,9 @@ def test_list_feature_views_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_views - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_views] = ( + mock_rpc + ) request = {} client.list_feature_views(request) @@ -8394,9 +8396,9 @@ def test_update_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_view] = ( + mock_rpc + ) request = {} client.update_feature_view(request) @@ -8587,9 +8589,9 @@ def test_delete_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_view] = ( + mock_rpc + ) request = {} client.delete_feature_view(request) @@ -8768,9 +8770,9 @@ def test_sync_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.sync_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.sync_feature_view] = ( + mock_rpc + ) request = {} client.sync_feature_view(request) @@ -8960,9 +8962,9 @@ def test_get_feature_view_sync_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_view_sync - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view_sync] = ( + mock_rpc + ) request = {} client.get_feature_view_sync(request) @@ -10396,9 +10398,11 @@ def get_message_fields(field): def test_create_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -10542,9 +10546,11 @@ def test_get_feature_online_store_rest_call_success(request_type): def test_get_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -10683,9 +10689,11 @@ def test_list_feature_online_stores_rest_call_success(request_type): def test_list_feature_online_stores_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -10940,9 +10948,11 @@ def get_message_fields(field): def test_update_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11072,9 +11082,11 @@ def test_delete_feature_online_store_rest_call_success(request_type): def test_delete_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11319,9 +11331,11 @@ def 
get_message_fields(field): def test_create_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11468,9 +11482,11 @@ def test_get_feature_view_rest_call_success(request_type): def test_get_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11603,9 +11619,11 @@ def test_list_feature_views_rest_call_success(request_type): def test_list_feature_views_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11859,9 +11877,11 @@ def get_message_fields(field): def test_update_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None 
+ if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11989,9 +12009,11 @@ def test_delete_feature_view_rest_call_success(request_type): def test_delete_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12129,9 +12151,11 @@ def test_sync_feature_view_rest_call_success(request_type): def test_sync_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12274,9 +12298,11 @@ def test_get_feature_view_sync_rest_call_success(request_type): def test_get_feature_view_sync_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12414,9 +12440,11 @@ def test_list_feature_view_syncs_rest_call_success(request_type): def 
test_list_feature_view_syncs_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -13629,9 +13657,11 @@ async def test_create_feature_online_store_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -13791,9 +13821,11 @@ async def test_get_feature_online_store_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -13948,9 +13980,11 @@ async def test_list_feature_online_stores_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), 
) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14221,9 +14255,11 @@ async def test_update_feature_online_store_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14369,9 +14405,11 @@ async def test_delete_feature_online_store_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14632,9 +14670,11 @@ async def test_create_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14797,9 +14837,11 @@ async def test_get_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14949,9 +14991,11 @@ async def test_list_feature_views_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15221,9 +15265,11 @@ async def test_update_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15367,9 +15413,11 @@ async def test_delete_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15523,9 +15571,11 @@ async def test_sync_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15684,9 +15734,11 @@ async def test_get_feature_view_sync_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15840,9 +15892,11 @@ async def test_list_feature_view_syncs_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_feature_online_store_service.py b/tests/unit/gapic/aiplatform_v1/test_feature_online_store_service.py index 8f51d37704..ef7b5f8a43 100644 --- a/tests/unit/gapic/aiplatform_v1/test_feature_online_store_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_feature_online_store_service.py @@ -68,7 +68,9 @@ from google.cloud.aiplatform_v1.services.feature_online_store_service import ( FeatureOnlineStoreServiceClient, ) -from 
google.cloud.aiplatform_v1.services.feature_online_store_service import transports +from google.cloud.aiplatform_v1.services.feature_online_store_service import ( + transports, +) from google.cloud.aiplatform_v1.types import feature_online_store_service from google.cloud.aiplatform_v1.types import featurestore_online_service from google.cloud.aiplatform_v1.types import types @@ -1298,9 +1300,9 @@ def test_fetch_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.fetch_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.fetch_feature_values] = ( + mock_rpc + ) request = {} client.fetch_feature_values(request) @@ -1999,9 +2001,9 @@ def test_fetch_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.fetch_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.fetch_feature_values] = ( + mock_rpc + ) request = {} client.fetch_feature_values(request) @@ -2615,9 +2617,11 @@ def test_fetch_feature_values_rest_call_success(request_type): def test_fetch_feature_values_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceClient(transport=transport) @@ -2753,9 +2757,11 @@ def test_search_nearest_entities_rest_call_success(request_type): def test_search_nearest_entities_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceClient(transport=transport) @@ -3609,9 +3615,11 @@ async def test_fetch_feature_values_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceAsyncClient(transport=transport) @@ -3765,9 +3773,11 @@ async def test_search_nearest_entities_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncFeatureOnlineStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_feature_registry_service.py b/tests/unit/gapic/aiplatform_v1/test_feature_registry_service.py index aff97564d7..3e098ebd22 100644 --- a/tests/unit/gapic/aiplatform_v1/test_feature_registry_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_feature_registry_service.py @@ -72,12 +72,18 @@ from google.cloud.aiplatform_v1.services.feature_registry_service import ( FeatureRegistryServiceClient, ) -from google.cloud.aiplatform_v1.services.feature_registry_service import pagers -from google.cloud.aiplatform_v1.services.feature_registry_service import transports +from 
google.cloud.aiplatform_v1.services.feature_registry_service import ( + pagers, +) +from google.cloud.aiplatform_v1.services.feature_registry_service import ( + transports, +) from google.cloud.aiplatform_v1.types import feature from google.cloud.aiplatform_v1.types import feature as gca_feature from google.cloud.aiplatform_v1.types import feature_group -from google.cloud.aiplatform_v1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1.types import ( + feature_group as gca_feature_group, +) from google.cloud.aiplatform_v1.types import feature_monitoring_stats from google.cloud.aiplatform_v1.types import feature_registry_service from google.cloud.aiplatform_v1.types import featurestore_service @@ -1299,9 +1305,9 @@ def test_create_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_group] = ( + mock_rpc + ) request = {} client.create_feature_group(request) @@ -1694,9 +1700,9 @@ def test_get_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_group] = ( + mock_rpc + ) request = {} client.get_feature_group(request) @@ -2046,9 +2052,9 @@ def test_list_feature_groups_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_feature_groups - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_groups] = ( + mock_rpc + ) request = {} client.list_feature_groups(request) @@ -2583,9 +2589,9 @@ def test_update_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_group] = ( + mock_rpc + ) request = {} client.update_feature_group(request) @@ -2963,9 +2969,9 @@ def test_delete_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_group] = ( + mock_rpc + ) request = {} client.delete_feature_group(request) @@ -3670,9 +3676,9 @@ def test_batch_create_features_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -5490,9 +5496,9 @@ def test_create_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_group] = ( + mock_rpc + ) request = {} client.create_feature_group(request) @@ -5706,9 +5712,9 @@ def test_get_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_group] = ( + mock_rpc + ) request = {} client.get_feature_group(request) @@ -5890,9 +5896,9 @@ def test_list_feature_groups_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_groups - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_groups] = ( + mock_rpc + ) request = {} client.list_feature_groups(request) @@ -6159,9 +6165,9 @@ def test_update_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_group] = ( + mock_rpc + ) request = {} client.update_feature_group(request) @@ -6352,9 +6358,9 @@ def test_delete_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_group] = ( + mock_rpc + ) request = {} client.delete_feature_group(request) @@ -6748,9 +6754,9 @@ def test_batch_create_features_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -8550,9 +8556,11 @@ def get_message_fields(field): def test_create_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -8684,9 +8692,11 @@ def test_get_feature_group_rest_call_success(request_type): def test_get_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -8814,9 +8824,11 @@ def test_list_feature_groups_rest_call_success(request_type): def test_list_feature_groups_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -9036,9 +9048,11 @@ def get_message_fields(field): def test_update_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -9160,9 +9174,11 @@ def test_delete_feature_group_rest_call_success(request_type): def test_delete_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -9381,9 +9397,11 @@ def get_message_fields(field): def test_create_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -9509,9 +9527,11 @@ def test_batch_create_features_rest_call_success(request_type): def test_batch_create_features_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -9655,9 +9675,11 @@ def test_get_feature_rest_call_success(request_type): def test_get_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -9787,9 +9809,11 @@ def test_list_features_rest_call_success(request_type): def test_list_features_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -10015,9 +10039,11 @@ def get_message_fields(field): def test_update_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -10143,9 +10169,11 @@ def test_delete_feature_rest_call_success(request_type): def test_delete_feature_rest_interceptors(null_interceptor): 
transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -11262,9 +11290,11 @@ async def test_create_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -11414,9 +11444,11 @@ async def test_get_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -11560,9 +11592,11 @@ async def test_list_feature_groups_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -11799,9 +11833,11 @@ async def test_update_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -11941,9 +11977,11 @@ async def test_delete_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -12180,9 +12218,11 @@ async def test_create_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -12324,9 +12364,11 @@ async def test_batch_create_features_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -12488,9 +12530,11 @@ async def test_get_feature_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -12636,9 +12680,11 @@ async def test_list_features_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -12880,9 +12926,11 @@ async def test_update_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -13024,9 +13072,11 @@ async def test_delete_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_featurestore_online_serving_service.py 
b/tests/unit/gapic/aiplatform_v1/test_featurestore_online_serving_service.py index bb30c5873d..886ded67f5 100644 --- a/tests/unit/gapic/aiplatform_v1/test_featurestore_online_serving_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_featurestore_online_serving_service.py @@ -1326,9 +1326,9 @@ def test_read_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_feature_values] = ( + mock_rpc + ) request = {} client.read_feature_values(request) @@ -2011,9 +2011,9 @@ def test_write_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.write_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.write_feature_values] = ( + mock_rpc + ) request = {} client.write_feature_values(request) @@ -2316,9 +2316,9 @@ def test_read_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_feature_values] = ( + mock_rpc + ) request = {} client.read_feature_values(request) @@ -2729,9 +2729,9 @@ def test_write_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.write_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.write_feature_values] = ( + mock_rpc + ) request = {} client.write_feature_values(request) @@ -3265,9 +3265,11 @@ def test_read_feature_values_rest_call_success(request_type): def test_read_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreOnlineServingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceClient(transport=transport) @@ -3407,9 +3409,11 @@ def test_streaming_read_feature_values_rest_call_success(request_type): def test_streaming_read_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreOnlineServingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceClient(transport=transport) @@ -3545,9 +3549,11 @@ def test_write_feature_values_rest_call_success(request_type): def test_write_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreOnlineServingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceClient(transport=transport) @@ -4408,9 +4414,11 @@ 
async def test_read_feature_values_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreOnlineServingServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceAsyncClient(transport=transport) @@ -4566,9 +4574,11 @@ async def test_streaming_read_feature_values_rest_asyncio_interceptors( ) transport = transports.AsyncFeaturestoreOnlineServingServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceAsyncClient(transport=transport) @@ -4720,9 +4730,11 @@ async def test_write_feature_values_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreOnlineServingServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_featurestore_service.py b/tests/unit/gapic/aiplatform_v1/test_featurestore_service.py index 8f98b783bb..e95a10c7a7 100644 --- a/tests/unit/gapic/aiplatform_v1/test_featurestore_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_featurestore_service.py @@ -73,7 +73,9 @@ FeaturestoreServiceClient, ) from google.cloud.aiplatform_v1.services.featurestore_service import 
pagers -from google.cloud.aiplatform_v1.services.featurestore_service import transports +from google.cloud.aiplatform_v1.services.featurestore_service import ( + transports, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import entity_type from google.cloud.aiplatform_v1.types import entity_type as gca_entity_type @@ -1300,9 +1302,9 @@ def test_create_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_featurestore] = ( + mock_rpc + ) request = {} client.create_featurestore(request) @@ -1673,9 +1675,9 @@ def test_get_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_featurestore] = ( + mock_rpc + ) request = {} client.get_featurestore(request) @@ -2021,9 +2023,9 @@ def test_list_featurestores_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_featurestores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_featurestores] = ( + mock_rpc + ) request = {} client.list_featurestores(request) @@ -2558,9 +2560,9 @@ def test_update_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_featurestore] = ( + mock_rpc + ) request = {} client.update_featurestore(request) @@ -2914,9 +2916,9 @@ def test_delete_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_featurestore] = ( + mock_rpc + ) request = {} client.delete_featurestore(request) @@ -3272,9 +3274,9 @@ def test_create_entity_type_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_entity_type] = ( + mock_rpc + ) request = {} client.create_entity_type(request) @@ -3989,9 +3991,9 @@ def test_list_entity_types_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_entity_types - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_entity_types] = ( + mock_rpc + ) request = {} client.list_entity_types(request) @@ -4539,9 +4541,9 @@ def test_update_entity_type_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_entity_type] = ( + mock_rpc + ) request = {} client.update_entity_type(request) @@ -4898,9 +4900,9 @@ def test_delete_entity_type_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_entity_type] = ( + mock_rpc + ) request = {} client.delete_entity_type(request) @@ -5605,9 +5607,9 @@ def test_batch_create_features_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -7511,9 +7513,9 @@ def test_import_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_feature_values] = ( + mock_rpc + ) request = {} client.import_feature_values(request) @@ -8205,9 +8207,9 @@ def test_export_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.export_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_feature_values] = ( + mock_rpc + ) request = {} client.export_feature_values(request) @@ -8552,9 +8554,9 @@ def test_delete_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_values] = ( + mock_rpc + ) request = {} client.delete_feature_values(request) @@ -9365,9 +9367,9 @@ def test_create_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_featurestore] = ( + mock_rpc + ) request = {} client.create_featurestore(request) @@ -9573,9 +9575,9 @@ def test_get_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_featurestore] = ( + mock_rpc + ) request = {} client.get_featurestore(request) @@ -9757,9 +9759,9 @@ def test_list_featurestores_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_featurestores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_featurestores] = ( + mock_rpc + ) request = {} client.list_featurestores(request) @@ -10025,9 +10027,9 @@ def test_update_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_featurestore] = ( + mock_rpc + ) request = {} client.update_featurestore(request) @@ -10210,9 +10212,9 @@ def test_delete_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_featurestore] = ( + mock_rpc + ) request = {} client.delete_featurestore(request) @@ -10397,9 +10399,9 @@ def test_create_entity_type_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_entity_type] = ( + mock_rpc + ) request = {} client.create_entity_type(request) @@ -10786,9 +10788,9 @@ def test_list_entity_types_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_entity_types - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_entity_types] = ( + mock_rpc + ) request = {} client.list_entity_types(request) @@ -11056,9 +11058,9 @@ def test_update_entity_type_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_entity_type] = ( + mock_rpc + ) request = {} client.update_entity_type(request) @@ -11242,9 +11244,9 @@ def test_delete_entity_type_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_entity_type] = ( + mock_rpc + ) request = {} client.delete_entity_type(request) @@ -11638,9 +11640,9 @@ def test_batch_create_features_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -12642,9 +12644,9 @@ def test_import_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.import_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_feature_values] = ( + mock_rpc + ) request = {} client.import_feature_values(request) @@ -13029,9 +13031,9 @@ def test_export_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.export_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_feature_values] = ( + mock_rpc + ) request = {} client.export_feature_values(request) @@ -13223,9 +13225,9 @@ def test_delete_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_values] = ( + mock_rpc + ) request = {} client.delete_feature_values(request) @@ -14986,9 +14988,11 @@ def get_message_fields(field): def test_create_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15126,9 +15130,11 @@ def test_get_featurestore_rest_call_success(request_type): def test_get_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15254,9 +15260,11 @@ def test_list_featurestores_rest_call_success(request_type): def test_list_featurestores_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15478,9 +15486,11 @@ def get_message_fields(field): def test_update_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15602,9 +15612,11 @@ def test_delete_featurestore_rest_call_success(request_type): def test_delete_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15818,9 +15830,11 @@ def get_message_fields(field): def test_create_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15962,9 +15976,11 @@ def test_get_entity_type_rest_call_success(request_type): def test_get_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16094,9 +16110,11 @@ def test_list_entity_types_rest_call_success(request_type): def test_list_entity_types_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16333,9 +16351,11 @@ def get_message_fields(field): def test_update_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16459,9 +16479,11 @@ def test_delete_entity_type_rest_call_success(request_type): def test_delete_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16680,9 +16702,11 @@ def get_message_fields(field): def test_create_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16808,9 +16832,11 @@ def test_batch_create_features_rest_call_success(request_type): def test_batch_create_features_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16954,9 +16980,11 @@ def test_get_feature_rest_call_success(request_type): def test_get_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17085,9 +17113,11 @@ def test_list_features_rest_call_success(request_type): def test_list_features_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17331,9 +17361,11 @@ def get_message_fields(field): def test_update_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17457,9 +17489,11 @@ def test_delete_feature_rest_call_success(request_type): def test_delete_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17585,9 +17619,11 @@ def test_import_feature_values_rest_call_success(request_type): def test_import_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17713,9 +17749,11 @@ def test_batch_read_feature_values_rest_call_success(request_type): def test_batch_read_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17841,9 +17879,11 @@ def test_export_feature_values_rest_call_success(request_type): def test_export_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17969,9 +18009,11 @@ def test_delete_feature_values_rest_call_success(request_type): def test_delete_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -18099,9 +18141,11 @@ def test_search_features_rest_call_success(request_type): def test_search_features_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -19437,9 +19481,11 @@ async def test_create_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -19593,9 +19639,11 @@ async def test_get_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -19737,9 +19785,11 @@ async def test_list_featurestores_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -19977,9 +20027,11 @@ async def test_update_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20117,9 +20169,11 @@ async def test_delete_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20349,9 +20403,11 @@ async def test_create_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20509,9 +20565,11 @@ async def test_get_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20657,9 +20715,11 @@ async def test_list_entity_types_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20912,9 +20972,11 @@ async def test_update_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21054,9 +21116,11 @@ async def test_delete_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21291,9 +21355,11 @@ async def test_create_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21435,9 +21501,11 @@ async def test_batch_create_features_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21597,9 +21665,11 @@ async def test_get_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21745,9 +21815,11 @@ async def test_list_features_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22007,9 +22079,11 @@ async def test_update_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22149,9 +22223,11 @@ async def test_delete_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22293,9 +22369,11 @@ async def test_import_feature_values_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22437,9 +22515,11 @@ async def test_batch_read_feature_values_rest_asyncio_interceptors(null_intercep ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22583,9 +22663,11 @@ async def test_export_feature_values_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22727,9 +22809,11 @@ async def test_delete_feature_values_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22873,9 +22957,11 @@ async def test_search_features_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_gen_ai_cache_service.py b/tests/unit/gapic/aiplatform_v1/test_gen_ai_cache_service.py index 35407512d1..c824da68f3 100644 --- a/tests/unit/gapic/aiplatform_v1/test_gen_ai_cache_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_gen_ai_cache_service.py @@ -69,9 +69,13 @@ GenAiCacheServiceClient, ) from google.cloud.aiplatform_v1.services.gen_ai_cache_service import pagers -from google.cloud.aiplatform_v1.services.gen_ai_cache_service import transports +from google.cloud.aiplatform_v1.services.gen_ai_cache_service import ( + transports, +) from google.cloud.aiplatform_v1.types import cached_content -from google.cloud.aiplatform_v1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1.types import content from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import gen_ai_cache_service @@ -1275,9 +1279,9 @@ def test_create_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cached_content] = ( + mock_rpc + ) request = {} client.create_cached_content(request) @@ -1647,9 +1651,9 @@ def test_get_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cached_content] = ( + mock_rpc + ) request = {} client.get_cached_content(request) @@ -1994,9 +1998,9 @@ def test_update_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cached_content] = ( + mock_rpc + ) request = {} client.update_cached_content(request) @@ -2360,9 +2364,9 @@ def test_delete_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cached_content] = ( + mock_rpc + ) request = {} client.delete_cached_content(request) @@ -2695,9 +2699,9 @@ def test_list_cached_contents_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_cached_contents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_cached_contents] = ( + mock_rpc + ) request = {} client.list_cached_contents(request) @@ -3172,9 +3176,9 @@ def test_create_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cached_content] = ( + mock_rpc + ) request = {} client.create_cached_content(request) @@ -3369,9 +3373,9 @@ def test_get_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cached_content] = ( + mock_rpc + ) request = {} client.get_cached_content(request) @@ -3554,9 +3558,9 @@ def test_update_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cached_content] = ( + mock_rpc + ) request = {} client.update_cached_content(request) @@ -3753,9 +3757,9 @@ def test_delete_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cached_content] = ( + mock_rpc + ) request = {} client.delete_cached_content(request) @@ -3932,9 +3936,9 @@ def test_list_cached_contents_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_cached_contents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_cached_contents] = ( + mock_rpc + ) request = {} client.list_cached_contents(request) @@ -4862,9 +4866,9 @@ def get_message_fields(field): def test_create_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -4996,9 +5000,9 @@ def test_get_cached_content_rest_call_success(request_type): def test_get_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5367,9 +5371,9 @@ def get_message_fields(field): def test_update_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5491,9 +5495,9 @@ def test_delete_cached_content_rest_call_success(request_type): def test_delete_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5606,9 +5610,9 @@ def test_list_cached_contents_rest_call_success(request_type): def test_list_cached_contents_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -6741,9 +6745,11 @@ async def test_create_cached_content_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -6891,9 +6897,11 @@ async def test_get_cached_content_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -7278,9 +7286,11 @@ async def test_update_cached_content_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -7418,9 +7428,11 @@ async def test_delete_cached_content_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -7549,9 +7561,11 @@ async def test_list_cached_contents_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_gen_ai_tuning_service.py b/tests/unit/gapic/aiplatform_v1/test_gen_ai_tuning_service.py index fc0f74dad5..b7842ed84b 100644 --- a/tests/unit/gapic/aiplatform_v1/test_gen_ai_tuning_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_gen_ai_tuning_service.py @@ -73,7 +73,9 @@ GenAiTuningServiceClient, ) from google.cloud.aiplatform_v1.services.gen_ai_tuning_service import pagers -from google.cloud.aiplatform_v1.services.gen_ai_tuning_service import transports +from google.cloud.aiplatform_v1.services.gen_ai_tuning_service import ( + transports, +) from google.cloud.aiplatform_v1.types import content from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import genai_tuning_service @@ -1288,9 +1290,9 @@ def test_create_tuning_job_use_cached_wrapped_rpc(): 
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tuning_job] = ( + mock_rpc + ) request = {} client.create_tuning_job(request) @@ -1993,9 +1995,9 @@ def test_list_tuning_jobs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tuning_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tuning_jobs] = ( + mock_rpc + ) request = {} client.list_tuning_jobs(request) @@ -2514,9 +2516,9 @@ def test_cancel_tuning_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_tuning_job] = ( + mock_rpc + ) request = {} client.cancel_tuning_job(request) @@ -2844,9 +2846,9 @@ def test_rebase_tuned_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.rebase_tuned_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rebase_tuned_model] = ( + mock_rpc + ) request = {} client.rebase_tuned_model(request) @@ -3133,9 +3135,9 @@ def test_create_tuning_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tuning_job] = ( + mock_rpc + ) request = {} client.create_tuning_job(request) @@ -3502,9 +3504,9 @@ def test_list_tuning_jobs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tuning_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tuning_jobs] = ( + mock_rpc + ) request = {} client.list_tuning_jobs(request) @@ -3761,9 +3763,9 @@ def test_cancel_tuning_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_tuning_job] = ( + mock_rpc + ) request = {} client.cancel_tuning_job(request) @@ -3941,9 +3943,9 @@ def test_rebase_tuned_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.rebase_tuned_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rebase_tuned_model] = ( + mock_rpc + ) request = {} client.rebase_tuned_model(request) @@ -4755,9 +4757,9 @@ def get_message_fields(field): def test_create_tuning_job_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -4894,9 +4896,9 @@ def test_get_tuning_job_rest_call_success(request_type): def test_get_tuning_job_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5022,9 +5024,9 @@ def test_list_tuning_jobs_rest_call_success(request_type): def test_list_tuning_jobs_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5149,9 +5151,9 @@ def test_cancel_tuning_job_rest_call_success(request_type): def test_cancel_tuning_job_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5258,9 +5260,9 @@ def test_rebase_tuned_model_rest_call_success(request_type): def test_rebase_tuned_model_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -6363,9 +6365,11 @@ async def test_create_tuning_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -6518,9 +6522,11 @@ async def test_get_tuning_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -6662,9 +6668,11 @@ async def test_list_tuning_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), 
+ interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -6805,9 +6813,11 @@ async def test_cancel_tuning_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -6930,9 +6940,11 @@ async def test_rebase_tuned_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_index_endpoint_service.py b/tests/unit/gapic/aiplatform_v1/test_index_endpoint_service.py index 1c47b035c7..2ade7ea4c5 100644 --- a/tests/unit/gapic/aiplatform_v1/test_index_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_index_endpoint_service.py @@ -73,11 +73,15 @@ IndexEndpointServiceClient, ) from google.cloud.aiplatform_v1.services.index_endpoint_service import pagers -from google.cloud.aiplatform_v1.services.index_endpoint_service import transports +from google.cloud.aiplatform_v1.services.index_endpoint_service import ( + transports, +) from google.cloud.aiplatform_v1.types import accelerator_type from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import index_endpoint -from google.cloud.aiplatform_v1.types import index_endpoint as 
gca_index_endpoint +from google.cloud.aiplatform_v1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1.types import index_endpoint_service from google.cloud.aiplatform_v1.types import machine_resources from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -1294,9 +1298,9 @@ def test_create_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_index_endpoint] = ( + mock_rpc + ) request = {} client.create_index_endpoint(request) @@ -1671,9 +1675,9 @@ def test_get_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_index_endpoint] = ( + mock_rpc + ) request = {} client.get_index_endpoint(request) @@ -2035,9 +2039,9 @@ def test_list_index_endpoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_index_endpoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_index_endpoints] = ( + mock_rpc + ) request = {} client.list_index_endpoints(request) @@ -2594,9 +2598,9 @@ def test_update_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_index_endpoint] = ( + mock_rpc + ) request = {} client.update_index_endpoint(request) @@ -2962,9 +2966,9 @@ def test_delete_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_index_endpoint] = ( + mock_rpc + ) request = {} client.delete_index_endpoint(request) @@ -3987,9 +3991,9 @@ def test_mutate_deployed_index_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_index - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_index] = ( + mock_rpc + ) request = {} client.mutate_deployed_index(request) @@ -4279,9 +4283,9 @@ def test_create_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_index_endpoint] = ( + mock_rpc + ) request = {} client.create_index_endpoint(request) @@ -4471,9 +4475,9 @@ def test_get_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_index_endpoint] = ( + mock_rpc + ) request = {} client.get_index_endpoint(request) @@ -4655,9 +4659,9 @@ def test_list_index_endpoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_index_endpoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_index_endpoints] = ( + mock_rpc + ) request = {} client.list_index_endpoints(request) @@ -4925,9 +4929,9 @@ def test_update_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_index_endpoint] = ( + mock_rpc + ) request = {} client.update_index_endpoint(request) @@ -5120,9 +5124,9 @@ def test_delete_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_index_endpoint] = ( + mock_rpc + ) request = {} client.delete_index_endpoint(request) @@ -5688,9 +5692,9 @@ def test_mutate_deployed_index_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_index - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_index] = ( + mock_rpc + ) request = {} client.mutate_deployed_index(request) @@ -6629,9 +6633,11 @@ def get_message_fields(field): def test_create_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -6777,9 +6783,11 @@ def test_get_index_endpoint_rest_call_success(request_type): def test_get_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -6909,9 +6917,11 @@ def test_list_index_endpoints_rest_call_success(request_type): def test_list_index_endpoints_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7232,9 +7242,11 @@ def get_message_fields(field): def test_update_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7356,9 +7368,11 @@ def test_delete_index_endpoint_rest_call_success(request_type): def test_delete_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7484,9 +7498,11 @@ def test_deploy_index_rest_call_success(request_type): def test_deploy_index_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7612,9 +7628,11 @@ def test_undeploy_index_rest_call_success(request_type): def test_undeploy_index_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7871,9 +7889,11 @@ def get_message_fields(field): def test_mutate_deployed_index_rest_interceptors(null_interceptor): transport = 
transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -9007,9 +9027,11 @@ async def test_create_index_endpoint_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9172,9 +9194,11 @@ async def test_get_index_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9320,9 +9344,11 @@ async def test_list_index_endpoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9659,9 +9685,11 @@ async def test_update_index_endpoint_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9800,9 +9828,11 @@ async def test_delete_index_endpoint_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9945,9 +9975,11 @@ async def test_deploy_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -10089,9 +10121,11 @@ async def test_undeploy_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -10364,9 +10398,11 @@ async def test_mutate_deployed_index_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_index_service.py b/tests/unit/gapic/aiplatform_v1/test_index_service.py index 56eba5eb15..bd39d3463b 100644 --- a/tests/unit/gapic/aiplatform_v1/test_index_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_index_service.py @@ -66,8 +66,12 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.aiplatform_v1.services.index_service import IndexServiceAsyncClient -from google.cloud.aiplatform_v1.services.index_service import IndexServiceClient +from google.cloud.aiplatform_v1.services.index_service import ( + IndexServiceAsyncClient, +) +from google.cloud.aiplatform_v1.services.index_service import ( + IndexServiceClient, +) from google.cloud.aiplatform_v1.services.index_service import pagers from google.cloud.aiplatform_v1.services.index_service import transports from google.cloud.aiplatform_v1.types import deployed_index_ref @@ -3065,9 +3069,9 @@ def test_upsert_datapoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.upsert_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.upsert_datapoints] = ( + mock_rpc + ) request = {} client.upsert_datapoints(request) @@ -3312,9 +3316,9 @@ def test_remove_datapoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.remove_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.remove_datapoints] = ( + mock_rpc + ) request = {} client.remove_datapoints(request) @@ -4468,9 +4472,9 @@ def test_upsert_datapoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.upsert_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.upsert_datapoints] = ( + mock_rpc + ) request = {} client.upsert_datapoints(request) @@ -4591,9 +4595,9 @@ def test_remove_datapoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.remove_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.remove_datapoints] = ( + mock_rpc + ) request = {} client.remove_datapoints(request) @@ -5323,9 +5327,9 @@ def get_message_fields(field): def test_create_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -5464,9 +5468,9 @@ def test_get_index_rest_call_success(request_type): def test_get_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -5587,9 
+5591,9 @@ def test_list_indexes_rest_call_success(request_type): def test_list_indexes_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -5813,9 +5817,9 @@ def get_message_fields(field): def test_update_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -5934,9 +5938,9 @@ def test_delete_index_rest_call_success(request_type): def test_delete_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6060,9 +6064,9 @@ def test_upsert_datapoints_rest_call_success(request_type): def test_upsert_datapoints_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6189,9 +6193,9 @@ def test_remove_datapoints_rest_call_success(request_type): def 
test_remove_datapoints_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -7236,9 +7240,9 @@ async def test_create_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -7395,9 +7399,9 @@ async def test_get_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -7536,9 +7540,9 @@ async def test_list_indexes_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -7780,9 +7784,9 @@ async def test_update_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -7919,9 +7923,9 @@ async def test_delete_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8061,9 +8065,9 @@ async def test_upsert_datapoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8207,9 +8211,9 @@ async def test_remove_datapoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_job_service.py b/tests/unit/gapic/aiplatform_v1/test_job_service.py index aef45a8b7b..35bea5ae57 100644 --- a/tests/unit/gapic/aiplatform_v1/test_job_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_job_service.py @@ -66,7 +66,9 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from 
google.cloud.aiplatform_v1.services.job_service import JobServiceAsyncClient +from google.cloud.aiplatform_v1.services.job_service import ( + JobServiceAsyncClient, +) from google.cloud.aiplatform_v1.services.job_service import JobServiceClient from google.cloud.aiplatform_v1.services.job_service import pagers from google.cloud.aiplatform_v1.services.job_service import transports @@ -79,7 +81,9 @@ from google.cloud.aiplatform_v1.types import custom_job from google.cloud.aiplatform_v1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1.types import data_labeling_job -from google.cloud.aiplatform_v1.types import data_labeling_job as gca_data_labeling_job +from google.cloud.aiplatform_v1.types import ( + data_labeling_job as gca_data_labeling_job, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import env_var from google.cloud.aiplatform_v1.types import explanation @@ -1233,9 +1237,9 @@ def test_create_custom_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_custom_job] = ( + mock_rpc + ) request = {} client.create_custom_job(request) @@ -1929,9 +1933,9 @@ def test_list_custom_jobs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_custom_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_custom_jobs] = ( + mock_rpc + ) request = {} client.list_custom_jobs(request) @@ -2449,9 +2453,9 @@ def test_delete_custom_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_custom_job] = ( + mock_rpc + ) request = {} client.delete_custom_job(request) @@ -2792,9 +2796,9 @@ def test_cancel_custom_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_custom_job] = ( + mock_rpc + ) request = {} client.cancel_custom_job(request) @@ -3526,9 +3530,9 @@ def test_get_data_labeling_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_data_labeling_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_labeling_job] = ( + mock_rpc + ) request = {} client.get_data_labeling_job(request) @@ -8960,9 +8964,9 @@ def test_get_nas_trial_detail_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_nas_trial_detail - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_nas_trial_detail] = ( + mock_rpc + ) request = {} client.get_nas_trial_detail(request) @@ -9306,9 +9310,9 @@ def test_list_nas_trial_details_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_nas_trial_details - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_nas_trial_details] = ( + mock_rpc + ) request = {} client.list_nas_trial_details(request) @@ -15141,9 +15145,9 @@ def test_create_custom_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_custom_job] = ( + mock_rpc + ) request = {} client.create_custom_job(request) @@ -15510,9 +15514,9 @@ def test_list_custom_jobs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_custom_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_custom_jobs] = ( + mock_rpc + ) request = {} client.list_custom_jobs(request) @@ -15771,9 +15775,9 @@ def test_delete_custom_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_custom_job] = ( + mock_rpc + ) request = {} client.delete_custom_job(request) @@ -15951,9 +15955,9 @@ def test_cancel_custom_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.cancel_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_custom_job] = ( + mock_rpc + ) request = {} client.cancel_custom_job(request) @@ -16326,9 +16330,9 @@ def test_get_data_labeling_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_data_labeling_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_labeling_job] = ( + mock_rpc + ) request = {} client.get_data_labeling_job(request) @@ -19151,9 +19155,9 @@ def test_get_nas_trial_detail_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_nas_trial_detail - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_nas_trial_detail] = ( + mock_rpc + ) request = {} client.get_nas_trial_detail(request) @@ -19336,9 +19340,9 @@ def test_list_nas_trial_details_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_nas_trial_details - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_nas_trial_details] = ( + mock_rpc + ) request = {} client.list_nas_trial_details(request) @@ -20874,9 +20878,9 @@ def test_search_model_deployment_monitoring_stats_anomalies_rest_required_fields # verify required fields with default values are now present - jsonified_request[ - "modelDeploymentMonitoringJob" - ] = "model_deployment_monitoring_job_value" + jsonified_request["modelDeploymentMonitoringJob"] = ( + "model_deployment_monitoring_job_value" + ) jsonified_request["deployedModelId"] = "deployed_model_id_value" unset_fields = transport_class( @@ -24579,9 +24583,9 @@ def get_message_fields(field): def test_create_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -24712,9 +24716,9 @@ def test_get_custom_job_rest_call_success(request_type): def test_get_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -24839,9 +24843,9 @@ def test_list_custom_jobs_rest_call_success(request_type): def test_list_custom_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -24962,9 +24966,9 @@ def test_delete_custom_job_rest_call_success(request_type): def test_delete_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25085,9 +25089,9 @@ def test_cancel_custom_job_rest_call_success(request_type): def test_cancel_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25334,9 +25338,9 @@ def get_message_fields(field): def test_create_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25487,9 +25491,9 @@ def test_get_data_labeling_job_rest_call_success(request_type): def test_get_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = 
JobServiceClient(transport=transport) @@ -25616,9 +25620,9 @@ def test_list_data_labeling_jobs_rest_call_success(request_type): def test_list_data_labeling_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25747,9 +25751,9 @@ def test_delete_data_labeling_job_rest_call_success(request_type): def test_delete_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25875,9 +25879,9 @@ def test_cancel_data_labeling_job_rest_call_success(request_type): def test_cancel_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26282,9 +26286,9 @@ def get_message_fields(field): def test_create_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26435,9 +26439,9 @@ def 
test_get_hyperparameter_tuning_job_rest_call_success(request_type): def test_get_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26568,9 +26572,9 @@ def test_list_hyperparameter_tuning_jobs_rest_call_success(request_type): def test_list_hyperparameter_tuning_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26699,9 +26703,9 @@ def test_delete_hyperparameter_tuning_job_rest_call_success(request_type): def test_delete_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26827,9 +26831,9 @@ def test_cancel_hyperparameter_tuning_job_rest_call_success(request_type): def test_cancel_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27160,9 
+27164,9 @@ def get_message_fields(field): def test_create_nas_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27295,9 +27299,9 @@ def test_get_nas_job_rest_call_success(request_type): def test_get_nas_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27418,9 +27422,9 @@ def test_list_nas_jobs_rest_call_success(request_type): def test_list_nas_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27537,9 +27541,9 @@ def test_delete_nas_job_rest_call_success(request_type): def test_delete_nas_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27658,9 +27662,9 @@ def test_cancel_nas_job_rest_call_success(request_type): def test_cancel_nas_job_rest_interceptors(null_interceptor): transport = 
transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27779,9 +27783,9 @@ def test_get_nas_trial_detail_rest_call_success(request_type): def test_get_nas_trial_detail_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27906,9 +27910,9 @@ def test_list_nas_trial_details_rest_call_success(request_type): def test_list_nas_trial_details_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28286,9 +28290,9 @@ def get_message_fields(field): def test_create_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28441,9 +28445,9 @@ def test_get_batch_prediction_job_rest_call_success(request_type): def test_get_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28574,9 +28578,9 @@ def test_list_batch_prediction_jobs_rest_call_success(request_type): def test_list_batch_prediction_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28705,9 +28709,9 @@ def test_delete_batch_prediction_job_rest_call_success(request_type): def test_delete_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28833,9 +28837,9 @@ def test_cancel_batch_prediction_job_rest_call_success(request_type): def test_cancel_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29149,9 +29153,9 @@ def get_message_fields(field): def test_create_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29304,9 +29308,9 @@ def test_search_model_deployment_monitoring_stats_anomalies_rest_interceptors( ): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29474,9 +29478,9 @@ def test_get_model_deployment_monitoring_job_rest_call_success(request_type): def test_get_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29613,9 +29617,9 @@ def test_list_model_deployment_monitoring_jobs_rest_call_success(request_type): def test_list_model_deployment_monitoring_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29924,9 +29928,9 @@ def get_message_fields(field): def test_update_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None 
- if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30054,9 +30058,9 @@ def test_delete_model_deployment_monitoring_job_rest_call_success(request_type): def test_delete_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30184,9 +30188,9 @@ def test_pause_model_deployment_monitoring_job_rest_call_success(request_type): def test_pause_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30298,9 +30302,9 @@ def test_resume_model_deployment_monitoring_job_rest_call_success(request_type): def test_resume_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -32020,9 +32024,9 @@ async def test_create_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32172,9 +32176,9 @@ async def test_get_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32315,9 +32319,9 @@ async def test_list_custom_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32454,9 +32458,9 @@ async def test_delete_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32594,9 +32598,9 @@ async def test_cancel_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ 
-32859,9 +32863,9 @@ async def test_create_data_labeling_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33028,9 +33032,9 @@ async def test_get_data_labeling_job_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33174,9 +33178,9 @@ async def test_list_data_labeling_jobs_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33321,9 +33325,9 @@ async def test_delete_data_labeling_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33465,9 +33469,9 @@ async def test_cancel_data_labeling_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33890,9 +33894,9 @@ async def test_create_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34063,9 +34067,9 @@ async def test_get_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34214,9 +34218,9 @@ async def test_list_hyperparameter_tuning_jobs_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34364,9 +34368,9 @@ async def test_delete_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = 
JobServiceAsyncClient(transport=transport) @@ -34512,9 +34516,9 @@ async def test_cancel_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34864,9 +34868,9 @@ async def test_create_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35017,9 +35021,9 @@ async def test_get_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35158,9 +35162,9 @@ async def test_list_nas_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35295,9 +35299,9 @@ async def test_delete_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), 
- interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35434,9 +35438,9 @@ async def test_cancel_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35571,9 +35575,9 @@ async def test_get_nas_trial_detail_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35715,9 +35719,9 @@ async def test_list_nas_trial_details_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36111,9 +36115,9 @@ async def test_create_batch_prediction_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = 
JobServiceAsyncClient(transport=transport) @@ -36282,9 +36286,9 @@ async def test_get_batch_prediction_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36431,9 +36435,9 @@ async def test_list_batch_prediction_jobs_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36578,9 +36582,9 @@ async def test_delete_batch_prediction_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36722,9 +36726,9 @@ async def test_cancel_batch_prediction_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37058,9 +37062,9 @@ async def test_create_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37231,9 +37235,9 @@ async def test_search_model_deployment_monitoring_stats_anomalies_rest_asyncio_i ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37421,9 +37425,9 @@ async def test_get_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37582,9 +37586,9 @@ async def test_list_model_deployment_monitoring_jobs_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37913,9 +37917,9 @@ async def test_update_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -38063,9 +38067,9 @@ async def test_delete_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -38213,9 +38217,9 @@ async def test_pause_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -38347,9 +38351,9 @@ async def test_resume_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_llm_utility_service.py b/tests/unit/gapic/aiplatform_v1/test_llm_utility_service.py index a27c2f9f61..2a6fb7f7ee 100644 --- a/tests/unit/gapic/aiplatform_v1/test_llm_utility_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_llm_utility_service.py @@ -68,7 +68,9 @@ from google.cloud.aiplatform_v1.services.llm_utility_service import ( LlmUtilityServiceClient, ) -from google.cloud.aiplatform_v1.services.llm_utility_service import transports +from google.cloud.aiplatform_v1.services.llm_utility_service 
import ( + transports, +) from google.cloud.aiplatform_v1.types import content from google.cloud.aiplatform_v1.types import llm_utility_service from google.cloud.aiplatform_v1.types import openapi @@ -2501,9 +2503,9 @@ def test_count_tokens_rest_call_success(request_type): def test_count_tokens_rest_interceptors(null_interceptor): transport = transports.LlmUtilityServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LlmUtilityServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.LlmUtilityServiceRestInterceptor() + ), ) client = LlmUtilityServiceClient(transport=transport) @@ -2630,9 +2632,9 @@ def test_compute_tokens_rest_call_success(request_type): def test_compute_tokens_rest_interceptors(null_interceptor): transport = transports.LlmUtilityServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LlmUtilityServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.LlmUtilityServiceRestInterceptor() + ), ) client = LlmUtilityServiceClient(transport=transport) @@ -3463,9 +3465,11 @@ async def test_count_tokens_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncLlmUtilityServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncLlmUtilityServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncLlmUtilityServiceRestInterceptor() + ), ) client = LlmUtilityServiceAsyncClient(transport=transport) @@ -3609,9 +3613,11 @@ async def test_compute_tokens_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncLlmUtilityServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncLlmUtilityServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.AsyncLlmUtilityServiceRestInterceptor() + ), ) client = LlmUtilityServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_match_service.py b/tests/unit/gapic/aiplatform_v1/test_match_service.py index 082e84b719..46db0440c8 100644 --- a/tests/unit/gapic/aiplatform_v1/test_match_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_match_service.py @@ -62,8 +62,12 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.aiplatform_v1.services.match_service import MatchServiceAsyncClient -from google.cloud.aiplatform_v1.services.match_service import MatchServiceClient +from google.cloud.aiplatform_v1.services.match_service import ( + MatchServiceAsyncClient, +) +from google.cloud.aiplatform_v1.services.match_service import ( + MatchServiceClient, +) from google.cloud.aiplatform_v1.services.match_service import transports from google.cloud.aiplatform_v1.types import index from google.cloud.aiplatform_v1.types import match_service @@ -1432,9 +1436,9 @@ def test_read_index_datapoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_index_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_index_datapoints] = ( + mock_rpc + ) request = {} client.read_index_datapoints(request) @@ -1739,9 +1743,9 @@ def test_read_index_datapoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.read_index_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_index_datapoints] = ( + mock_rpc + ) request = {} client.read_index_datapoints(request) @@ -2134,9 +2138,9 @@ def test_find_neighbors_rest_call_success(request_type): def test_find_neighbors_rest_interceptors(null_interceptor): transport = transports.MatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MatchServiceRestInterceptor() + ), ) client = MatchServiceClient(transport=transport) @@ -2267,9 +2271,9 @@ def test_read_index_datapoints_rest_call_success(request_type): def test_read_index_datapoints_rest_interceptors(null_interceptor): transport = transports.MatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MatchServiceRestInterceptor() + ), ) client = MatchServiceClient(transport=transport) @@ -3102,9 +3106,9 @@ async def test_find_neighbors_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMatchServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncMatchServiceRestInterceptor() + ), ) client = MatchServiceAsyncClient(transport=transport) @@ -3251,9 +3255,9 @@ async def test_read_index_datapoints_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMatchServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor 
else transports.AsyncMatchServiceRestInterceptor() + ), ) client = MatchServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_metadata_service.py b/tests/unit/gapic/aiplatform_v1/test_metadata_service.py index 9d91e30d2b..28c1d8ebde 100644 --- a/tests/unit/gapic/aiplatform_v1/test_metadata_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_metadata_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1.services.metadata_service import ( MetadataServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.metadata_service import MetadataServiceClient +from google.cloud.aiplatform_v1.services.metadata_service import ( + MetadataServiceClient, +) from google.cloud.aiplatform_v1.services.metadata_service import pagers from google.cloud.aiplatform_v1.services.metadata_service import transports from google.cloud.aiplatform_v1.types import artifact @@ -82,10 +84,14 @@ from google.cloud.aiplatform_v1.types import execution as gca_execution from google.cloud.aiplatform_v1.types import lineage_subgraph from google.cloud.aiplatform_v1.types import metadata_schema -from google.cloud.aiplatform_v1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1.types import metadata_service from google.cloud.aiplatform_v1.types import metadata_store -from google.cloud.aiplatform_v1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1265,9 +1271,9 @@ def test_create_metadata_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_store] = ( + mock_rpc + ) request = {} client.create_metadata_store(request) @@ -1636,9 +1642,9 @@ def test_get_metadata_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_store] = ( + mock_rpc + ) request = {} client.get_metadata_store(request) @@ -1982,9 +1988,9 @@ def test_list_metadata_stores_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_stores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_stores] = ( + mock_rpc + ) request = {} client.list_metadata_stores(request) @@ -2524,9 +2530,9 @@ def test_delete_metadata_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_metadata_store] = ( + mock_rpc + ) request = {} client.delete_metadata_store(request) @@ -7727,9 +7733,9 @@ def test_add_context_children_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.add_context_children - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_context_children] = ( + mock_rpc + ) request = {} client.add_context_children(request) @@ -8768,9 +8774,9 @@ def test_create_execution_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_execution] = ( + mock_rpc + ) request = {} client.create_execution(request) @@ -10000,9 +10006,9 @@ def test_update_execution_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_execution] = ( + mock_rpc + ) request = {} client.update_execution(request) @@ -10347,9 +10353,9 @@ def test_delete_execution_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_execution] = ( + mock_rpc + ) request = {} client.delete_execution(request) @@ -10679,9 +10685,9 @@ def test_purge_executions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.purge_executions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_executions] = ( + mock_rpc + ) request = {} client.purge_executions(request) @@ -11015,9 +11021,9 @@ def test_add_execution_events_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_execution_events - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_execution_events] = ( + mock_rpc + ) request = {} client.add_execution_events(request) @@ -11715,9 +11721,9 @@ def test_create_metadata_schema_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_schema] = ( + mock_rpc + ) request = {} client.create_metadata_schema(request) @@ -12099,9 +12105,9 @@ def test_get_metadata_schema_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_schema] = ( + mock_rpc + ) request = {} client.get_metadata_schema(request) @@ -12457,9 +12463,9 @@ def test_list_metadata_schemas_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_metadata_schemas - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_schemas] = ( + mock_rpc + ) request = {} client.list_metadata_schemas(request) @@ -13273,9 +13279,9 @@ def test_create_metadata_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_store] = ( + mock_rpc + ) request = {} client.create_metadata_store(request) @@ -13469,9 +13475,9 @@ def test_get_metadata_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_store] = ( + mock_rpc + ) request = {} client.get_metadata_store(request) @@ -13653,9 +13659,9 @@ def test_list_metadata_stores_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_stores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_stores] = ( + mock_rpc + ) request = {} client.list_metadata_stores(request) @@ -13914,9 +13920,9 @@ def test_delete_metadata_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_metadata_store] = ( + mock_rpc + ) request = {} client.delete_metadata_store(request) @@ -16724,9 +16730,9 @@ def test_add_context_children_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_context_children - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_context_children] = ( + mock_rpc + ) request = {} client.add_context_children(request) @@ -17286,9 +17292,9 @@ def test_create_execution_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_execution] = ( + mock_rpc + ) request = {} client.create_execution(request) @@ -17927,9 +17933,9 @@ def test_update_execution_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_execution] = ( + mock_rpc + ) request = {} client.update_execution(request) @@ -18124,9 +18130,9 @@ def test_delete_execution_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_execution] = ( + mock_rpc + ) request = {} client.delete_execution(request) @@ -18307,9 +18313,9 @@ def test_purge_executions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.purge_executions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_executions] = ( + mock_rpc + ) request = {} client.purge_executions(request) @@ -18503,9 +18509,9 @@ def test_add_execution_events_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_execution_events - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_execution_events] = ( + mock_rpc + ) request = {} client.add_execution_events(request) @@ -18880,9 +18886,9 @@ def test_create_metadata_schema_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_schema] = ( + mock_rpc + ) request = {} client.create_metadata_schema(request) @@ -19079,9 +19085,9 @@ def test_get_metadata_schema_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_schema] = ( + mock_rpc + ) request = {} client.get_metadata_schema(request) @@ -19264,9 +19270,9 @@ def test_list_metadata_schemas_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_schemas - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_schemas] = ( + mock_rpc + ) request = {} client.list_metadata_schemas(request) @@ -21605,9 +21611,9 @@ def get_message_fields(field): def test_create_metadata_store_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -21737,9 +21743,9 @@ def test_get_metadata_store_rest_call_success(request_type): def test_get_metadata_store_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -21867,9 +21873,9 @@ def test_list_metadata_stores_rest_call_success(request_type): def test_list_metadata_stores_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -21994,9 +22000,9 @@ def test_delete_metadata_store_rest_call_success(request_type): def test_delete_metadata_store_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22223,9 +22229,9 @@ def get_message_fields(field): def test_create_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22368,9 +22374,9 @@ def test_get_artifact_rest_call_success(request_type): def test_get_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22499,9 +22505,9 @@ def test_list_artifacts_rest_call_success(request_type): def test_list_artifacts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22734,9 +22740,9 @@ def get_message_fields(field): def test_update_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22859,9 +22865,9 @@ def test_delete_artifact_rest_call_success(request_type): def test_delete_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22986,9 +22992,9 @@ def test_purge_artifacts_rest_call_success(request_type): def test_purge_artifacts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23211,9 +23217,9 @@ def get_message_fields(field): def test_create_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -23352,9 +23358,9 @@ def test_get_context_rest_call_success(request_type): def test_get_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23483,9 +23489,9 @@ def test_list_contexts_rest_call_success(request_type): def test_list_contexts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23715,9 +23721,9 @@ def get_message_fields(field): def test_update_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23840,9 +23846,9 @@ def test_delete_context_rest_call_success(request_type): def test_delete_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23967,9 +23973,9 @@ def 
test_purge_contexts_rest_call_success(request_type): def test_purge_contexts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24101,9 +24107,9 @@ def test_add_context_artifacts_and_executions_rest_call_success(request_type): def test_add_context_artifacts_and_executions_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24239,9 +24245,9 @@ def test_add_context_children_rest_call_success(request_type): def test_add_context_children_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24373,9 +24379,9 @@ def test_remove_context_children_rest_call_success(request_type): def test_remove_context_children_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -24507,9 +24513,9 @@ def test_query_context_lineage_subgraph_rest_call_success(request_type): def test_query_context_lineage_subgraph_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24733,9 +24739,9 @@ def get_message_fields(field): def test_create_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24876,9 +24882,9 @@ def test_get_execution_rest_call_success(request_type): def test_get_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25007,9 +25013,9 @@ def test_list_executions_rest_call_success(request_type): def test_list_executions_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -25239,9 +25245,9 @@ def get_message_fields(field): def test_update_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25364,9 +25370,9 @@ def test_delete_execution_rest_call_success(request_type): def test_delete_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25491,9 +25497,9 @@ def test_purge_executions_rest_call_success(request_type): def test_purge_executions_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25621,9 +25627,9 @@ def test_add_execution_events_rest_call_success(request_type): def test_add_execution_events_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) 
@@ -25755,9 +25761,9 @@ def test_query_execution_inputs_and_outputs_rest_call_success(request_type): def test_query_execution_inputs_and_outputs_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25979,9 +25985,9 @@ def get_message_fields(field): def test_create_metadata_schema_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -26124,9 +26130,9 @@ def test_get_metadata_schema_rest_call_success(request_type): def test_get_metadata_schema_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -26258,9 +26264,9 @@ def test_list_metadata_schemas_rest_call_success(request_type): def test_list_metadata_schemas_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -26392,9 +26398,9 @@ def test_query_artifact_lineage_subgraph_rest_call_success(request_type): def test_query_artifact_lineage_subgraph_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -27939,9 +27945,11 @@ async def test_create_metadata_store_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28087,9 +28095,11 @@ async def test_get_metadata_store_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28233,9 +28243,11 @@ async def test_list_metadata_stores_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28376,9 
+28388,11 @@ async def test_delete_metadata_store_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28621,9 +28635,11 @@ async def test_create_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28783,9 +28799,11 @@ async def test_get_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28931,9 +28949,11 @@ async def test_list_artifacts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29183,9 +29203,11 @@ async def test_update_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29325,9 +29347,11 @@ async def test_delete_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29469,9 +29493,11 @@ async def test_purge_artifacts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29711,9 +29737,11 @@ async def test_create_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29871,9 +29899,11 @@ async def test_get_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30018,9 +30048,11 @@ async def test_list_contexts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30267,9 +30299,11 @@ async def test_update_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30409,9 +30443,11 @@ async def test_delete_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30553,9 +30589,11 @@ async def test_purge_contexts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30708,9 +30746,11 @@ async def test_add_context_artifacts_and_executions_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30862,9 +30902,11 @@ async def test_add_context_children_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31012,9 +31054,11 @@ async def test_remove_context_children_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31164,9 +31208,11 @@ async def test_query_context_lineage_subgraph_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ 
-31408,9 +31454,11 @@ async def test_create_execution_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31568,9 +31616,11 @@ async def test_get_execution_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31716,9 +31766,11 @@ async def test_list_executions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31965,9 +32017,11 @@ async def test_update_execution_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32107,9 +32161,11 @@ async def test_delete_execution_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32251,9 +32307,11 @@ async def test_purge_executions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32398,9 +32456,11 @@ async def test_add_execution_events_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32552,9 +32612,11 @@ async def test_query_execution_inputs_and_outputs_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32792,9 +32854,11 @@ async def test_create_metadata_schema_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32953,9 +33017,11 @@ async def test_get_metadata_schema_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -33103,9 +33169,11 @@ async def test_list_metadata_schemas_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -33255,9 +33323,11 @@ async def test_query_artifact_lineage_subgraph_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1/test_migration_service.py index b7ffa7bfc4..9061ef4f8b 100644 --- a/tests/unit/gapic/aiplatform_v1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_migration_service.py @@ -69,7 +69,9 @@ from 
google.cloud.aiplatform_v1.services.migration_service import ( MigrationServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.migration_service import MigrationServiceClient +from google.cloud.aiplatform_v1.services.migration_service import ( + MigrationServiceClient, +) from google.cloud.aiplatform_v1.services.migration_service import pagers from google.cloud.aiplatform_v1.services.migration_service import transports from google.cloud.aiplatform_v1.types import migratable_resource @@ -2870,9 +2872,9 @@ def test_search_migratable_resources_rest_call_success(request_type): def test_search_migratable_resources_rest_interceptors(null_interceptor): transport = transports.MigrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MigrationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MigrationServiceRestInterceptor() + ), ) client = MigrationServiceClient(transport=transport) @@ -2997,9 +2999,9 @@ def test_batch_migrate_resources_rest_call_success(request_type): def test_batch_migrate_resources_rest_interceptors(null_interceptor): transport = transports.MigrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MigrationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MigrationServiceRestInterceptor() + ), ) client = MigrationServiceClient(transport=transport) @@ -3849,9 +3851,11 @@ async def test_search_migratable_resources_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncMigrationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMigrationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMigrationServiceRestInterceptor() + ), ) client = MigrationServiceAsyncClient(transport=transport) 
@@ -3994,9 +3998,11 @@ async def test_batch_migrate_resources_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncMigrationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMigrationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMigrationServiceRestInterceptor() + ), ) client = MigrationServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_model_garden_service.py b/tests/unit/gapic/aiplatform_v1/test_model_garden_service.py index 2a62b5d6aa..9f688a62b5 100644 --- a/tests/unit/gapic/aiplatform_v1/test_model_garden_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_model_garden_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1.services.model_garden_service import ( ModelGardenServiceClient, ) -from google.cloud.aiplatform_v1.services.model_garden_service import transports +from google.cloud.aiplatform_v1.services.model_garden_service import ( + transports, +) from google.cloud.aiplatform_v1.types import accelerator_type from google.cloud.aiplatform_v1.types import env_var from google.cloud.aiplatform_v1.types import machine_resources @@ -1297,9 +1299,9 @@ def test_get_publisher_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_publisher_model] = ( + mock_rpc + ) request = {} client.get_publisher_model(request) @@ -1696,9 +1698,9 @@ async def test_deploy_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.deploy - ] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.deploy] = ( + mock_rpc + ) request = {} await client.deploy(request) @@ -1838,9 +1840,9 @@ def test_get_publisher_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_publisher_model] = ( + mock_rpc + ) request = {} client.get_publisher_model(request) @@ -2454,9 +2456,9 @@ def test_get_publisher_model_rest_call_success(request_type): def test_get_publisher_model_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -2576,9 +2578,9 @@ def test_deploy_rest_call_success(request_type): def test_deploy_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -3443,9 +3445,11 @@ async def test_get_publisher_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -3583,9 +3587,11 @@ async def test_deploy_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_model_service.py b/tests/unit/gapic/aiplatform_v1/test_model_service.py index fac757ad6d..e811b181d1 100644 --- a/tests/unit/gapic/aiplatform_v1/test_model_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_model_service.py @@ -66,8 +66,12 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.aiplatform_v1.services.model_service import ModelServiceAsyncClient -from google.cloud.aiplatform_v1.services.model_service import ModelServiceClient +from google.cloud.aiplatform_v1.services.model_service import ( + ModelServiceAsyncClient, +) +from google.cloud.aiplatform_v1.services.model_service import ( + ModelServiceClient, +) from google.cloud.aiplatform_v1.services.model_service import pagers from google.cloud.aiplatform_v1.services.model_service import 
transports from google.cloud.aiplatform_v1.types import deployed_model_ref @@ -80,7 +84,9 @@ from google.cloud.aiplatform_v1.types import model from google.cloud.aiplatform_v1.types import model as gca_model from google.cloud.aiplatform_v1.types import model_evaluation -from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation +from google.cloud.aiplatform_v1.types import ( + model_evaluation as gca_model_evaluation, +) from google.cloud.aiplatform_v1.types import model_evaluation_slice from google.cloud.aiplatform_v1.types import model_service from google.cloud.aiplatform_v1.types import operation as gca_operation @@ -2494,9 +2500,9 @@ def test_list_model_versions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_versions] = ( + mock_rpc + ) request = {} client.list_model_versions(request) @@ -4672,9 +4678,9 @@ def test_delete_model_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_model_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_model_version] = ( + mock_rpc + ) request = {} client.delete_model_version(request) @@ -5064,9 +5070,9 @@ def test_merge_version_aliases_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.merge_version_aliases - ] = mock_rpc + client._transport._wrapped_methods[client._transport.merge_version_aliases] = ( + mock_rpc + ) request = {} client.merge_version_aliases(request) @@ -7275,9 +7281,9 @@ def test_get_model_evaluation_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_model_evaluation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_model_evaluation] = ( + mock_rpc + ) request = {} client.get_model_evaluation(request) @@ -7632,9 +7638,9 @@ def test_list_model_evaluations_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_evaluations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_evaluations] = ( + mock_rpc + ) request = {} client.list_model_evaluations(request) @@ -9631,9 +9637,9 @@ def test_list_model_versions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_versions] = ( + mock_rpc + ) request = {} client.list_model_versions(request) @@ -10714,9 +10720,9 @@ def test_delete_model_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_model_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_model_version] = ( + mock_rpc + ) request = {} client.delete_model_version(request) @@ -10896,9 +10902,9 @@ def test_merge_version_aliases_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.merge_version_aliases - ] = mock_rpc + client._transport._wrapped_methods[client._transport.merge_version_aliases] = ( + mock_rpc + ) request = {} client.merge_version_aliases(request) @@ -12089,9 +12095,9 @@ def test_get_model_evaluation_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_model_evaluation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_model_evaluation] = ( + mock_rpc + ) request = {} client.get_model_evaluation(request) @@ -12274,9 +12280,9 @@ def test_list_model_evaluations_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_model_evaluations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_evaluations] = ( + mock_rpc + ) request = {} client.list_model_evaluations(request) @@ -14191,9 +14197,9 @@ def test_upload_model_rest_call_success(request_type): def test_upload_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -14360,9 +14366,9 @@ def test_get_model_rest_call_success(request_type): def test_get_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -14483,9 +14489,9 @@ def test_list_models_rest_call_success(request_type): def test_list_models_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -14612,9 +14618,9 @@ def test_list_model_versions_rest_call_success(request_type): def test_list_model_versions_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -14746,9 +14752,9 @@ def test_list_model_version_checkpoints_rest_call_success(request_type): def test_list_model_version_checkpoints_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15133,9 +15139,9 @@ def get_message_fields(field): def test_update_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15254,9 +15260,9 @@ def test_update_explanation_dataset_rest_call_success(request_type): def test_update_explanation_dataset_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15376,9 +15382,9 @@ def test_delete_model_rest_call_success(request_type): def test_delete_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + 
None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15499,9 +15505,9 @@ def test_delete_model_version_rest_call_success(request_type): def test_delete_model_version_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15671,9 +15677,9 @@ def test_merge_version_aliases_rest_call_success(request_type): def test_merge_version_aliases_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15791,9 +15797,9 @@ def test_export_model_rest_call_success(request_type): def test_export_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15912,9 +15918,9 @@ def test_copy_model_rest_call_success(request_type): def test_copy_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + 
), ) client = ModelServiceClient(transport=transport) @@ -16049,9 +16055,9 @@ def test_import_model_evaluation_rest_call_success(request_type): def test_import_model_evaluation_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16190,9 +16196,9 @@ def test_batch_import_model_evaluation_slices_rest_call_success(request_type): def test_batch_import_model_evaluation_slices_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16331,9 +16337,9 @@ def test_batch_import_evaluated_annotations_rest_call_success(request_type): def test_batch_import_evaluated_annotations_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16479,9 +16485,9 @@ def test_get_model_evaluation_rest_call_success(request_type): def test_get_model_evaluation_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16609,9 +16615,9 @@ def test_list_model_evaluations_rest_call_success(request_type): def test_list_model_evaluations_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16748,9 +16754,9 @@ def test_get_model_evaluation_slice_rest_call_success(request_type): def test_get_model_evaluation_slice_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16885,9 +16891,9 @@ def test_list_model_evaluation_slices_rest_call_success(request_type): def test_list_model_evaluation_slices_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -18092,9 +18098,9 @@ async def test_upload_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client 
= ModelServiceAsyncClient(transport=transport) @@ -18279,9 +18285,9 @@ async def test_get_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -18420,9 +18426,9 @@ async def test_list_models_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -18565,9 +18571,9 @@ async def test_list_model_versions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -18718,9 +18724,9 @@ async def test_list_model_version_checkpoints_rest_asyncio_interceptors( ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19125,9 +19131,9 @@ async def test_update_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19262,9 +19268,9 @@ async def test_update_explanation_dataset_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19402,9 +19408,9 @@ async def test_delete_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19541,9 +19547,9 @@ async def test_delete_model_version_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19729,9 +19735,9 @@ async def test_merge_version_aliases_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19867,9 +19873,9 @@ async def test_export_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20006,9 +20012,9 @@ async def test_copy_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20159,9 +20165,9 @@ async def test_import_model_evaluation_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20320,9 +20326,9 @@ async def test_batch_import_model_evaluation_slices_rest_asyncio_interceptors( ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20481,9 +20487,9 @@ async def 
test_batch_import_evaluated_annotations_rest_asyncio_interceptors( ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20646,9 +20652,9 @@ async def test_get_model_evaluation_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20792,9 +20798,9 @@ async def test_list_model_evaluations_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20947,9 +20953,9 @@ async def test_get_model_evaluation_slice_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -21100,9 +21106,9 @@ async def test_list_model_evaluation_slices_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_notebook_service.py b/tests/unit/gapic/aiplatform_v1/test_notebook_service.py index c8f43cda7b..93f2dd4480 100644 --- a/tests/unit/gapic/aiplatform_v1/test_notebook_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_notebook_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1.services.notebook_service import ( NotebookServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.notebook_service import NotebookServiceClient +from google.cloud.aiplatform_v1.services.notebook_service import ( + NotebookServiceClient, +) from google.cloud.aiplatform_v1.services.notebook_service import pagers from google.cloud.aiplatform_v1.services.notebook_service import transports from google.cloud.aiplatform_v1.types import accelerator_type @@ -85,7 +87,9 @@ ) from google.cloud.aiplatform_v1.types import notebook_idle_shutdown_config from google.cloud.aiplatform_v1.types import notebook_runtime -from google.cloud.aiplatform_v1.types import notebook_runtime as gca_notebook_runtime +from google.cloud.aiplatform_v1.types import ( + notebook_runtime as gca_notebook_runtime, +) from google.cloud.aiplatform_v1.types import notebook_runtime_template_ref from google.cloud.aiplatform_v1.types import notebook_service from google.cloud.aiplatform_v1.types import notebook_software_config @@ -3732,9 +3736,9 @@ def test_get_notebook_runtime_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_notebook_runtime] = ( + mock_rpc + ) request = {} client.get_notebook_runtime(request) @@ -4112,9 +4116,9 @@ def test_list_notebook_runtimes_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_notebook_runtimes - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_notebook_runtimes] = ( + mock_rpc + ) request = {} client.list_notebook_runtimes(request) @@ -5348,9 +5352,9 @@ def test_start_notebook_runtime_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.start_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.start_notebook_runtime] = ( + mock_rpc + ) request = {} client.start_notebook_runtime(request) @@ -5695,9 +5699,9 @@ def test_stop_notebook_runtime_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stop_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stop_notebook_runtime] = ( + mock_rpc + ) request = {} client.stop_notebook_runtime(request) @@ -8896,9 +8900,9 @@ def test_get_notebook_runtime_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_notebook_runtime] = ( + mock_rpc + ) request = {} client.get_notebook_runtime(request) @@ -9081,9 +9085,9 @@ def test_list_notebook_runtimes_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_notebook_runtimes - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_notebook_runtimes] = ( + mock_rpc + ) request = {} client.list_notebook_runtimes(request) @@ -9719,9 +9723,9 @@ def test_start_notebook_runtime_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.start_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.start_notebook_runtime] = ( + mock_rpc + ) request = {} client.start_notebook_runtime(request) @@ -9904,9 +9908,9 @@ def test_stop_notebook_runtime_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.stop_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stop_notebook_runtime] = ( + mock_rpc + ) request = {} client.stop_notebook_runtime(request) @@ -12079,9 +12083,9 @@ def get_message_fields(field): def test_create_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12232,9 +12236,9 @@ def test_get_notebook_runtime_template_rest_call_success(request_type): def test_get_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12367,9 +12371,9 @@ def test_list_notebook_runtime_templates_rest_call_success(request_type): def test_list_notebook_runtime_templates_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12499,9 +12503,9 @@ def test_delete_notebook_runtime_template_rest_call_success(request_type): def test_delete_notebook_runtime_template_rest_interceptors(null_interceptor): transport = 
transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12781,9 +12785,9 @@ def get_message_fields(field): def test_update_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12910,9 +12914,9 @@ def test_assign_notebook_runtime_rest_call_success(request_type): def test_assign_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13075,9 +13079,9 @@ def test_get_notebook_runtime_rest_call_success(request_type): def test_get_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13205,9 +13209,9 @@ def test_list_notebook_runtimes_rest_call_success(request_type): def test_list_notebook_runtimes_rest_interceptors(null_interceptor): transport = 
transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13336,9 +13340,9 @@ def test_delete_notebook_runtime_rest_call_success(request_type): def test_delete_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13464,9 +13468,9 @@ def test_upgrade_notebook_runtime_rest_call_success(request_type): def test_upgrade_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13592,9 +13596,9 @@ def test_start_notebook_runtime_rest_call_success(request_type): def test_start_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13720,9 +13724,9 @@ def test_stop_notebook_runtime_rest_call_success(request_type): def 
test_stop_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13970,9 +13974,9 @@ def get_message_fields(field): def test_create_notebook_execution_job_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -14115,9 +14119,9 @@ def test_get_notebook_execution_job_rest_call_success(request_type): def test_get_notebook_execution_job_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -14250,9 +14254,9 @@ def test_list_notebook_execution_jobs_rest_call_success(request_type): def test_list_notebook_execution_jobs_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -14381,9 +14385,9 @@ def 
test_delete_notebook_execution_job_rest_call_success(request_type): def test_delete_notebook_execution_job_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -15660,9 +15664,11 @@ async def test_create_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -15831,9 +15837,11 @@ async def test_get_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -15986,9 +15994,11 @@ async def test_list_notebook_runtime_templates_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16137,9 +16147,11 @@ async def test_delete_notebook_runtime_template_rest_asyncio_interceptors( ) transport = 
transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16437,9 +16449,11 @@ async def test_update_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16582,9 +16596,11 @@ async def test_assign_notebook_runtime_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16763,9 +16779,11 @@ async def test_get_notebook_runtime_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16909,9 +16927,11 @@ async def test_list_notebook_runtimes_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17056,9 +17076,11 @@ async def test_delete_notebook_runtime_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17200,9 +17222,11 @@ async def test_upgrade_notebook_runtime_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17344,9 +17368,11 @@ async def test_start_notebook_runtime_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17488,9 +17514,11 @@ async def test_stop_notebook_runtime_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17756,9 +17784,11 @@ async def test_create_notebook_execution_job_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17919,9 +17949,11 @@ async def test_get_notebook_execution_job_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -18071,9 +18103,11 @@ async def test_list_notebook_execution_jobs_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -18222,9 +18256,11 @@ async def test_delete_notebook_execution_job_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = 
NotebookServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_persistent_resource_service.py b/tests/unit/gapic/aiplatform_v1/test_persistent_resource_service.py index 3cab74c261..a2f0c19303 100644 --- a/tests/unit/gapic/aiplatform_v1/test_persistent_resource_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_persistent_resource_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1.services.persistent_resource_service import ( PersistentResourceServiceClient, ) -from google.cloud.aiplatform_v1.services.persistent_resource_service import pagers -from google.cloud.aiplatform_v1.services.persistent_resource_service import transports +from google.cloud.aiplatform_v1.services.persistent_resource_service import ( + pagers, +) +from google.cloud.aiplatform_v1.services.persistent_resource_service import ( + transports, +) from google.cloud.aiplatform_v1.types import accelerator_type from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import machine_resources @@ -5461,9 +5465,11 @@ def get_message_fields(field): def test_create_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -5605,9 +5611,11 @@ def test_get_persistent_resource_rest_call_success(request_type): def test_get_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + 
else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -5742,9 +5750,11 @@ def test_list_persistent_resources_rest_call_success(request_type): def test_list_persistent_resources_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -5879,9 +5889,11 @@ def test_delete_persistent_resource_rest_call_success(request_type): def test_delete_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -6159,9 +6171,11 @@ def get_message_fields(field): def test_update_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -6289,9 +6303,11 @@ def test_reboot_persistent_resource_rest_call_success(request_type): def test_reboot_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -7369,9 +7385,11 @@ async def test_create_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -7529,9 +7547,11 @@ async def test_get_persistent_resource_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -7682,9 +7702,11 @@ async def test_list_persistent_resources_rest_asyncio_interceptors(null_intercep ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -7835,9 +7857,11 @@ async def test_delete_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = 
transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -8131,9 +8155,11 @@ async def test_update_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -8277,9 +8303,11 @@ async def test_reboot_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_pipeline_service.py b/tests/unit/gapic/aiplatform_v1/test_pipeline_service.py index 718f9a62d2..08ca5e6617 100644 --- a/tests/unit/gapic/aiplatform_v1/test_pipeline_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_pipeline_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1.services.pipeline_service import ( PipelineServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.pipeline_service import PipelineServiceClient +from google.cloud.aiplatform_v1.services.pipeline_service import ( + PipelineServiceClient, +) 
from google.cloud.aiplatform_v1.services.pipeline_service import pagers from google.cloud.aiplatform_v1.services.pipeline_service import transports from google.cloud.aiplatform_v1.types import artifact @@ -90,7 +92,9 @@ from google.cloud.aiplatform_v1.types import pipeline_state from google.cloud.aiplatform_v1.types import service_networking from google.cloud.aiplatform_v1.types import training_pipeline -from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline +from google.cloud.aiplatform_v1.types import ( + training_pipeline as gca_training_pipeline, +) from google.cloud.aiplatform_v1.types import value from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1657,9 +1661,9 @@ def test_get_training_pipeline_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_training_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_training_pipeline] = ( + mock_rpc + ) request = {} client.get_training_pipeline(request) @@ -3254,9 +3258,9 @@ def test_create_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_pipeline_job] = ( + mock_rpc + ) request = {} client.create_pipeline_job(request) @@ -3642,9 +3646,9 @@ def test_get_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_pipeline_job] = ( + mock_rpc + ) request = {} client.get_pipeline_job(request) @@ -3995,9 +3999,9 @@ def test_list_pipeline_jobs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_pipeline_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_pipeline_jobs] = ( + mock_rpc + ) request = {} client.list_pipeline_jobs(request) @@ -4536,9 +4540,9 @@ def test_delete_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_pipeline_job] = ( + mock_rpc + ) request = {} client.delete_pipeline_job(request) @@ -5239,9 +5243,9 @@ def test_cancel_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_pipeline_job] = ( + mock_rpc + ) request = {} client.cancel_pipeline_job(request) @@ -6056,9 +6060,9 @@ def test_get_training_pipeline_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_training_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_training_pipeline] = ( + mock_rpc + ) request = {} client.get_training_pipeline(request) @@ -6872,9 +6876,9 @@ def test_create_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_pipeline_job] = ( + mock_rpc + ) request = {} client.create_pipeline_job(request) @@ -7067,9 +7071,9 @@ def test_get_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_pipeline_job] = ( + mock_rpc + ) request = {} client.get_pipeline_job(request) @@ -7251,9 +7255,9 @@ def test_list_pipeline_jobs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_pipeline_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_pipeline_jobs] = ( + mock_rpc + ) request = {} client.list_pipeline_jobs(request) @@ -7517,9 +7521,9 @@ def test_delete_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_pipeline_job] = ( + mock_rpc + ) request = {} client.delete_pipeline_job(request) @@ -7897,9 +7901,9 @@ def test_cancel_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_pipeline_job] = ( + mock_rpc + ) request = {} client.cancel_pipeline_job(request) @@ -9361,9 +9365,9 @@ def get_message_fields(field): def test_create_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9508,9 +9512,9 @@ def test_get_training_pipeline_rest_call_success(request_type): def test_get_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9638,9 +9642,9 @@ def test_list_training_pipelines_rest_call_success(request_type): def test_list_training_pipelines_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9769,9 +9773,9 @@ def test_delete_training_pipeline_rest_call_success(request_type): def test_delete_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9897,9 +9901,9 @@ def test_cancel_training_pipeline_rest_call_success(request_type): def test_cancel_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10209,9 +10213,9 @@ def get_message_fields(field): def test_create_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10355,9 +10359,9 @@ def test_get_pipeline_job_rest_call_success(request_type): def test_get_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None 
if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10482,9 +10486,9 @@ def test_list_pipeline_jobs_rest_call_success(request_type): def test_list_pipeline_jobs_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10609,9 +10613,9 @@ def test_delete_pipeline_job_rest_call_success(request_type): def test_delete_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10733,9 +10737,9 @@ def test_batch_delete_pipeline_jobs_rest_call_success(request_type): def test_batch_delete_pipeline_jobs_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10857,9 +10861,9 @@ def test_cancel_pipeline_job_rest_call_success(request_type): def test_cancel_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10966,9 +10970,9 @@ def test_batch_cancel_pipeline_jobs_rest_call_success(request_type): def test_batch_cancel_pipeline_jobs_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -12318,9 +12322,11 @@ async def test_create_training_pipeline_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12481,9 +12487,11 @@ async def test_get_training_pipeline_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12627,9 +12635,11 @@ async def test_list_training_pipelines_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12774,9 +12784,11 @@ async def test_delete_training_pipeline_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12918,9 +12930,11 @@ async def test_cancel_training_pipeline_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13246,9 +13260,11 @@ async def test_create_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13408,9 +13424,11 @@ async def test_get_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ 
-13552,9 +13570,11 @@ async def test_list_pipeline_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13695,9 +13715,11 @@ async def test_delete_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13835,9 +13857,11 @@ async def test_batch_delete_pipeline_jobs_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13976,9 +14000,11 @@ async def test_cancel_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -14101,9 +14127,11 @@ async def test_batch_cancel_pipeline_jobs_rest_asyncio_interceptors(null_interce ) transport 
= transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_prediction_service.py b/tests/unit/gapic/aiplatform_v1/test_prediction_service.py index 37611035e1..d39076f195 100644 --- a/tests/unit/gapic/aiplatform_v1/test_prediction_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_prediction_service.py @@ -1892,9 +1892,9 @@ def test_stream_raw_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stream_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stream_raw_predict] = ( + mock_rpc + ) request = {} client.stream_raw_predict(request) @@ -2476,9 +2476,9 @@ def test_direct_raw_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.direct_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.direct_raw_predict] = ( + mock_rpc + ) request = {} client.direct_raw_predict(request) @@ -2701,9 +2701,9 @@ def test_stream_direct_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.stream_direct_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stream_direct_predict] = ( + mock_rpc + ) request = [{}] client.stream_direct_predict(request) @@ -3016,9 +3016,9 @@ def test_streaming_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.streaming_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.streaming_predict] = ( + mock_rpc + ) request = [{}] client.streaming_predict(request) @@ -3430,9 +3430,9 @@ def test_streaming_raw_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.streaming_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.streaming_raw_predict] = ( + mock_rpc + ) request = [{}] client.streaming_raw_predict(request) @@ -3894,9 +3894,9 @@ def test_generate_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.generate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.generate_content] = ( + mock_rpc + ) request = {} client.generate_content(request) @@ -4891,9 +4891,9 @@ def test_stream_raw_predict_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.stream_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stream_raw_predict] = ( + mock_rpc + ) request = {} client.stream_raw_predict(request) @@ -5201,9 +5201,9 @@ def test_direct_raw_predict_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.direct_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.direct_raw_predict] = ( + mock_rpc + ) request = {} client.direct_raw_predict(request) @@ -5690,9 +5690,9 @@ def test_generate_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.generate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.generate_content] = ( + mock_rpc + ) request = {} client.generate_content(request) @@ -6762,9 +6762,9 @@ def test_predict_rest_call_success(request_type): def test_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -6890,9 +6890,9 @@ def test_raw_predict_rest_call_success(request_type): def test_raw_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = 
PredictionServiceClient(transport=transport) @@ -7020,9 +7020,9 @@ def test_stream_raw_predict_rest_call_success(request_type): def test_stream_raw_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -7145,9 +7145,9 @@ def test_direct_predict_rest_call_success(request_type): def test_direct_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -7277,9 +7277,9 @@ def test_direct_raw_predict_rest_call_success(request_type): def test_direct_raw_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -7450,9 +7450,9 @@ def test_server_streaming_predict_rest_call_success(request_type): def test_server_streaming_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -7594,9 +7594,9 @@ def test_explain_rest_call_success(request_type): def test_explain_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -7725,9 +7725,9 @@ def test_generate_content_rest_call_success(request_type): def test_generate_content_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -7864,9 +7864,9 @@ def test_stream_generate_content_rest_call_success(request_type): def test_stream_generate_content_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8850,9 +8850,11 @@ async def test_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -8994,9 +8996,11 @@ async def test_raw_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -9139,9 +9143,11 @@ async def test_stream_raw_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -9280,9 +9286,11 @@ async def test_direct_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -9429,9 +9437,11 @@ async def test_direct_raw_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = 
PredictionServiceAsyncClient(transport=transport) @@ -9631,9 +9641,11 @@ async def test_server_streaming_predict_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -9799,9 +9811,11 @@ async def test_explain_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -9946,9 +9960,11 @@ async def test_generate_content_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -10099,9 +10115,11 @@ async def test_stream_generate_content_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) diff --git 
a/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_execution_service.py b/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_execution_service.py index 6951219a20..80cd3804b0 100644 --- a/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_execution_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_execution_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1.services.reasoning_engine_execution_service import ( transports, ) -from google.cloud.aiplatform_v1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1.types import ( + reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore @@ -1327,9 +1329,9 @@ def test_query_reasoning_engine_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.query_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_reasoning_engine] = ( + mock_rpc + ) request = {} client.query_reasoning_engine(request) @@ -1772,9 +1774,9 @@ def test_query_reasoning_engine_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.query_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_reasoning_engine] = ( + mock_rpc + ) request = {} client.query_reasoning_engine(request) @@ -2315,9 +2317,11 @@ def test_query_reasoning_engine_rest_call_success(request_type): def test_query_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineExecutionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceClient(transport=transport) @@ -2461,9 +2465,11 @@ def test_stream_query_reasoning_engine_rest_call_success(request_type): def test_stream_query_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineExecutionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceClient(transport=transport) @@ -3305,9 +3311,11 @@ async def test_query_reasoning_engine_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncReasoningEngineExecutionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceAsyncClient(transport=transport) @@ -3467,9 +3475,11 @@ async def 
test_stream_query_reasoning_engine_rest_asyncio_interceptors( ) transport = transports.AsyncReasoningEngineExecutionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_service.py b/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_service.py index 637a178aa1..dfb69c2e85 100644 --- a/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_reasoning_engine_service.py @@ -72,13 +72,19 @@ from google.cloud.aiplatform_v1.services.reasoning_engine_service import ( ReasoningEngineServiceClient, ) -from google.cloud.aiplatform_v1.services.reasoning_engine_service import pagers -from google.cloud.aiplatform_v1.services.reasoning_engine_service import transports +from google.cloud.aiplatform_v1.services.reasoning_engine_service import ( + pagers, +) +from google.cloud.aiplatform_v1.services.reasoning_engine_service import ( + transports, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import env_var from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import reasoning_engine -from google.cloud.aiplatform_v1.types import reasoning_engine as gca_reasoning_engine +from google.cloud.aiplatform_v1.types import ( + reasoning_engine as gca_reasoning_engine, +) from google.cloud.aiplatform_v1.types import reasoning_engine_service from google.cloud.aiplatform_v1.types import service_networking from google.cloud.location import locations_pb2 @@ -1662,9 +1668,9 @@ def test_get_reasoning_engine_use_cached_wrapped_rpc(): 
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_reasoning_engine] = ( + mock_rpc + ) request = {} client.get_reasoning_engine(request) @@ -2015,9 +2021,9 @@ def test_list_reasoning_engines_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_reasoning_engines - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_reasoning_engines] = ( + mock_rpc + ) request = {} client.list_reasoning_engines(request) @@ -3384,9 +3390,9 @@ def test_get_reasoning_engine_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_reasoning_engine] = ( + mock_rpc + ) request = {} client.get_reasoning_engine(request) @@ -3569,9 +3575,9 @@ def test_list_reasoning_engines_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_reasoning_engines - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_reasoning_engines] = ( + mock_rpc + ) request = {} client.list_reasoning_engines(request) @@ -4747,9 +4753,11 @@ def get_message_fields(field): def test_create_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -4887,9 +4895,11 @@ def test_get_reasoning_engine_rest_call_success(request_type): def test_get_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -5019,9 +5029,11 @@ def test_list_reasoning_engines_rest_call_success(request_type): def test_list_reasoning_engines_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -5269,9 +5281,11 @@ def get_message_fields(field): def test_update_reasoning_engine_rest_interceptors(null_interceptor): transport = 
transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -5397,9 +5411,11 @@ def test_delete_reasoning_engine_rest_call_success(request_type): def test_delete_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -6422,9 +6438,11 @@ async def test_create_reasoning_engine_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -6580,9 +6598,11 @@ async def test_get_reasoning_engine_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -6730,9 +6750,11 @@ async def 
test_list_reasoning_engines_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -6998,9 +7020,11 @@ async def test_update_reasoning_engine_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -7144,9 +7168,11 @@ async def test_delete_reasoning_engine_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_schedule_service.py b/tests/unit/gapic/aiplatform_v1/test_schedule_service.py index b87c09a511..15586d948d 100644 --- a/tests/unit/gapic/aiplatform_v1/test_schedule_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_schedule_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1.services.schedule_service import ( ScheduleServiceAsyncClient, ) -from google.cloud.aiplatform_v1.services.schedule_service import ScheduleServiceClient +from 
google.cloud.aiplatform_v1.services.schedule_service import ( + ScheduleServiceClient, +) from google.cloud.aiplatform_v1.services.schedule_service import pagers from google.cloud.aiplatform_v1.services.schedule_service import transports from google.cloud.aiplatform_v1.types import accelerator_type @@ -5903,9 +5905,9 @@ def get_message_fields(field): def test_create_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6024,9 +6026,9 @@ def test_delete_schedule_rest_call_success(request_type): def test_delete_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6168,9 +6170,9 @@ def test_get_schedule_rest_call_success(request_type): def test_get_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6295,9 +6297,9 @@ def test_list_schedules_rest_call_success(request_type): def test_list_schedules_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None 
- if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6421,9 +6423,9 @@ def test_pause_schedule_rest_call_success(request_type): def test_pause_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6530,9 +6532,9 @@ def test_resume_schedule_rest_call_success(request_type): def test_resume_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6930,9 +6932,9 @@ def get_message_fields(field): def test_update_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -8154,9 +8156,11 @@ async def test_create_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -8292,9 +8296,11 @@ async def test_delete_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -8453,9 +8459,11 @@ async def test_get_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -8597,9 +8605,11 @@ async def test_list_schedules_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -8740,9 +8750,11 @@ async def test_pause_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ 
-8865,9 +8877,11 @@ async def test_resume_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -9281,9 +9295,11 @@ async def test_update_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_specialist_pool_service.py b/tests/unit/gapic/aiplatform_v1/test_specialist_pool_service.py index 35ecc52bb7..ed02f6e458 100644 --- a/tests/unit/gapic/aiplatform_v1/test_specialist_pool_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_specialist_pool_service.py @@ -72,11 +72,17 @@ from google.cloud.aiplatform_v1.services.specialist_pool_service import ( SpecialistPoolServiceClient, ) -from google.cloud.aiplatform_v1.services.specialist_pool_service import pagers -from google.cloud.aiplatform_v1.services.specialist_pool_service import transports +from google.cloud.aiplatform_v1.services.specialist_pool_service import ( + pagers, +) +from google.cloud.aiplatform_v1.services.specialist_pool_service import ( + transports, +) from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import specialist_pool -from google.cloud.aiplatform_v1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1.types import ( + specialist_pool as 
gca_specialist_pool, +) from google.cloud.aiplatform_v1.types import specialist_pool_service from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1290,9 +1296,9 @@ def test_create_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_specialist_pool] = ( + mock_rpc + ) request = {} client.create_specialist_pool(request) @@ -1659,9 +1665,9 @@ def test_get_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_specialist_pool] = ( + mock_rpc + ) request = {} client.get_specialist_pool(request) @@ -2014,9 +2020,9 @@ def test_list_specialist_pools_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_specialist_pools - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_specialist_pools] = ( + mock_rpc + ) request = {} client.list_specialist_pools(request) @@ -2556,9 +2562,9 @@ def test_delete_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_specialist_pool] = ( + mock_rpc + ) request = {} client.delete_specialist_pool(request) @@ -2899,9 +2905,9 @@ def test_update_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_specialist_pool] = ( + mock_rpc + ) request = {} client.update_specialist_pool(request) @@ -3191,9 +3197,9 @@ def test_create_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_specialist_pool] = ( + mock_rpc + ) request = {} client.create_specialist_pool(request) @@ -3383,9 +3389,9 @@ def test_get_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_specialist_pool] = ( + mock_rpc + ) request = {} client.get_specialist_pool(request) @@ -3568,9 +3574,9 @@ def test_list_specialist_pools_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_specialist_pools - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_specialist_pools] = ( + mock_rpc + ) request = {} client.list_specialist_pools(request) @@ -3836,9 +3842,9 @@ def test_delete_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_specialist_pool] = ( + mock_rpc + ) request = {} client.delete_specialist_pool(request) @@ -4022,9 +4028,9 @@ def test_update_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_specialist_pool] = ( + mock_rpc + ) request = {} client.update_specialist_pool(request) @@ -4727,9 +4733,11 @@ def get_message_fields(field): def test_create_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -4871,9 +4879,11 @@ def test_get_specialist_pool_rest_call_success(request_type): def test_get_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + 
None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -5003,9 +5013,11 @@ def test_list_specialist_pools_rest_call_success(request_type): def test_list_specialist_pools_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -5134,9 +5146,11 @@ def test_delete_specialist_pool_rest_call_success(request_type): def test_delete_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -5352,9 +5366,11 @@ def get_message_fields(field): def test_update_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -6348,9 +6364,11 @@ async def test_create_specialist_pool_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -6510,9 +6528,11 @@ async def test_get_specialist_pool_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -6658,9 +6678,11 @@ async def test_list_specialist_pools_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -6807,9 +6829,11 @@ async def test_delete_specialist_pool_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -7043,9 +7067,11 @@ async def test_update_specialist_pool_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_tensorboard_service.py b/tests/unit/gapic/aiplatform_v1/test_tensorboard_service.py index 350b011b33..42989535be 100644 --- a/tests/unit/gapic/aiplatform_v1/test_tensorboard_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_tensorboard_service.py @@ -73,7 +73,9 @@ TensorboardServiceClient, ) from google.cloud.aiplatform_v1.services.tensorboard_service import pagers -from google.cloud.aiplatform_v1.services.tensorboard_service import transports +from google.cloud.aiplatform_v1.services.tensorboard_service import ( + transports, +) from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.cloud.aiplatform_v1.types import tensorboard @@ -84,7 +86,9 @@ tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1.types import tensorboard_run -from google.cloud.aiplatform_v1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1.types import tensorboard_service from google.cloud.aiplatform_v1.types import tensorboard_time_series from google.cloud.aiplatform_v1.types import ( @@ -1285,9 +1289,9 @@ def test_create_tensorboard_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard] = ( + mock_rpc + ) request = {} client.create_tensorboard(request) @@ -1993,9 +1997,9 @@ def test_update_tensorboard_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard] = ( + mock_rpc + ) request = {} client.update_tensorboard(request) @@ -2356,9 +2360,9 @@ def test_list_tensorboards_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tensorboards - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboards] = ( + mock_rpc + ) request = {} client.list_tensorboards(request) @@ -2897,9 +2901,9 @@ def test_delete_tensorboard_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard] = ( + mock_rpc + ) request = {} client.delete_tensorboard(request) @@ -3244,9 +3248,9 @@ def test_read_tensorboard_usage_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_usage - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_usage] = ( + mock_rpc + ) request = {} client.read_tensorboard_usage(request) @@ -3584,9 +3588,9 @@ def test_read_tensorboard_size_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_size - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_size] = ( + mock_rpc + ) request = {} client.read_tensorboard_size(request) @@ -5961,9 +5965,9 @@ def test_create_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard_run] = ( + mock_rpc + ) request = {} client.create_tensorboard_run(request) @@ -6694,9 +6698,9 @@ def test_get_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_tensorboard_run] = ( + mock_rpc + ) request = {} client.get_tensorboard_run(request) @@ -7045,9 +7049,9 @@ def test_update_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard_run] = ( + mock_rpc + ) request = {} client.update_tensorboard_run(request) @@ -7410,9 +7414,9 @@ def test_list_tensorboard_runs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tensorboard_runs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboard_runs] = ( + mock_rpc + ) request = {} client.list_tensorboard_runs(request) @@ -7952,9 +7956,9 @@ def test_delete_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard_run] = ( + mock_rpc + ) request = {} client.delete_tensorboard_run(request) @@ -13023,9 +13027,9 @@ def test_create_tensorboard_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard] = ( + mock_rpc + ) request = {} client.create_tensorboard(request) @@ -13395,9 +13399,9 @@ def test_update_tensorboard_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard] = ( + mock_rpc + ) request = {} client.update_tensorboard(request) @@ -13586,9 +13590,9 @@ def test_list_tensorboards_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tensorboards - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboards] = ( + mock_rpc + ) request = {} client.list_tensorboards(request) @@ -13852,9 +13856,9 @@ def test_delete_tensorboard_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard] = ( + mock_rpc + ) request = {} client.delete_tensorboard(request) @@ -14036,9 +14040,9 @@ def test_read_tensorboard_usage_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_usage - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_usage] = ( + mock_rpc + ) request = {} client.read_tensorboard_usage(request) @@ -14223,9 +14227,9 @@ def test_read_tensorboard_size_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_size - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_size] = ( + mock_rpc + ) request = {} client.read_tensorboard_size(request) @@ -15489,9 +15493,9 @@ def test_create_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard_run] = ( + mock_rpc + ) request = {} client.create_tensorboard_run(request) @@ -15908,9 +15912,9 @@ def test_get_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_tensorboard_run] = ( + mock_rpc + ) request = {} client.get_tensorboard_run(request) @@ -16093,9 +16097,9 @@ def test_update_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard_run] = ( + mock_rpc + ) request = {} client.update_tensorboard_run(request) @@ -16288,9 +16292,9 @@ def test_list_tensorboard_runs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_tensorboard_runs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboard_runs] = ( + mock_rpc + ) request = {} client.list_tensorboard_runs(request) @@ -16561,9 +16565,9 @@ def test_delete_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard_run] = ( + mock_rpc + ) request = {} client.delete_tensorboard_run(request) @@ -21171,9 +21175,9 @@ def get_message_fields(field): def test_create_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21317,9 +21321,9 @@ def test_get_tensorboard_rest_call_success(request_type): def test_get_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21529,9 +21533,9 @@ def get_message_fields(field): def test_update_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21659,9 +21663,9 @@ def test_list_tensorboards_rest_call_success(request_type): def test_list_tensorboards_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21786,9 +21790,9 @@ def test_delete_tensorboard_rest_call_success(request_type): def test_delete_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21917,9 +21921,9 @@ def test_read_tensorboard_usage_rest_call_success(request_type): def test_read_tensorboard_usage_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22054,9 +22058,9 @@ def test_read_tensorboard_size_rest_call_success(request_type): def test_read_tensorboard_size_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - 
else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22276,9 +22280,9 @@ def get_message_fields(field): def test_create_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22423,9 +22427,9 @@ def test_get_tensorboard_experiment_rest_call_success(request_type): def test_get_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22653,9 +22657,9 @@ def get_message_fields(field): def test_update_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22790,9 +22794,9 @@ def test_list_tensorboard_experiments_rest_call_success(request_type): def test_list_tensorboard_experiments_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22922,9 +22926,9 @@ def test_delete_tensorboard_experiment_rest_call_success(request_type): def test_delete_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23142,9 +23146,9 @@ def get_message_fields(field): def test_create_tensorboard_run_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23275,9 +23279,9 @@ def test_batch_create_tensorboard_runs_rest_call_success(request_type): def test_batch_create_tensorboard_runs_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23420,9 +23424,9 @@ def test_get_tensorboard_run_rest_call_success(request_type): def test_get_tensorboard_run_rest_interceptors(null_interceptor): transport = 
transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23642,9 +23646,9 @@ def get_message_fields(field): def test_update_tensorboard_run_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23776,9 +23780,9 @@ def test_list_tensorboard_runs_rest_call_success(request_type): def test_list_tensorboard_runs_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23907,9 +23911,9 @@ def test_delete_tensorboard_run_rest_call_success(request_type): def test_delete_tensorboard_run_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24042,9 +24046,9 @@ def test_batch_create_tensorboard_time_series_rest_call_success(request_type): def 
test_batch_create_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24289,9 +24293,9 @@ def get_message_fields(field): def test_create_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24443,9 +24447,9 @@ def test_get_tensorboard_time_series_rest_call_success(request_type): def test_get_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24688,9 +24692,9 @@ def get_message_fields(field): def test_update_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24829,9 +24833,9 @@ def 
test_list_tensorboard_time_series_rest_call_success(request_type): def test_list_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24961,9 +24965,9 @@ def test_delete_tensorboard_time_series_rest_call_success(request_type): def test_delete_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25100,9 +25104,9 @@ def test_batch_read_tensorboard_time_series_data_rest_call_success(request_type) def test_batch_read_tensorboard_time_series_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25244,9 +25248,9 @@ def test_read_tensorboard_time_series_data_rest_call_success(request_type): def test_read_tensorboard_time_series_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25388,9 +25392,9 @@ def test_read_tensorboard_blob_data_rest_call_success(request_type): def test_read_tensorboard_blob_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25526,9 +25530,9 @@ def test_write_tensorboard_experiment_data_rest_call_success(request_type): def test_write_tensorboard_experiment_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25666,9 +25670,9 @@ def test_write_tensorboard_run_data_rest_call_success(request_type): def test_write_tensorboard_run_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25805,9 +25809,9 @@ def test_export_tensorboard_time_series_data_rest_call_success(request_type): def test_export_tensorboard_time_series_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -27354,9 +27358,11 @@ async def test_create_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -27516,9 +27522,11 @@ async def test_get_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -27744,9 +27752,11 @@ async def test_update_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -27890,9 +27900,11 @@ async def test_list_tensorboards_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - 
else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28033,9 +28045,11 @@ async def test_delete_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28180,9 +28194,11 @@ async def test_read_tensorboard_usage_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28333,9 +28349,11 @@ async def test_read_tensorboard_size_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28573,9 +28591,11 @@ async def test_create_tensorboard_experiment_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( 
+ None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28736,9 +28756,11 @@ async def test_get_tensorboard_experiment_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28986,9 +29008,11 @@ async def test_update_tensorboard_experiment_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29139,9 +29163,11 @@ async def test_list_tensorboard_experiments_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29290,9 +29316,11 @@ async def test_delete_tensorboard_experiment_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29526,9 +29554,11 @@ async def test_create_tensorboard_run_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29677,9 +29707,11 @@ async def test_batch_create_tensorboard_runs_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29838,9 +29870,11 @@ async def test_get_tensorboard_run_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30076,9 +30110,11 @@ async def test_update_tensorboard_run_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = 
TensorboardServiceAsyncClient(transport=transport) @@ -30226,9 +30262,11 @@ async def test_list_tensorboard_runs_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30373,9 +30411,11 @@ async def test_delete_tensorboard_run_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30528,9 +30568,11 @@ async def test_batch_create_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30793,9 +30835,11 @@ async def test_create_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30963,9 +31007,11 
@@ async def test_get_tensorboard_time_series_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31228,9 +31274,11 @@ async def test_update_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31385,9 +31433,11 @@ async def test_list_tensorboard_time_series_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31536,9 +31586,11 @@ async def test_delete_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31695,9 +31747,11 @@ async def 
test_batch_read_tensorboard_time_series_data_rest_asyncio_interceptors ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31859,9 +31913,11 @@ async def test_read_tensorboard_time_series_data_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32017,9 +32073,11 @@ async def test_read_tensorboard_blob_data_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32177,9 +32235,11 @@ async def test_write_tensorboard_experiment_data_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32333,9 +32393,11 @@ async def test_write_tensorboard_run_data_rest_asyncio_interceptors(null_interce ) 
transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32494,9 +32556,11 @@ async def test_export_tensorboard_time_series_data_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_vertex_rag_data_service.py b/tests/unit/gapic/aiplatform_v1/test_vertex_rag_data_service.py index 044c4dc85a..bc6d7b1274 100644 --- a/tests/unit/gapic/aiplatform_v1/test_vertex_rag_data_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_vertex_rag_data_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1.services.vertex_rag_data_service import ( VertexRagDataServiceClient, ) -from google.cloud.aiplatform_v1.services.vertex_rag_data_service import pagers -from google.cloud.aiplatform_v1.services.vertex_rag_data_service import transports +from google.cloud.aiplatform_v1.services.vertex_rag_data_service import ( + pagers, +) +from google.cloud.aiplatform_v1.services.vertex_rag_data_service import ( + transports, +) from google.cloud.aiplatform_v1.types import api_auth from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import io @@ -1288,9 +1292,9 @@ def test_create_rag_corpus_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rag_corpus] = ( + mock_rpc + ) request = {} client.create_rag_corpus(request) @@ -1670,9 +1674,9 @@ def test_update_rag_corpus_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_rag_corpus] = ( + mock_rpc + ) request = {} client.update_rag_corpus(request) @@ -2379,9 +2383,9 @@ def test_list_rag_corpora_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_rag_corpora - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rag_corpora] = ( + mock_rpc + ) request = {} client.list_rag_corpora(request) @@ -2900,9 +2904,9 @@ def test_delete_rag_corpus_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_rag_corpus] = ( + mock_rpc + ) request = {} client.delete_rag_corpus(request) @@ -3634,9 +3638,9 @@ def test_import_rag_files_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.import_rag_files - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_rag_files] = ( + mock_rpc + ) request = {} client.import_rag_files(request) @@ -5519,9 +5523,9 @@ def test_get_rag_engine_config_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_rag_engine_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rag_engine_config] = ( + mock_rpc + ) request = {} client.get_rag_engine_config(request) @@ -5791,9 +5795,9 @@ def test_create_rag_corpus_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rag_corpus] = ( + mock_rpc + ) request = {} client.create_rag_corpus(request) @@ -5992,9 +5996,9 @@ def test_update_rag_corpus_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_rag_corpus] = ( + mock_rpc + ) request = {} client.update_rag_corpus(request) @@ -6362,9 +6366,9 @@ def test_list_rag_corpora_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_rag_corpora - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rag_corpora] = ( + mock_rpc + ) request = {} client.list_rag_corpora(request) @@ -6621,9 +6625,9 @@ def test_delete_rag_corpus_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_rag_corpus] = ( + mock_rpc + ) request = {} client.delete_rag_corpus(request) @@ -7019,9 +7023,9 @@ def test_import_rag_files_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_rag_files - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_rag_files] = ( + mock_rpc + ) request = {} client.import_rag_files(request) @@ -8019,9 +8023,9 @@ def test_get_rag_engine_config_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_rag_engine_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rag_engine_config] = ( + mock_rpc + ) request = {} client.get_rag_engine_config(request) @@ -9056,9 +9060,11 @@ def get_message_fields(field): def test_create_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9286,9 +9292,11 @@ def get_message_fields(field): def test_update_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9420,9 +9428,11 @@ def test_get_rag_corpus_rest_call_success(request_type): def test_get_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9548,9 +9558,11 @@ def test_list_rag_corpora_rest_call_success(request_type): def test_list_rag_corpora_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9675,9 +9687,11 @@ def test_delete_rag_corpus_rest_call_success(request_type): def test_delete_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9802,9 +9816,11 @@ def test_upload_rag_file_rest_call_success(request_type): def test_upload_rag_file_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9929,9 +9945,11 @@ def test_import_rag_files_rest_call_success(request_type): def test_import_rag_files_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10067,9 +10085,11 @@ def test_get_rag_file_rest_call_success(request_type): def test_get_rag_file_rest_interceptors(null_interceptor): transport = 
transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10195,9 +10215,11 @@ def test_list_rag_files_rest_call_success(request_type): def test_list_rag_files_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10326,9 +10348,11 @@ def test_delete_rag_file_rest_call_success(request_type): def test_delete_rag_file_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10531,9 +10555,11 @@ def get_message_fields(field): def test_update_rag_engine_config_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10661,9 +10687,11 @@ def test_get_rag_engine_config_rest_call_success(request_type): def 
test_get_rag_engine_config_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -11813,9 +11841,11 @@ async def test_create_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12059,9 +12089,11 @@ async def test_update_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12209,9 +12241,11 @@ async def test_get_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12353,9 +12387,11 @@ async def 
test_list_rag_corpora_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12496,9 +12532,11 @@ async def test_delete_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12639,9 +12677,11 @@ async def test_upload_rag_file_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12782,9 +12822,11 @@ async def test_import_rag_files_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12936,9 +12978,11 @@ async def 
test_get_rag_file_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13080,9 +13124,11 @@ async def test_list_rag_files_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13227,9 +13273,11 @@ async def test_delete_rag_file_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13448,9 +13496,11 @@ async def test_update_rag_engine_config_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13596,9 +13646,11 @@ async def 
test_get_rag_engine_config_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_vertex_rag_service.py b/tests/unit/gapic/aiplatform_v1/test_vertex_rag_service.py index 3ba6425180..df2cd735a8 100644 --- a/tests/unit/gapic/aiplatform_v1/test_vertex_rag_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_vertex_rag_service.py @@ -1251,9 +1251,9 @@ def test_retrieve_contexts_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.retrieve_contexts - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retrieve_contexts] = ( + mock_rpc + ) request = {} client.retrieve_contexts(request) @@ -1958,9 +1958,9 @@ def test_corroborate_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.corroborate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.corroborate_content] = ( + mock_rpc + ) request = {} client.corroborate_content(request) @@ -2250,9 +2250,9 @@ def test_retrieve_contexts_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.retrieve_contexts - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retrieve_contexts] = ( + mock_rpc + ) request = {} client.retrieve_contexts(request) @@ -2629,9 +2629,9 @@ def test_corroborate_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.corroborate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.corroborate_content] = ( + mock_rpc + ) request = {} client.corroborate_content(request) @@ -3136,9 +3136,9 @@ def test_retrieve_contexts_rest_call_success(request_type): def test_retrieve_contexts_rest_interceptors(null_interceptor): transport = transports.VertexRagServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceClient(transport=transport) @@ -3266,9 +3266,9 @@ def test_augment_prompt_rest_call_success(request_type): def test_augment_prompt_rest_interceptors(null_interceptor): transport = transports.VertexRagServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceClient(transport=transport) @@ -3398,9 +3398,9 @@ def test_corroborate_content_rest_call_success(request_type): def test_corroborate_content_rest_interceptors(null_interceptor): transport = transports.VertexRagServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.VertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceClient(transport=transport) @@ -4251,9 +4251,11 @@ async def test_retrieve_contexts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceAsyncClient(transport=transport) @@ -4397,9 +4399,11 @@ async def test_augment_prompt_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceAsyncClient(transport=transport) @@ -4546,9 +4550,11 @@ async def test_corroborate_content_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1/test_vizier_service.py b/tests/unit/gapic/aiplatform_v1/test_vizier_service.py index ccdab605d4..58175ba72f 100644 --- a/tests/unit/gapic/aiplatform_v1/test_vizier_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_vizier_service.py @@ -66,8 +66,12 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import 
MutualTLSChannelError -from google.cloud.aiplatform_v1.services.vizier_service import VizierServiceAsyncClient -from google.cloud.aiplatform_v1.services.vizier_service import VizierServiceClient +from google.cloud.aiplatform_v1.services.vizier_service import ( + VizierServiceAsyncClient, +) +from google.cloud.aiplatform_v1.services.vizier_service import ( + VizierServiceClient, +) from google.cloud.aiplatform_v1.services.vizier_service import pagers from google.cloud.aiplatform_v1.services.vizier_service import transports from google.cloud.aiplatform_v1.types import study @@ -4519,9 +4523,9 @@ def test_add_trial_measurement_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_trial_measurement - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_trial_measurement] = ( + mock_rpc + ) request = {} client.add_trial_measurement(request) @@ -5870,9 +5874,9 @@ def test_list_optimal_trials_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_optimal_trials - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_optimal_trials] = ( + mock_rpc + ) request = {} client.list_optimal_trials(request) @@ -7889,9 +7893,9 @@ def test_add_trial_measurement_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.add_trial_measurement - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_trial_measurement] = ( + mock_rpc + ) request = {} client.add_trial_measurement(request) @@ -8566,9 +8570,9 @@ def test_list_optimal_trials_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_optimal_trials - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_optimal_trials] = ( + mock_rpc + ) request = {} client.list_optimal_trials(request) @@ -9825,9 +9829,9 @@ def get_message_fields(field): def test_create_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -9956,9 +9960,9 @@ def test_get_study_rest_call_success(request_type): def test_get_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10079,9 +10083,9 @@ def test_list_studies_rest_call_success(request_type): def test_list_studies_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10200,9 +10204,9 @@ def test_delete_study_rest_call_success(request_type): def test_delete_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10319,9 +10323,9 @@ def test_lookup_study_rest_call_success(request_type): def test_lookup_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10440,9 +10444,9 @@ def test_suggest_trials_rest_call_success(request_type): def test_suggest_trials_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10674,9 +10678,9 @@ def get_message_fields(field): def test_create_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ 
-10813,9 +10817,9 @@ def test_get_trial_rest_call_success(request_type): def test_get_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10936,9 +10940,9 @@ def test_list_trials_rest_call_success(request_type): def test_list_trials_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11079,9 +11083,9 @@ def test_add_trial_measurement_rest_call_success(request_type): def test_add_trial_measurement_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11221,9 +11225,9 @@ def test_complete_trial_rest_call_success(request_type): def test_complete_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11344,9 +11348,9 @@ def 
test_delete_trial_rest_call_success(request_type): def test_delete_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11457,9 +11461,9 @@ def test_check_trial_early_stopping_state_rest_call_success(request_type): def test_check_trial_early_stopping_state_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11599,9 +11603,9 @@ def test_stop_trial_rest_call_success(request_type): def test_stop_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11723,9 +11727,9 @@ def test_list_optimal_trials_rest_call_success(request_type): def test_list_optimal_trials_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -12989,9 +12993,9 @@ async def 
test_create_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13138,9 +13142,9 @@ async def test_get_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13279,9 +13283,9 @@ async def test_list_studies_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13418,9 +13422,9 @@ async def test_delete_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13555,9 +13559,9 @@ async def test_lookup_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13692,9 +13696,9 @@ async def test_suggest_trials_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13945,9 +13949,9 @@ async def test_create_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14102,9 +14106,9 @@ async def test_get_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14243,9 +14247,9 @@ async def test_list_trials_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = 
VizierServiceAsyncClient(transport=transport) @@ -14402,9 +14406,9 @@ async def test_add_trial_measurement_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14560,9 +14564,9 @@ async def test_complete_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14702,9 +14706,9 @@ async def test_delete_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14833,9 +14837,9 @@ async def test_check_trial_early_stopping_state_rest_asyncio_interceptors( ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14995,9 +14999,9 @@ async def test_stop_trial_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -15135,9 +15139,9 @@ async def test_list_optimal_trials_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py index 00742bab5b..303528e457 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py @@ -73,7 +73,9 @@ DatasetServiceClient, ) from google.cloud.aiplatform_v1beta1.services.dataset_service import pagers -from google.cloud.aiplatform_v1beta1.services.dataset_service import transports +from google.cloud.aiplatform_v1beta1.services.dataset_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import annotation from google.cloud.aiplatform_v1beta1.types import annotation_spec from google.cloud.aiplatform_v1beta1.types import content @@ -82,7 +84,9 @@ from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset from google.cloud.aiplatform_v1beta1.types import dataset_service from google.cloud.aiplatform_v1beta1.types import dataset_version -from google.cloud.aiplatform_v1beta1.types import dataset_version as gca_dataset_version +from google.cloud.aiplatform_v1beta1.types import ( + dataset_version as gca_dataset_version, +) from 
google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import openapi @@ -3856,9 +3860,9 @@ def test_create_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_dataset_version] = ( + mock_rpc + ) request = {} client.create_dataset_version(request) @@ -4224,9 +4228,9 @@ def test_update_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_dataset_version] = ( + mock_rpc + ) request = {} client.update_dataset_version(request) @@ -4586,9 +4590,9 @@ def test_delete_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_dataset_version] = ( + mock_rpc + ) request = {} client.delete_dataset_version(request) @@ -4947,9 +4951,9 @@ def test_get_dataset_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_dataset_version] = ( + mock_rpc + ) request = {} client.get_dataset_version(request) @@ -5308,9 +5312,9 @@ def test_list_dataset_versions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_dataset_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_dataset_versions] = ( + mock_rpc + ) request = {} client.list_dataset_versions(request) @@ -6734,9 +6738,9 @@ def test_search_data_items_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.search_data_items - ] = mock_rpc + client._transport._wrapped_methods[client._transport.search_data_items] = ( + mock_rpc + ) request = {} client.search_data_items(request) @@ -7197,9 +7201,9 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_saved_queries - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_saved_queries] = ( + mock_rpc + ) request = {} client.list_saved_queries(request) @@ -7738,9 +7742,9 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_saved_query - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_saved_query] = ( + mock_rpc + ) request = {} client.delete_saved_query(request) @@ -8091,9 +8095,9 @@ def test_get_annotation_spec_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_annotation_spec - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_annotation_spec] = ( + mock_rpc + ) request = {} client.get_annotation_spec(request) @@ -8437,9 +8441,9 @@ def test_list_annotations_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_annotations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_annotations] = ( + mock_rpc + ) request = {} client.list_annotations(request) @@ -10768,9 +10772,9 @@ def test_create_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_dataset_version] = ( + mock_rpc + ) request = {} client.create_dataset_version(request) @@ -10963,9 +10967,9 @@ def test_update_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_dataset_version] = ( + mock_rpc + ) request = {} client.update_dataset_version(request) @@ -11158,9 +11162,9 @@ def test_delete_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_dataset_version] = ( + mock_rpc + ) request = {} client.delete_dataset_version(request) @@ -11341,9 +11345,9 @@ def test_get_dataset_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_dataset_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_dataset_version] = ( + mock_rpc + ) request = {} client.get_dataset_version(request) @@ -11528,9 +11532,9 @@ def test_list_dataset_versions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_dataset_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_dataset_versions] = ( + mock_rpc + ) request = {} client.list_dataset_versions(request) @@ -12246,9 +12250,9 @@ def test_search_data_items_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.search_data_items - ] = mock_rpc + client._transport._wrapped_methods[client._transport.search_data_items] = ( + mock_rpc + ) request = {} client.search_data_items(request) @@ -12470,9 +12474,9 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_saved_queries - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_saved_queries] = ( + mock_rpc + ) request = {} client.list_saved_queries(request) @@ -12740,9 +12744,9 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_saved_query - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_saved_query] = ( + mock_rpc + ) request = {} client.delete_saved_query(request) @@ -12923,9 +12927,9 @@ def test_get_annotation_spec_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_annotation_spec - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_annotation_spec] = ( + mock_rpc + ) request = {} client.get_annotation_spec(request) @@ -13107,9 +13111,9 @@ def test_list_annotations_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_annotations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_annotations] = ( + mock_rpc + ) request = {} client.list_annotations(request) @@ -14953,9 +14957,9 @@ def get_message_fields(field): def test_create_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15098,9 +15102,9 @@ def test_get_dataset_rest_call_success(request_type): def test_get_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15352,9 +15356,9 @@ def get_message_fields(field): def test_update_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15479,9 +15483,9 @@ def test_list_datasets_rest_call_success(request_type): def test_list_datasets_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15605,9 +15609,9 @@ def test_delete_dataset_rest_call_success(request_type): def test_delete_dataset_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15726,9 +15730,9 @@ def test_import_data_rest_call_success(request_type): def test_import_data_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -15847,9 +15851,9 @@ def test_export_data_rest_call_success(request_type): def test_export_data_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16058,9 +16062,9 @@ def get_message_fields(field): def test_create_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -16296,9 +16300,9 @@ def get_message_fields(field): def test_update_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16424,9 +16428,9 @@ def test_delete_dataset_version_rest_call_success(request_type): def test_delete_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16570,9 +16574,9 @@ def test_get_dataset_version_rest_call_success(request_type): def test_get_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16700,9 +16704,9 @@ def test_list_dataset_versions_rest_call_success(request_type): def test_list_dataset_versions_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -16831,9 +16835,9 @@ def test_restore_dataset_version_rest_call_success(request_type): def test_restore_dataset_version_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -16961,9 +16965,9 @@ def test_list_data_items_rest_call_success(request_type): def test_list_data_items_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -17093,9 +17097,9 @@ def test_search_data_items_rest_call_success(request_type): def test_search_data_items_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -17225,9 +17229,9 @@ def test_list_saved_queries_rest_call_success(request_type): def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -17356,9 +17360,9 @@ def test_delete_saved_query_rest_call_success(request_type): def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -17494,9 +17498,9 @@ def test_get_annotation_spec_rest_call_success(request_type): def test_get_annotation_spec_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -17628,9 +17632,9 @@ def test_list_annotations_rest_call_success(request_type): def test_list_annotations_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -17752,9 +17756,9 @@ def test_assess_data_rest_call_success(request_type): def test_assess_data_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = 
DatasetServiceClient(transport=transport) @@ -17875,9 +17879,9 @@ def test_assemble_data_rest_call_success(request_type): def test_assemble_data_rest_interceptors(null_interceptor): transport = transports.DatasetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DatasetServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.DatasetServiceRestInterceptor() + ), ) client = DatasetServiceClient(transport=transport) @@ -19219,9 +19223,11 @@ async def test_create_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19383,9 +19389,11 @@ async def test_get_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19653,9 +19661,11 @@ async def test_update_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19797,9 +19807,11 @@ async def 
test_list_datasets_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -19940,9 +19952,11 @@ async def test_delete_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20080,9 +20094,11 @@ async def test_import_data_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20219,9 +20235,11 @@ async def test_export_data_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20446,9 +20464,11 @@ async def test_create_dataset_version_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncDatasetServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20700,9 +20720,11 @@ async def test_update_dataset_version_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -20844,9 +20866,11 @@ async def test_delete_dataset_version_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21006,9 +21030,11 @@ async def test_get_dataset_version_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21152,9 +21178,11 @@ async def test_list_dataset_versions_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21299,9 +21327,11 @@ async def test_restore_dataset_version_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21445,9 +21475,11 @@ async def test_list_data_items_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21594,9 +21626,11 @@ async def test_search_data_items_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21743,9 +21777,11 @@ async def test_list_saved_queries_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -21890,9 +21926,11 @@ async def test_delete_saved_query_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -22044,9 +22082,11 @@ async def test_get_annotation_spec_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -22194,9 +22234,11 @@ async def test_list_annotations_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -22337,9 +22379,11 @@ async def test_assess_data_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) @@ -22476,9 +22520,11 @@ async 
def test_assemble_data_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncDatasetServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDatasetServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDatasetServiceRestInterceptor() + ), ) client = DatasetServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py index 206c9b33f1..ec79aa49ae 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_deployment_resource_pool_service.py @@ -83,7 +83,9 @@ from google.cloud.aiplatform_v1beta1.types import ( deployment_resource_pool as gca_deployment_resource_pool, ) -from google.cloud.aiplatform_v1beta1.types import deployment_resource_pool_service +from google.cloud.aiplatform_v1beta1.types import ( + deployment_resource_pool_service, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import endpoint from google.cloud.aiplatform_v1beta1.types import machine_resources @@ -3364,9 +3366,9 @@ def test_query_deployed_models_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.query_deployed_models - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_deployed_models] = ( + mock_rpc + ) request = {} client.query_deployed_models(request) @@ -4918,9 +4920,9 @@ def test_query_deployed_models_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.query_deployed_models - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_deployed_models] = ( + mock_rpc + ) request = {} client.query_deployed_models(request) @@ -5694,9 +5696,11 @@ def test_create_deployment_resource_pool_rest_call_success(request_type): def test_create_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -5840,9 +5844,11 @@ def test_get_deployment_resource_pool_rest_call_success(request_type): def test_get_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -5983,9 +5989,11 @@ def test_list_deployment_resource_pools_rest_call_success(request_type): def test_list_deployment_resource_pools_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -6231,9 
+6239,11 @@ def get_message_fields(field): def test_update_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -6363,9 +6373,11 @@ def test_delete_deployment_resource_pool_rest_call_success(request_type): def test_delete_deployment_resource_pool_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -6507,9 +6519,11 @@ def test_query_deployed_models_rest_call_success(request_type): def test_query_deployed_models_rest_interceptors(null_interceptor): transport = transports.DeploymentResourcePoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.DeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.DeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceClient(transport=transport) @@ -7460,9 +7474,11 @@ async def test_create_deployment_resource_pool_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -7622,9 +7638,11 @@ async def test_get_deployment_resource_pool_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -7783,9 +7801,11 @@ async def test_list_deployment_resource_pools_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -8049,9 +8069,11 @@ async def test_update_deployment_resource_pool_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -8199,9 +8221,11 @@ async def test_delete_deployment_resource_pool_rest_asyncio_interceptors( ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) @@ -8359,9 +8383,11 @@ async def test_query_deployed_models_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncDeploymentResourcePoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncDeploymentResourcePoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncDeploymentResourcePoolServiceRestInterceptor() + ), ) client = DeploymentResourcePoolServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py index d0c60e9ca3..ab1205dcd1 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py @@ -73,7 +73,9 @@ EndpointServiceClient, ) from google.cloud.aiplatform_v1beta1.services.endpoint_service import pagers -from google.cloud.aiplatform_v1beta1.services.endpoint_service import transports +from google.cloud.aiplatform_v1beta1.services.endpoint_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import accelerator_type from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import endpoint @@ -4275,9 +4277,9 @@ def test_mutate_deployed_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_model] = ( + mock_rpc + ) request = {} client.mutate_deployed_model(request) @@ -6906,9 +6908,9 @@ def test_mutate_deployed_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_model] = ( + mock_rpc + ) request = {} client.mutate_deployed_model(request) @@ -8454,9 +8456,9 @@ def get_message_fields(field): def test_create_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8606,9 +8608,9 @@ def test_get_endpoint_rest_call_success(request_type): def test_get_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -8733,9 +8735,9 @@ def test_list_endpoints_rest_call_success(request_type): def test_list_endpoints_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9128,9 +9130,9 @@ def get_message_fields(field): def test_update_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9253,9 +9255,9 @@ def test_update_endpoint_long_running_rest_call_success(request_type): def test_update_endpoint_long_running_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9377,9 +9379,9 @@ def test_delete_endpoint_rest_call_success(request_type): def test_delete_endpoint_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9500,9 +9502,9 @@ def test_deploy_model_rest_call_success(request_type): def test_deploy_model_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9623,9 +9625,9 @@ def test_undeploy_model_rest_call_success(request_type): def test_undeploy_model_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9746,9 +9748,9 @@ def test_mutate_deployed_model_rest_call_success(request_type): def test_mutate_deployed_model_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -9874,9 +9876,9 @@ def test_set_publisher_model_config_rest_call_success(request_type): def test_set_publisher_model_config_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -10005,9 +10007,9 @@ def test_fetch_publisher_model_config_rest_call_success(request_type): def test_fetch_publisher_model_config_rest_interceptors(null_interceptor): transport = transports.EndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EndpointServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.EndpointServiceRestInterceptor() + ), ) client = EndpointServiceClient(transport=transport) @@ -11269,9 +11271,11 @@ async def test_create_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -11438,9 +11442,11 @@ async def test_get_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -11582,9 +11588,11 @@ async def test_list_endpoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -11994,9 +12002,11 @@ async def test_update_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ 
-12136,9 +12146,11 @@ async def test_update_endpoint_long_running_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -12278,9 +12290,11 @@ async def test_delete_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -12418,9 +12432,11 @@ async def test_deploy_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -12558,9 +12574,11 @@ async def test_undeploy_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -12698,9 +12716,11 @@ async def test_mutate_deployed_model_rest_asyncio_interceptors(null_interceptor) ) transport = 
transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -12842,9 +12862,11 @@ async def test_set_publisher_model_config_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) @@ -12990,9 +13012,11 @@ async def test_fetch_publisher_model_config_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEndpointServiceRestInterceptor() + ), ) client = EndpointServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_evaluation_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_evaluation_service.py index 7b41c02232..11550bf572 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_evaluation_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_evaluation_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1beta1.services.evaluation_service import ( EvaluationServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.evaluation_service import transports +from google.cloud.aiplatform_v1beta1.services.evaluation_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from 
google.cloud.aiplatform_v1beta1.types import evaluation_service from google.cloud.aiplatform_v1beta1.types import io @@ -1264,9 +1266,9 @@ def test_evaluate_instances_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.evaluate_instances - ] = mock_rpc + client._transport._wrapped_methods[client._transport.evaluate_instances] = ( + mock_rpc + ) request = {} client.evaluate_instances(request) @@ -1508,9 +1510,9 @@ def test_evaluate_dataset_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.evaluate_dataset - ] = mock_rpc + client._transport._wrapped_methods[client._transport.evaluate_dataset] = ( + mock_rpc + ) request = {} client.evaluate_dataset(request) @@ -1697,9 +1699,9 @@ def test_evaluate_instances_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.evaluate_instances - ] = mock_rpc + client._transport._wrapped_methods[client._transport.evaluate_instances] = ( + mock_rpc + ) request = {} client.evaluate_instances(request) @@ -1820,9 +1822,9 @@ def test_evaluate_dataset_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.evaluate_dataset - ] = mock_rpc + client._transport._wrapped_methods[client._transport.evaluate_dataset] = ( + mock_rpc + ) request = {} client.evaluate_dataset(request) @@ -2222,9 +2224,9 @@ def test_evaluate_instances_rest_call_success(request_type): def test_evaluate_instances_rest_interceptors(null_interceptor): transport = transports.EvaluationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EvaluationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EvaluationServiceRestInterceptor() + ), ) client = EvaluationServiceClient(transport=transport) @@ -2349,9 +2351,9 @@ def test_evaluate_dataset_rest_call_success(request_type): def test_evaluate_dataset_rest_interceptors(null_interceptor): transport = transports.EvaluationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EvaluationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.EvaluationServiceRestInterceptor() + ), ) client = EvaluationServiceClient(transport=transport) @@ -3194,9 +3196,11 @@ async def test_evaluate_instances_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEvaluationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncEvaluationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEvaluationServiceRestInterceptor() + ), ) client = EvaluationServiceAsyncClient(transport=transport) @@ -3337,9 +3341,11 @@ async def test_evaluate_dataset_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncEvaluationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncEvaluationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncEvaluationServiceRestInterceptor() + ), ) client = EvaluationServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_example_store_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_example_store_service.py index d9d0926fa3..d224edcafc 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_example_store_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_example_store_service.py @@ -72,12 +72,18 @@ from google.cloud.aiplatform_v1beta1.services.example_store_service import ( ExampleStoreServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.example_store_service import pagers -from google.cloud.aiplatform_v1beta1.services.example_store_service import transports +from google.cloud.aiplatform_v1beta1.services.example_store_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.example_store_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import example from google.cloud.aiplatform_v1beta1.types import example_store -from google.cloud.aiplatform_v1beta1.types import example_store as gca_example_store +from google.cloud.aiplatform_v1beta1.types import ( + example_store as gca_example_store, +) from google.cloud.aiplatform_v1beta1.types import example_store_service from google.cloud.aiplatform_v1beta1.types import tool from google.cloud.location import locations_pb2 @@ -1292,9 +1298,9 @@ def test_create_example_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_example_store] = ( + mock_rpc + ) request = {} client.create_example_store(request) @@ -1653,9 +1659,9 @@ def test_get_example_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_example_store] = ( + mock_rpc + ) request = {} client.get_example_store(request) @@ -1992,9 +1998,9 @@ def test_update_example_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_example_store] = ( + mock_rpc + ) request = {} client.update_example_store(request) @@ -2348,9 +2354,9 @@ def test_delete_example_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_example_store] = ( + mock_rpc + ) request = {} client.delete_example_store(request) @@ -2701,9 +2707,9 @@ def test_list_example_stores_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_example_stores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_example_stores] = ( + mock_rpc + ) request = {} client.list_example_stores(request) @@ -4329,9 +4335,9 @@ def test_create_example_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_example_store] = ( + mock_rpc + ) request = {} client.create_example_store(request) @@ -4519,9 +4525,9 @@ def test_get_example_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_example_store] = ( + mock_rpc + ) request = {} client.get_example_store(request) @@ -4703,9 +4709,9 @@ def test_update_example_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_example_store] = ( + mock_rpc + ) request = {} client.update_example_store(request) @@ -4888,9 +4894,9 @@ def test_delete_example_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_example_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_example_store] = ( + mock_rpc + ) request = {} client.delete_example_store(request) @@ -5071,9 +5077,9 @@ def test_list_example_stores_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_example_stores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_example_stores] = ( + mock_rpc + ) request = {} client.list_example_stores(request) @@ -6582,9 +6588,11 @@ def get_message_fields(field): def test_create_example_store_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -6716,9 +6724,11 @@ def test_get_example_store_rest_call_success(request_type): def test_get_example_store_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -6925,9 +6935,11 @@ def get_message_fields(field): def test_update_example_store_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -7049,9 +7061,11 @@ def test_delete_example_store_rest_call_success(request_type): def test_delete_example_store_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -7179,9 +7193,11 @@ def test_list_example_stores_rest_call_success(request_type): def test_list_example_stores_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -7313,9 +7329,11 @@ def test_upsert_examples_rest_call_success(request_type): def test_upsert_examples_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -7450,9 +7468,11 @@ def test_remove_examples_rest_call_success(request_type): def test_remove_examples_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -7584,9 +7604,11 @@ def test_search_examples_rest_call_success(request_type): def test_search_examples_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -7721,9 +7743,11 @@ def test_fetch_examples_rest_call_success(request_type): def test_fetch_examples_rest_interceptors(null_interceptor): transport = transports.ExampleStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceClient(transport=transport) @@ -8793,9 +8817,11 @@ async def test_create_example_store_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -8943,9 +8969,11 @@ async def test_get_example_store_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -9168,9 +9196,11 @@ async def test_update_example_store_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -9308,9 +9338,11 @@ async def test_delete_example_store_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -9454,9 +9486,11 @@ async def test_list_example_stores_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -9604,9 +9638,11 @@ async def test_upsert_examples_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -9757,9 +9793,11 @@ async def test_remove_examples_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -9907,9 +9945,11 @@ async def test_search_examples_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) @@ -10060,9 +10100,11 @@ async def test_fetch_examples_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExampleStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExampleStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExampleStoreServiceRestInterceptor() + ), ) client = ExampleStoreServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_extension_execution_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_extension_execution_service.py index a38c48e519..6f1f00c412 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_extension_execution_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_extension_execution_service.py @@ -1303,9 +1303,9 @@ def 
test_execute_extension_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.execute_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.execute_extension] = ( + mock_rpc + ) request = {} client.execute_extension(request) @@ -1919,9 +1919,9 @@ def test_execute_extension_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.execute_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.execute_extension] = ( + mock_rpc + ) request = {} client.execute_extension(request) @@ -2592,9 +2592,11 @@ def test_execute_extension_rest_call_success(request_type): def test_execute_extension_rest_interceptors(null_interceptor): transport = transports.ExtensionExecutionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExtensionExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionExecutionServiceRestInterceptor() + ), ) client = ExtensionExecutionServiceClient(transport=transport) @@ -2727,9 +2729,11 @@ def test_query_extension_rest_call_success(request_type): def test_query_extension_rest_interceptors(null_interceptor): transport = transports.ExtensionExecutionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExtensionExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionExecutionServiceRestInterceptor() + ), ) client = ExtensionExecutionServiceClient(transport=transport) @@ -3563,9 +3567,11 @@ async def test_execute_extension_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncExtensionExecutionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExtensionExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionExecutionServiceRestInterceptor() + ), ) client = ExtensionExecutionServiceAsyncClient(transport=transport) @@ -3716,9 +3722,11 @@ async def test_query_extension_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExtensionExecutionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExtensionExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionExecutionServiceRestInterceptor() + ), ) client = ExtensionExecutionServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_extension_registry_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_extension_registry_service.py index ca075fa0d1..95c2b32956 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_extension_registry_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_extension_registry_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1beta1.services.extension_registry_service import ( ExtensionRegistryServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.extension_registry_service import pagers +from google.cloud.aiplatform_v1beta1.services.extension_registry_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.services.extension_registry_service import ( transports, ) @@ -1297,9 +1299,9 @@ def test_import_extension_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.import_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_extension] = ( + mock_rpc + ) request = {} client.import_extension(request) @@ -2498,9 +2500,9 @@ def test_update_extension_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_extension] = ( + mock_rpc + ) request = {} client.update_extension(request) @@ -2837,9 +2839,9 @@ def test_delete_extension_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_extension] = ( + mock_rpc + ) request = {} client.delete_extension(request) @@ -3106,9 +3108,9 @@ def test_import_extension_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_extension] = ( + mock_rpc + ) request = {} client.import_extension(request) @@ -3741,9 +3743,9 @@ def test_update_extension_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_extension] = ( + mock_rpc + ) request = {} client.update_extension(request) @@ -3933,9 +3935,9 @@ def test_delete_extension_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_extension - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_extension] = ( + mock_rpc + ) request = {} client.delete_extension(request) @@ -4709,9 +4711,11 @@ def get_message_fields(field): def test_import_extension_rest_interceptors(null_interceptor): transport = transports.ExtensionRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceClient(transport=transport) @@ -4845,9 +4849,11 @@ def test_get_extension_rest_call_success(request_type): def test_get_extension_rest_interceptors(null_interceptor): transport = transports.ExtensionRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceClient(transport=transport) @@ -4975,9 +4981,11 @@ def test_list_extensions_rest_call_success(request_type): def test_list_extensions_rest_interceptors(null_interceptor): transport = transports.ExtensionRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.ExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceClient(transport=transport) @@ -5302,9 +5310,11 @@ def get_message_fields(field): def test_update_extension_rest_interceptors(null_interceptor): transport = transports.ExtensionRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceClient(transport=transport) @@ -5424,9 +5434,11 @@ def test_delete_extension_rest_call_success(request_type): def test_delete_extension_rest_interceptors(null_interceptor): transport = transports.ExtensionRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceClient(transport=transport) @@ -6508,9 +6520,11 @@ async def test_import_extension_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExtensionRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceAsyncClient(transport=transport) @@ -6660,9 +6674,11 @@ async def test_get_extension_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExtensionRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceAsyncClient(transport=transport) @@ -6806,9 +6822,11 @@ async def test_list_extensions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExtensionRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceAsyncClient(transport=transport) @@ -7149,9 +7167,11 @@ async def test_update_extension_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExtensionRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceAsyncClient(transport=transport) @@ -7287,9 +7307,11 @@ async def test_delete_extension_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncExtensionRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncExtensionRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncExtensionRegistryServiceRestInterceptor() + ), ) client = ExtensionRegistryServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_admin_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_admin_service.py index 5b4be6e90a..6725905cce 100644 --- 
a/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_admin_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_admin_service.py @@ -83,9 +83,13 @@ from google.cloud.aiplatform_v1beta1.types import ( feature_online_store as gca_feature_online_store, ) -from google.cloud.aiplatform_v1beta1.types import feature_online_store_admin_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_admin_service, +) from google.cloud.aiplatform_v1beta1.types import feature_view -from google.cloud.aiplatform_v1beta1.types import feature_view as gca_feature_view +from google.cloud.aiplatform_v1beta1.types import ( + feature_view as gca_feature_view, +) from google.cloud.aiplatform_v1beta1.types import feature_view_sync from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -3419,9 +3423,9 @@ def test_create_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_view] = ( + mock_rpc + ) request = {} client.create_feature_view(request) @@ -3819,9 +3823,9 @@ def test_get_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view] = ( + mock_rpc + ) request = {} client.get_feature_view(request) @@ -4170,9 +4174,9 @@ def test_list_feature_views_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_feature_views - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_views] = ( + mock_rpc + ) request = {} client.list_feature_views(request) @@ -4713,9 +4717,9 @@ def test_update_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_view] = ( + mock_rpc + ) request = {} client.update_feature_view(request) @@ -5093,9 +5097,9 @@ def test_delete_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_view] = ( + mock_rpc + ) request = {} client.delete_feature_view(request) @@ -5442,9 +5446,9 @@ def test_sync_feature_view_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.sync_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.sync_feature_view] = ( + mock_rpc + ) request = {} client.sync_feature_view(request) @@ -5791,9 +5795,9 @@ def test_get_feature_view_sync_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_feature_view_sync - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view_sync] = ( + mock_rpc + ) request = {} client.get_feature_view_sync(request) @@ -7707,9 +7711,9 @@ def test_create_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_view] = ( + mock_rpc + ) request = {} client.create_feature_view(request) @@ -7935,9 +7939,9 @@ def test_get_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view] = ( + mock_rpc + ) request = {} client.get_feature_view(request) @@ -8119,9 +8123,9 @@ def test_list_feature_views_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_views - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_views] = ( + mock_rpc + ) request = {} client.list_feature_views(request) @@ -8394,9 +8398,9 @@ def test_update_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_view] = ( + mock_rpc + ) request = {} client.update_feature_view(request) @@ -8587,9 +8591,9 @@ def test_delete_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_view] = ( + mock_rpc + ) request = {} client.delete_feature_view(request) @@ -8768,9 +8772,9 @@ def test_sync_feature_view_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.sync_feature_view - ] = mock_rpc + client._transport._wrapped_methods[client._transport.sync_feature_view] = ( + mock_rpc + ) request = {} client.sync_feature_view(request) @@ -8960,9 +8964,9 @@ def test_get_feature_view_sync_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_feature_view_sync - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_view_sync] = ( + mock_rpc + ) request = {} client.get_feature_view_sync(request) @@ -10398,9 +10402,11 @@ def get_message_fields(field): def test_create_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -10544,9 +10550,11 @@ def test_get_feature_online_store_rest_call_success(request_type): def test_get_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -10685,9 +10693,11 @@ def test_list_feature_online_stores_rest_call_success(request_type): def test_list_feature_online_stores_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -10944,9 +10954,11 @@ def get_message_fields(field): 
def test_update_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11076,9 +11088,11 @@ def test_delete_feature_online_store_rest_call_success(request_type): def test_delete_feature_online_store_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11332,9 +11346,11 @@ def get_message_fields(field): def test_create_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11481,9 +11497,11 @@ def test_get_feature_view_rest_call_success(request_type): def test_get_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + 
if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11616,9 +11634,11 @@ def test_list_feature_views_rest_call_success(request_type): def test_list_feature_views_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -11881,9 +11901,11 @@ def get_message_fields(field): def test_update_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12011,9 +12033,11 @@ def test_delete_feature_view_rest_call_success(request_type): def test_delete_feature_view_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12151,9 +12175,11 @@ def test_sync_feature_view_rest_call_success(request_type): def test_sync_feature_view_rest_interceptors(null_interceptor): transport = 
transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12296,9 +12322,11 @@ def test_get_feature_view_sync_rest_call_success(request_type): def test_get_feature_view_sync_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -12436,9 +12464,11 @@ def test_list_feature_view_syncs_rest_call_success(request_type): def test_list_feature_view_syncs_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceClient(transport=transport) @@ -13653,9 +13683,11 @@ async def test_create_feature_online_store_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), 
) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -13815,9 +13847,11 @@ async def test_get_feature_online_store_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -13972,9 +14006,11 @@ async def test_list_feature_online_stores_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14247,9 +14283,11 @@ async def test_update_feature_online_store_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14395,9 +14433,11 @@ async def test_delete_feature_online_store_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14667,9 +14707,11 @@ async def test_create_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14832,9 +14874,11 @@ async def test_get_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -14984,9 +15028,11 @@ async def test_list_feature_views_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15265,9 +15311,11 @@ async def test_update_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15411,9 +15459,11 @@ async def test_delete_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15567,9 +15617,11 @@ async def test_sync_feature_view_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15728,9 +15780,11 @@ async def test_get_feature_view_sync_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = 
FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) @@ -15884,9 +15938,11 @@ async def test_list_feature_view_syncs_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncFeatureOnlineStoreAdminServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreAdminServiceRestInterceptor() + ), ) client = FeatureOnlineStoreAdminServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_service.py index 96b10fa87d..0de8b7f8e5 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_feature_online_store_service.py @@ -71,7 +71,9 @@ from google.cloud.aiplatform_v1beta1.services.feature_online_store_service import ( transports, ) -from google.cloud.aiplatform_v1beta1.types import feature_online_store_service +from google.cloud.aiplatform_v1beta1.types import ( + feature_online_store_service, +) from google.cloud.aiplatform_v1beta1.types import featurestore_online_service from google.cloud.aiplatform_v1beta1.types import types from google.cloud.location import locations_pb2 @@ -1302,9 +1304,9 @@ def test_fetch_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.fetch_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.fetch_feature_values] = ( + mock_rpc + ) request = {} client.fetch_feature_values(request) @@ -2170,9 +2172,9 @@ def test_fetch_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.fetch_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.fetch_feature_values] = ( + mock_rpc + ) request = {} client.fetch_feature_values(request) @@ -2797,9 +2799,11 @@ def test_fetch_feature_values_rest_call_success(request_type): def test_fetch_feature_values_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceClient(transport=transport) @@ -2949,9 +2953,11 @@ def test_search_nearest_entities_rest_call_success(request_type): def test_search_nearest_entities_rest_interceptors(null_interceptor): transport = transports.FeatureOnlineStoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceClient(transport=transport) @@ -3805,9 +3811,11 @@ async def test_fetch_feature_values_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureOnlineStoreServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceAsyncClient(transport=transport) @@ -3980,9 +3988,11 @@ async def test_search_nearest_entities_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncFeatureOnlineStoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureOnlineStoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureOnlineStoreServiceRestInterceptor() + ), ) client = FeatureOnlineStoreServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_feature_registry_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_feature_registry_service.py index 52b1a574ad..2ad6c17495 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_feature_registry_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_feature_registry_service.py @@ -72,14 +72,22 @@ from google.cloud.aiplatform_v1beta1.services.feature_registry_service import ( FeatureRegistryServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.feature_registry_service import pagers -from google.cloud.aiplatform_v1beta1.services.feature_registry_service import transports +from google.cloud.aiplatform_v1beta1.services.feature_registry_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.feature_registry_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_group -from google.cloud.aiplatform_v1beta1.types import feature_group as gca_feature_group +from google.cloud.aiplatform_v1beta1.types import ( + feature_group as 
gca_feature_group, +) from google.cloud.aiplatform_v1beta1.types import feature_monitor -from google.cloud.aiplatform_v1beta1.types import feature_monitor as gca_feature_monitor +from google.cloud.aiplatform_v1beta1.types import ( + feature_monitor as gca_feature_monitor, +) from google.cloud.aiplatform_v1beta1.types import feature_monitor_job from google.cloud.aiplatform_v1beta1.types import ( feature_monitor_job as gca_feature_monitor_job, @@ -1311,9 +1319,9 @@ def test_create_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_group] = ( + mock_rpc + ) request = {} client.create_feature_group(request) @@ -1713,9 +1721,9 @@ def test_get_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_group] = ( + mock_rpc + ) request = {} client.get_feature_group(request) @@ -2072,9 +2080,9 @@ def test_list_feature_groups_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_groups - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_groups] = ( + mock_rpc + ) request = {} client.list_feature_groups(request) @@ -2609,9 +2617,9 @@ def test_update_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_group] = ( + mock_rpc + ) request = {} client.update_feature_group(request) @@ -2989,9 +2997,9 @@ def test_delete_feature_group_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_group] = ( + mock_rpc + ) request = {} client.delete_feature_group(request) @@ -3696,9 +3704,9 @@ def test_batch_create_features_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -5584,9 +5592,9 @@ def test_create_feature_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_monitor] = ( + mock_rpc + ) request = {} client.create_feature_monitor(request) @@ -5957,9 +5965,9 @@ def test_get_feature_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_monitor] = ( + mock_rpc + ) request = {} client.get_feature_monitor(request) @@ -6310,9 +6318,9 @@ def test_list_feature_monitors_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_monitors - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_monitors] = ( + mock_rpc + ) request = {} client.list_feature_monitors(request) @@ -6848,9 +6856,9 @@ def test_update_feature_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_monitor] = ( + mock_rpc + ) request = {} client.update_feature_monitor(request) @@ -7205,9 +7213,9 @@ def test_delete_feature_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_monitor] = ( + mock_rpc + ) request = {} client.delete_feature_monitor(request) @@ -8793,9 +8801,9 @@ def test_create_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_group] = ( + mock_rpc + ) request = {} client.create_feature_group(request) @@ -9009,9 +9017,9 @@ def test_get_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_group] = ( + mock_rpc + ) request = {} client.get_feature_group(request) @@ -9193,9 +9201,9 @@ def test_list_feature_groups_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_feature_groups - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_groups] = ( + mock_rpc + ) request = {} client.list_feature_groups(request) @@ -9462,9 +9470,9 @@ def test_update_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_group] = ( + mock_rpc + ) request = {} client.update_feature_group(request) @@ -9655,9 +9663,9 @@ def test_delete_feature_group_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_feature_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_group] = ( + mock_rpc + ) request = {} client.delete_feature_group(request) @@ -10051,9 +10059,9 @@ def test_batch_create_features_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -11056,9 +11064,9 @@ def test_create_feature_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_feature_monitor] = ( + mock_rpc + ) request = {} client.create_feature_monitor(request) @@ -11268,9 +11276,9 @@ def test_get_feature_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_feature_monitor] = ( + mock_rpc + ) request = {} client.get_feature_monitor(request) @@ -11453,9 +11461,9 @@ def test_list_feature_monitors_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_feature_monitors - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_feature_monitors] = ( + mock_rpc + ) request = {} client.list_feature_monitors(request) @@ -11727,9 +11735,9 @@ def test_update_feature_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_feature_monitor] = ( + mock_rpc + ) request = {} client.update_feature_monitor(request) @@ -11913,9 +11921,9 @@ def test_delete_feature_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_monitor] = ( + mock_rpc + ) request = {} client.delete_feature_monitor(request) @@ -13983,9 +13991,11 @@ def get_message_fields(field): def test_create_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -14124,9 +14134,11 @@ def test_get_feature_group_rest_call_success(request_type): def test_get_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -14254,9 +14266,11 @@ def test_list_feature_groups_rest_call_success(request_type): def test_list_feature_groups_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -14478,9 +14492,11 @@ def get_message_fields(field): def test_update_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -14602,9 +14618,11 @@ def test_delete_feature_group_rest_call_success(request_type): def test_delete_feature_group_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -14851,9 +14869,11 @@ def get_message_fields(field): def test_create_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -14979,9 +14999,11 @@ def test_batch_create_features_rest_call_success(request_type): def test_batch_create_features_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -15125,9 +15147,11 @@ def test_get_feature_rest_call_success(request_type): def test_get_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -15257,9 +15281,11 @@ def test_list_features_rest_call_success(request_type): def test_list_features_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -15513,9 +15539,11 @@ def get_message_fields(field): def test_update_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -15641,9 +15669,11 @@ def test_delete_feature_rest_call_success(request_type): def test_delete_feature_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -15852,9 +15882,11 @@ def get_message_fields(field): def test_create_feature_monitor_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -15990,9 +16022,11 @@ def test_get_feature_monitor_rest_call_success(request_type): def test_get_feature_monitor_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -16126,9 +16160,11 @@ def test_list_feature_monitors_rest_call_success(request_type): def 
test_list_feature_monitors_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -16344,9 +16380,11 @@ def get_message_fields(field): def test_update_feature_monitor_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -16472,9 +16510,11 @@ def test_delete_feature_monitor_rest_call_success(request_type): def test_delete_feature_monitor_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -16730,9 +16770,11 @@ def get_message_fields(field): def test_create_feature_monitor_job_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ 
-16878,9 +16920,11 @@ def test_get_feature_monitor_job_rest_call_success(request_type): def test_get_feature_monitor_job_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -17017,9 +17061,11 @@ def test_list_feature_monitor_jobs_rest_call_success(request_type): def test_list_feature_monitor_jobs_rest_interceptors(null_interceptor): transport = transports.FeatureRegistryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceClient(transport=transport) @@ -18319,9 +18365,11 @@ async def test_create_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -18478,9 +18526,11 @@ async def test_get_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -18624,9 +18674,11 @@ async def test_list_feature_groups_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -18865,9 +18917,11 @@ async def test_update_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -19007,9 +19061,11 @@ async def test_delete_feature_group_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -19274,9 +19330,11 @@ async def test_create_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -19418,9 +19476,11 @@ async def test_batch_create_features_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -19582,9 +19642,11 @@ async def test_get_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -19730,9 +19792,11 @@ async def test_list_features_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -20002,9 +20066,11 @@ async def test_update_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -20146,9 +20212,11 @@ async def test_delete_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -20373,9 +20441,11 @@ async def test_create_feature_monitor_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -20529,9 +20599,11 @@ async def test_get_feature_monitor_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -20682,9 +20754,11 @@ async def test_list_feature_monitors_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -20918,9 +20992,11 @@ async def test_update_feature_monitor_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -21064,9 +21140,11 @@ async def test_delete_feature_monitor_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -21340,9 +21418,11 @@ async def test_create_feature_monitor_job_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -21504,9 +21584,11 @@ async def test_get_feature_monitor_job_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) @@ -21661,9 +21743,11 @@ async def test_list_feature_monitor_jobs_rest_asyncio_interceptors(null_intercep ) transport = transports.AsyncFeatureRegistryServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeatureRegistryServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeatureRegistryServiceRestInterceptor() + ), ) client = FeatureRegistryServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_online_serving_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_online_serving_service.py index a3ad602391..b72026cefe 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_online_serving_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_online_serving_service.py @@ -1326,9 +1326,9 @@ def test_read_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_feature_values] = ( + mock_rpc + ) request = {} client.read_feature_values(request) @@ -2011,9 +2011,9 @@ def test_write_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.write_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.write_feature_values] = ( + mock_rpc + ) request = {} client.write_feature_values(request) @@ -2316,9 +2316,9 @@ def test_read_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_feature_values] = ( + mock_rpc + ) request = {} client.read_feature_values(request) @@ -2729,9 +2729,9 @@ def test_write_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.write_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.write_feature_values] = ( + mock_rpc + ) request = {} client.write_feature_values(request) @@ -3265,9 +3265,11 @@ def test_read_feature_values_rest_call_success(request_type): def test_read_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreOnlineServingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceClient(transport=transport) @@ -3407,9 +3409,11 @@ def test_streaming_read_feature_values_rest_call_success(request_type): def test_streaming_read_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreOnlineServingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.FeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceClient(transport=transport) @@ -3545,9 +3549,11 @@ def test_write_feature_values_rest_call_success(request_type): def test_write_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreOnlineServingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceClient(transport=transport) @@ -4408,9 +4414,11 @@ async def test_read_feature_values_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreOnlineServingServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceAsyncClient(transport=transport) @@ -4566,9 +4574,11 @@ async def test_streaming_read_feature_values_rest_asyncio_interceptors( ) transport = transports.AsyncFeaturestoreOnlineServingServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceAsyncClient(transport=transport) @@ -4720,9 +4730,11 @@ async def 
test_write_feature_values_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreOnlineServingServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreOnlineServingServiceRestInterceptor() + ), ) client = FeaturestoreOnlineServingServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py index 75b8c29c63..7498e1af48 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py @@ -72,18 +72,26 @@ from google.cloud.aiplatform_v1beta1.services.featurestore_service import ( FeaturestoreServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.featurestore_service import pagers -from google.cloud.aiplatform_v1beta1.services.featurestore_service import transports +from google.cloud.aiplatform_v1beta1.services.featurestore_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.featurestore_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import entity_type -from google.cloud.aiplatform_v1beta1.types import entity_type as gca_entity_type +from google.cloud.aiplatform_v1beta1.types import ( + entity_type as gca_entity_type, +) from google.cloud.aiplatform_v1beta1.types import feature from google.cloud.aiplatform_v1beta1.types import feature as gca_feature from google.cloud.aiplatform_v1beta1.types import feature_monitor from google.cloud.aiplatform_v1beta1.types import feature_monitoring_stats from google.cloud.aiplatform_v1beta1.types import feature_selector from google.cloud.aiplatform_v1beta1.types import 
featurestore -from google.cloud.aiplatform_v1beta1.types import featurestore as gca_featurestore +from google.cloud.aiplatform_v1beta1.types import ( + featurestore as gca_featurestore, +) from google.cloud.aiplatform_v1beta1.types import featurestore_monitoring from google.cloud.aiplatform_v1beta1.types import featurestore_service from google.cloud.aiplatform_v1beta1.types import io @@ -1303,9 +1311,9 @@ def test_create_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_featurestore] = ( + mock_rpc + ) request = {} client.create_featurestore(request) @@ -1676,9 +1684,9 @@ def test_get_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_featurestore] = ( + mock_rpc + ) request = {} client.get_featurestore(request) @@ -2024,9 +2032,9 @@ def test_list_featurestores_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_featurestores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_featurestores] = ( + mock_rpc + ) request = {} client.list_featurestores(request) @@ -2561,9 +2569,9 @@ def test_update_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_featurestore] = ( + mock_rpc + ) request = {} client.update_featurestore(request) @@ -2917,9 +2925,9 @@ def test_delete_featurestore_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_featurestore] = ( + mock_rpc + ) request = {} client.delete_featurestore(request) @@ -3275,9 +3283,9 @@ def test_create_entity_type_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_entity_type] = ( + mock_rpc + ) request = {} client.create_entity_type(request) @@ -3992,9 +4000,9 @@ def test_list_entity_types_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_entity_types - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_entity_types] = ( + mock_rpc + ) request = {} client.list_entity_types(request) @@ -4542,9 +4550,9 @@ def test_update_entity_type_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_entity_type] = ( + mock_rpc + ) request = {} client.update_entity_type(request) @@ -4901,9 +4909,9 @@ def test_delete_entity_type_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_entity_type] = ( + mock_rpc + ) request = {} client.delete_entity_type(request) @@ -5608,9 +5616,9 @@ def test_batch_create_features_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -7514,9 +7522,9 @@ def test_import_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_feature_values] = ( + mock_rpc + ) request = {} client.import_feature_values(request) @@ -8208,9 +8216,9 @@ def test_export_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.export_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_feature_values] = ( + mock_rpc + ) request = {} client.export_feature_values(request) @@ -8555,9 +8563,9 @@ def test_delete_feature_values_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_values] = ( + mock_rpc + ) request = {} client.delete_feature_values(request) @@ -9368,9 +9376,9 @@ def test_create_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_featurestore] = ( + mock_rpc + ) request = {} client.create_featurestore(request) @@ -9576,9 +9584,9 @@ def test_get_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_featurestore] = ( + mock_rpc + ) request = {} client.get_featurestore(request) @@ -9760,9 +9768,9 @@ def test_list_featurestores_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_featurestores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_featurestores] = ( + mock_rpc + ) request = {} client.list_featurestores(request) @@ -10028,9 +10036,9 @@ def test_update_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_featurestore] = ( + mock_rpc + ) request = {} client.update_featurestore(request) @@ -10213,9 +10221,9 @@ def test_delete_featurestore_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_featurestore - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_featurestore] = ( + mock_rpc + ) request = {} client.delete_featurestore(request) @@ -10400,9 +10408,9 @@ def test_create_entity_type_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_entity_type] = ( + mock_rpc + ) request = {} client.create_entity_type(request) @@ -10789,9 +10797,9 @@ def test_list_entity_types_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_entity_types - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_entity_types] = ( + mock_rpc + ) request = {} client.list_entity_types(request) @@ -11059,9 +11067,9 @@ def test_update_entity_type_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_entity_type] = ( + mock_rpc + ) request = {} client.update_entity_type(request) @@ -11245,9 +11253,9 @@ def test_delete_entity_type_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entity_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_entity_type] = ( + mock_rpc + ) request = {} client.delete_entity_type(request) @@ -11641,9 +11649,9 @@ def test_batch_create_features_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.batch_create_features - ] = mock_rpc + client._transport._wrapped_methods[client._transport.batch_create_features] = ( + mock_rpc + ) request = {} client.batch_create_features(request) @@ -12647,9 +12655,9 @@ def test_import_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.import_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_feature_values] = ( + mock_rpc + ) request = {} client.import_feature_values(request) @@ -13034,9 +13042,9 @@ def test_export_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.export_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_feature_values] = ( + mock_rpc + ) request = {} client.export_feature_values(request) @@ -13228,9 +13236,9 @@ def test_delete_feature_values_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_feature_values - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_feature_values] = ( + mock_rpc + ) request = {} client.delete_feature_values(request) @@ -14991,9 +14999,11 @@ def get_message_fields(field): def test_create_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15131,9 +15141,11 @@ def test_get_featurestore_rest_call_success(request_type): def test_get_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15259,9 +15271,11 @@ def test_list_featurestores_rest_call_success(request_type): def test_list_featurestores_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15483,9 +15497,11 @@ def get_message_fields(field): def test_update_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15607,9 +15623,11 @@ def test_delete_featurestore_rest_call_success(request_type): def test_delete_featurestore_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15824,9 +15842,11 @@ def get_message_fields(field): def test_create_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -15968,9 +15988,11 @@ def test_get_entity_type_rest_call_success(request_type): def test_get_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16100,9 +16122,11 @@ def test_list_entity_types_rest_call_success(request_type): def test_list_entity_types_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16340,9 +16364,11 @@ def get_message_fields(field): def test_update_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16466,9 +16492,11 @@ def test_delete_entity_type_rest_call_success(request_type): def test_delete_entity_type_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16715,9 +16743,11 @@ def get_message_fields(field): def test_create_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16843,9 +16873,11 @@ def test_batch_create_features_rest_call_success(request_type): def test_batch_create_features_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -16989,9 +17021,11 @@ def test_get_feature_rest_call_success(request_type): def test_get_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17120,9 +17154,11 @@ def test_list_features_rest_call_success(request_type): def test_list_features_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17394,9 +17430,11 @@ def get_message_fields(field): def test_update_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17520,9 +17558,11 @@ def test_delete_feature_rest_call_success(request_type): def test_delete_feature_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17648,9 +17688,11 @@ def test_import_feature_values_rest_call_success(request_type): def test_import_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17776,9 +17818,11 @@ def test_batch_read_feature_values_rest_call_success(request_type): def test_batch_read_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -17904,9 +17948,11 @@ def test_export_feature_values_rest_call_success(request_type): def test_export_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -18032,9 +18078,11 @@ def test_delete_feature_values_rest_call_success(request_type): def test_delete_feature_values_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -18162,9 +18210,11 @@ def test_search_features_rest_call_success(request_type): def test_search_features_rest_interceptors(null_interceptor): transport = transports.FeaturestoreServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.FeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.FeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceClient(transport=transport) @@ -19500,9 +19550,11 @@ async def test_create_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -19656,9 +19708,11 @@ async def test_get_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -19800,9 +19854,11 @@ async def test_list_featurestores_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20040,9 +20096,11 @@ async def test_update_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20180,9 +20238,11 @@ async def test_delete_featurestore_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20413,9 +20473,11 @@ async def test_create_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20573,9 +20635,11 @@ async def test_get_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20721,9 +20785,11 @@ async def test_list_entity_types_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -20977,9 +21043,11 @@ async def test_update_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21119,9 +21187,11 @@ async def test_delete_entity_type_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21384,9 +21454,11 @@ async def test_create_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21528,9 +21600,11 @@ async def test_batch_create_features_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21690,9 +21764,11 @@ async def test_get_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -21838,9 +21914,11 @@ async def test_list_features_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22128,9 +22206,11 @@ async def test_update_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22270,9 +22350,11 @@ async def test_delete_feature_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22414,9 +22496,11 @@ async def test_import_feature_values_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22558,9 +22642,11 @@ async def test_batch_read_feature_values_rest_asyncio_interceptors(null_intercep ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22704,9 +22790,11 @@ async def test_export_feature_values_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22848,9 +22936,11 @@ async def test_delete_feature_values_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) @@ -22994,9 +23084,11 @@ async def test_search_features_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncFeaturestoreServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncFeaturestoreServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncFeaturestoreServiceRestInterceptor() + ), ) client = FeaturestoreServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_cache_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_cache_service.py index 438f02fbe9..29a6f22a77 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_cache_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_cache_service.py @@ -68,10 +68,16 @@ from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import ( GenAiCacheServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import pagers -from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import transports +from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.gen_ai_cache_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import cached_content -from google.cloud.aiplatform_v1beta1.types import cached_content as gca_cached_content +from google.cloud.aiplatform_v1beta1.types import ( + cached_content as gca_cached_content, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import gen_ai_cache_service @@ -1275,9 +1281,9 @@ def test_create_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cached_content] = ( + mock_rpc + ) request = {} client.create_cached_content(request) @@ -1647,9 +1653,9 @@ def test_get_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cached_content] = ( + mock_rpc + ) request = {} client.get_cached_content(request) @@ -1994,9 +2000,9 @@ def test_update_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cached_content] = ( + mock_rpc + ) request = {} client.update_cached_content(request) @@ -2360,9 +2366,9 @@ def test_delete_cached_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cached_content] = ( + mock_rpc + ) request = {} client.delete_cached_content(request) @@ -2695,9 +2701,9 @@ def test_list_cached_contents_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_cached_contents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_cached_contents] = ( + mock_rpc + ) request = {} client.list_cached_contents(request) @@ -3172,9 +3178,9 @@ def test_create_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_cached_content] = ( + mock_rpc + ) request = {} client.create_cached_content(request) @@ -3369,9 +3375,9 @@ def test_get_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cached_content] = ( + mock_rpc + ) request = {} client.get_cached_content(request) @@ -3554,9 +3560,9 @@ def test_update_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cached_content] = ( + mock_rpc + ) request = {} client.update_cached_content(request) @@ -3753,9 +3759,9 @@ def test_delete_cached_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_cached_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_cached_content] = ( + mock_rpc + ) request = {} client.delete_cached_content(request) @@ -3932,9 +3938,9 @@ def test_list_cached_contents_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_cached_contents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_cached_contents] = ( + mock_rpc + ) request = {} client.list_cached_contents(request) @@ -4873,9 +4879,9 @@ def get_message_fields(field): def test_create_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5007,9 +5013,9 @@ def test_get_cached_content_rest_call_success(request_type): def test_get_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5389,9 +5395,9 @@ def get_message_fields(field): def test_update_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5513,9 +5519,9 @@ def test_delete_cached_content_rest_call_success(request_type): def test_delete_cached_content_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -5628,9 +5634,9 @@ def test_list_cached_contents_rest_call_success(request_type): def test_list_cached_contents_rest_interceptors(null_interceptor): transport = transports.GenAiCacheServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiCacheServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceClient(transport=transport) @@ -6774,9 +6780,11 @@ async def test_create_cached_content_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -6924,9 +6932,11 @@ async def test_get_cached_content_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -7322,9 +7332,11 @@ async def test_update_cached_content_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + 
interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -7462,9 +7474,11 @@ async def test_delete_cached_content_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) @@ -7593,9 +7607,11 @@ async def test_list_cached_contents_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiCacheServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiCacheServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiCacheServiceRestInterceptor() + ), ) client = GenAiCacheServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_tuning_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_tuning_service.py index 1c202fd94f..52ca98111a 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_tuning_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_gen_ai_tuning_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import ( GenAiTuningServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import pagers -from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import transports +from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.gen_ai_tuning_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from 
google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import evaluation_service @@ -82,7 +86,9 @@ from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import tool from google.cloud.aiplatform_v1beta1.types import tuning_job -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore @@ -1295,9 +1301,9 @@ def test_create_tuning_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tuning_job] = ( + mock_rpc + ) request = {} client.create_tuning_job(request) @@ -2018,9 +2024,9 @@ def test_list_tuning_jobs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tuning_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tuning_jobs] = ( + mock_rpc + ) request = {} client.list_tuning_jobs(request) @@ -2539,9 +2545,9 @@ def test_cancel_tuning_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.cancel_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_tuning_job] = ( + mock_rpc + ) request = {} client.cancel_tuning_job(request) @@ -2869,9 +2875,9 @@ def test_rebase_tuned_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.rebase_tuned_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rebase_tuned_model] = ( + mock_rpc + ) request = {} client.rebase_tuned_model(request) @@ -3158,9 +3164,9 @@ def test_create_tuning_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tuning_job] = ( + mock_rpc + ) request = {} client.create_tuning_job(request) @@ -3529,9 +3535,9 @@ def test_list_tuning_jobs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tuning_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tuning_jobs] = ( + mock_rpc + ) request = {} client.list_tuning_jobs(request) @@ -3789,9 +3795,9 @@ def test_cancel_tuning_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.cancel_tuning_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_tuning_job] = ( + mock_rpc + ) request = {} client.cancel_tuning_job(request) @@ -3969,9 +3975,9 @@ def test_rebase_tuned_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.rebase_tuned_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.rebase_tuned_model] = ( + mock_rpc + ) request = {} client.rebase_tuned_model(request) @@ -4928,9 +4934,9 @@ def get_message_fields(field): def test_create_tuning_job_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5073,9 +5079,9 @@ def test_get_tuning_job_rest_call_success(request_type): def test_get_tuning_job_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5201,9 +5207,9 @@ def test_list_tuning_jobs_rest_call_success(request_type): def test_list_tuning_jobs_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + 
None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5328,9 +5334,9 @@ def test_cancel_tuning_job_rest_call_success(request_type): def test_cancel_tuning_job_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -5437,9 +5443,9 @@ def test_rebase_tuned_model_rest_call_success(request_type): def test_rebase_tuned_model_rest_interceptors(null_interceptor): transport = transports.GenAiTuningServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.GenAiTuningServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.GenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceClient(transport=transport) @@ -6681,9 +6687,11 @@ async def test_create_tuning_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -6842,9 +6850,11 @@ async def test_get_tuning_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -6986,9 +6996,11 @@ async def test_list_tuning_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -7129,9 +7141,11 @@ async def test_cancel_tuning_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) @@ -7254,9 +7268,11 @@ async def test_rebase_tuned_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncGenAiTuningServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncGenAiTuningServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncGenAiTuningServiceRestInterceptor() + ), ) client = GenAiTuningServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_index_endpoint_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_index_endpoint_service.py index acfaef5a6b..838bf05cf1 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_index_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_index_endpoint_service.py @@ -72,12 +72,18 @@ from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import ( 
IndexEndpointServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import pagers -from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import transports +from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import accelerator_type from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import index_endpoint -from google.cloud.aiplatform_v1beta1.types import index_endpoint as gca_index_endpoint +from google.cloud.aiplatform_v1beta1.types import ( + index_endpoint as gca_index_endpoint, +) from google.cloud.aiplatform_v1beta1.types import index_endpoint_service from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -1295,9 +1301,9 @@ def test_create_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_index_endpoint] = ( + mock_rpc + ) request = {} client.create_index_endpoint(request) @@ -1672,9 +1678,9 @@ def test_get_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_index_endpoint] = ( + mock_rpc + ) request = {} client.get_index_endpoint(request) @@ -2036,9 +2042,9 @@ def test_list_index_endpoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_index_endpoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_index_endpoints] = ( + mock_rpc + ) request = {} client.list_index_endpoints(request) @@ -2595,9 +2601,9 @@ def test_update_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_index_endpoint] = ( + mock_rpc + ) request = {} client.update_index_endpoint(request) @@ -2963,9 +2969,9 @@ def test_delete_index_endpoint_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_index_endpoint] = ( + mock_rpc + ) request = {} client.delete_index_endpoint(request) @@ -3988,9 +3994,9 @@ def test_mutate_deployed_index_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_index - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_index] = ( + mock_rpc + ) request = {} client.mutate_deployed_index(request) @@ -4280,9 +4286,9 @@ def test_create_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_index_endpoint] = ( + mock_rpc + ) request = {} client.create_index_endpoint(request) @@ -4472,9 +4478,9 @@ def test_get_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_index_endpoint] = ( + mock_rpc + ) request = {} client.get_index_endpoint(request) @@ -4656,9 +4662,9 @@ def test_list_index_endpoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_index_endpoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_index_endpoints] = ( + mock_rpc + ) request = {} client.list_index_endpoints(request) @@ -4926,9 +4932,9 @@ def test_update_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_index_endpoint] = ( + mock_rpc + ) request = {} client.update_index_endpoint(request) @@ -5121,9 +5127,9 @@ def test_delete_index_endpoint_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_index_endpoint - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_index_endpoint] = ( + mock_rpc + ) request = {} client.delete_index_endpoint(request) @@ -5689,9 +5695,9 @@ def test_mutate_deployed_index_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.mutate_deployed_index - ] = mock_rpc + client._transport._wrapped_methods[client._transport.mutate_deployed_index] = ( + mock_rpc + ) request = {} client.mutate_deployed_index(request) @@ -6640,9 +6646,11 @@ def get_message_fields(field): def test_create_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -6788,9 +6796,11 @@ def test_get_index_endpoint_rest_call_success(request_type): def test_get_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -6920,9 +6930,11 @@ def test_list_index_endpoints_rest_call_success(request_type): def test_list_index_endpoints_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7253,9 +7265,11 @@ def get_message_fields(field): def test_update_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7377,9 +7391,11 @@ def test_delete_index_endpoint_rest_call_success(request_type): def test_delete_index_endpoint_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7505,9 +7521,11 @@ def test_deploy_index_rest_call_success(request_type): def test_deploy_index_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7633,9 +7651,11 @@ def test_undeploy_index_rest_call_success(request_type): def test_undeploy_index_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -7899,9 +7919,11 @@ def get_message_fields(field): def test_mutate_deployed_index_rest_interceptors(null_interceptor): transport = transports.IndexEndpointServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.IndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceClient(transport=transport) @@ -9045,9 +9067,11 @@ async def test_create_index_endpoint_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9210,9 +9234,11 @@ async def test_get_index_endpoint_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9358,9 +9384,11 @@ async def test_list_index_endpoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9707,9 +9735,11 @@ async def test_update_index_endpoint_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9848,9 +9878,11 @@ async def test_delete_index_endpoint_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -9993,9 +10025,11 @@ async def test_deploy_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -10137,9 +10171,11 @@ async def test_undeploy_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) @@ -10419,9 +10455,11 @@ async def test_mutate_deployed_index_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncIndexEndpointServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexEndpointServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncIndexEndpointServiceRestInterceptor() + ), ) client = IndexEndpointServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_index_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_index_service.py index 327ed1f430..259e690c9c 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_index_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_index_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1beta1.services.index_service import ( IndexServiceAsyncClient, ) -from google.cloud.aiplatform_v1beta1.services.index_service import IndexServiceClient +from google.cloud.aiplatform_v1beta1.services.index_service import ( + IndexServiceClient, +) from google.cloud.aiplatform_v1beta1.services.index_service import pagers from google.cloud.aiplatform_v1beta1.services.index_service import transports from google.cloud.aiplatform_v1beta1.types 
import deployed_index_ref @@ -3312,9 +3314,9 @@ def test_upsert_datapoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.upsert_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.upsert_datapoints] = ( + mock_rpc + ) request = {} client.upsert_datapoints(request) @@ -3559,9 +3561,9 @@ def test_remove_datapoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.remove_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.remove_datapoints] = ( + mock_rpc + ) request = {} client.remove_datapoints(request) @@ -4849,9 +4851,9 @@ def test_upsert_datapoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.upsert_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.upsert_datapoints] = ( + mock_rpc + ) request = {} client.upsert_datapoints(request) @@ -4972,9 +4974,9 @@ def test_remove_datapoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.remove_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.remove_datapoints] = ( + mock_rpc + ) request = {} client.remove_datapoints(request) @@ -5750,9 +5752,9 @@ def get_message_fields(field): def test_create_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -5891,9 +5893,9 @@ def test_get_index_rest_call_success(request_type): def test_get_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6008,9 +6010,9 @@ def test_import_index_rest_call_success(request_type): def test_import_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6135,9 +6137,9 @@ def test_list_indexes_rest_call_success(request_type): def test_list_indexes_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6361,9 +6363,9 @@ def get_message_fields(field): def test_update_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6482,9 +6484,9 @@ def test_delete_index_rest_call_success(request_type): def test_delete_index_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6608,9 +6610,9 @@ def test_upsert_datapoints_rest_call_success(request_type): def test_upsert_datapoints_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -6737,9 +6739,9 @@ def test_remove_datapoints_rest_call_success(request_type): def test_remove_datapoints_rest_interceptors(null_interceptor): transport = transports.IndexServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.IndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.IndexServiceRestInterceptor() + ), ) client = IndexServiceClient(transport=transport) @@ -7804,9 +7806,9 
@@ async def test_create_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -7963,9 +7965,9 @@ async def test_get_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8098,9 +8100,9 @@ async def test_import_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8243,9 +8245,9 @@ async def test_list_indexes_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8487,9 +8489,9 @@ async def test_update_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - 
else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8626,9 +8628,9 @@ async def test_delete_index_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8768,9 +8770,9 @@ async def test_upsert_datapoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) @@ -8914,9 +8916,9 @@ async def test_remove_datapoints_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncIndexServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncIndexServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncIndexServiceRestInterceptor() + ), ) client = IndexServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py index 5ee5b03b17..70fcfccb76 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py @@ -66,8 +66,12 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import 
MutualTLSChannelError -from google.cloud.aiplatform_v1beta1.services.job_service import JobServiceAsyncClient -from google.cloud.aiplatform_v1beta1.services.job_service import JobServiceClient +from google.cloud.aiplatform_v1beta1.services.job_service import ( + JobServiceAsyncClient, +) +from google.cloud.aiplatform_v1beta1.services.job_service import ( + JobServiceClient, +) from google.cloud.aiplatform_v1beta1.services.job_service import pagers from google.cloud.aiplatform_v1beta1.services.job_service import transports from google.cloud.aiplatform_v1beta1.types import accelerator_type @@ -77,7 +81,9 @@ ) from google.cloud.aiplatform_v1beta1.types import completion_stats from google.cloud.aiplatform_v1beta1.types import custom_job -from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job +from google.cloud.aiplatform_v1beta1.types import ( + custom_job as gca_custom_job, +) from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, @@ -95,9 +101,13 @@ from google.cloud.aiplatform_v1beta1.types import job_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import machine_resources -from google.cloud.aiplatform_v1beta1.types import manual_batch_tuning_parameters +from google.cloud.aiplatform_v1beta1.types import ( + manual_batch_tuning_parameters, +) from google.cloud.aiplatform_v1beta1.types import model -from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job +from google.cloud.aiplatform_v1beta1.types import ( + model_deployment_monitoring_job, +) from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) @@ -1236,9 +1246,9 @@ def test_create_custom_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_custom_job] = ( + mock_rpc + ) request = {} client.create_custom_job(request) @@ -1932,9 +1942,9 @@ def test_list_custom_jobs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_custom_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_custom_jobs] = ( + mock_rpc + ) request = {} client.list_custom_jobs(request) @@ -2452,9 +2462,9 @@ def test_delete_custom_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_custom_job] = ( + mock_rpc + ) request = {} client.delete_custom_job(request) @@ -2795,9 +2805,9 @@ def test_cancel_custom_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_custom_job] = ( + mock_rpc + ) request = {} client.cancel_custom_job(request) @@ -3529,9 +3539,9 @@ def test_get_data_labeling_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_data_labeling_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_labeling_job] = ( + mock_rpc + ) request = {} client.get_data_labeling_job(request) @@ -8963,9 +8973,9 @@ def test_get_nas_trial_detail_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_nas_trial_detail - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_nas_trial_detail] = ( + mock_rpc + ) request = {} client.get_nas_trial_detail(request) @@ -9309,9 +9319,9 @@ def test_list_nas_trial_details_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_nas_trial_details - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_nas_trial_details] = ( + mock_rpc + ) request = {} client.list_nas_trial_details(request) @@ -15144,9 +15154,9 @@ def test_create_custom_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_custom_job] = ( + mock_rpc + ) request = {} client.create_custom_job(request) @@ -15515,9 +15525,9 @@ def test_list_custom_jobs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_custom_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_custom_jobs] = ( + mock_rpc + ) request = {} client.list_custom_jobs(request) @@ -15777,9 +15787,9 @@ def test_delete_custom_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_custom_job] = ( + mock_rpc + ) request = {} client.delete_custom_job(request) @@ -15958,9 +15968,9 @@ def test_cancel_custom_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_custom_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_custom_job] = ( + mock_rpc + ) request = {} client.cancel_custom_job(request) @@ -16333,9 +16343,9 @@ def test_get_data_labeling_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_data_labeling_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_data_labeling_job] = ( + mock_rpc + ) request = {} client.get_data_labeling_job(request) @@ -19162,9 +19172,9 @@ def test_get_nas_trial_detail_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_nas_trial_detail - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_nas_trial_detail] = ( + mock_rpc + ) request = {} client.get_nas_trial_detail(request) @@ -19347,9 +19357,9 @@ def test_list_nas_trial_details_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_nas_trial_details - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_nas_trial_details] = ( + mock_rpc + ) request = {} client.list_nas_trial_details(request) @@ -20885,9 +20895,9 @@ def test_search_model_deployment_monitoring_stats_anomalies_rest_required_fields # verify required fields with default values are now present - jsonified_request[ - "modelDeploymentMonitoringJob" - ] = "model_deployment_monitoring_job_value" + jsonified_request["modelDeploymentMonitoringJob"] = ( + "model_deployment_monitoring_job_value" + ) jsonified_request["deployedModelId"] = "deployed_model_id_value" unset_fields = transport_class( @@ -24592,9 +24602,9 @@ def get_message_fields(field): def test_create_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -24725,9 +24735,9 @@ def test_get_custom_job_rest_call_success(request_type): def test_get_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -24852,9 +24862,9 @@ def test_list_custom_jobs_rest_call_success(request_type): def test_list_custom_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -24975,9 +24985,9 @@ def test_delete_custom_job_rest_call_success(request_type): def test_delete_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25098,9 +25108,9 @@ def test_cancel_custom_job_rest_call_success(request_type): def test_cancel_custom_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25347,9 +25357,9 @@ def get_message_fields(field): def test_create_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25500,9 +25510,9 @@ 
def test_get_data_labeling_job_rest_call_success(request_type): def test_get_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25629,9 +25639,9 @@ def test_list_data_labeling_jobs_rest_call_success(request_type): def test_list_data_labeling_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25760,9 +25770,9 @@ def test_delete_data_labeling_job_rest_call_success(request_type): def test_delete_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -25888,9 +25898,9 @@ def test_cancel_data_labeling_job_rest_call_success(request_type): def test_cancel_data_labeling_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26311,9 +26321,9 @@ def get_message_fields(field): def 
test_create_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26464,9 +26474,9 @@ def test_get_hyperparameter_tuning_job_rest_call_success(request_type): def test_get_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26597,9 +26607,9 @@ def test_list_hyperparameter_tuning_jobs_rest_call_success(request_type): def test_list_hyperparameter_tuning_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26728,9 +26738,9 @@ def test_delete_hyperparameter_tuning_job_rest_call_success(request_type): def test_delete_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -26856,9 +26866,9 @@ def 
test_cancel_hyperparameter_tuning_job_rest_call_success(request_type): def test_cancel_hyperparameter_tuning_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27191,9 +27201,9 @@ def get_message_fields(field): def test_create_nas_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27326,9 +27336,9 @@ def test_get_nas_job_rest_call_success(request_type): def test_get_nas_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27449,9 +27459,9 @@ def test_list_nas_jobs_rest_call_success(request_type): def test_list_nas_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27568,9 +27578,9 @@ def test_delete_nas_job_rest_call_success(request_type): def test_delete_nas_job_rest_interceptors(null_interceptor): 
transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27689,9 +27699,9 @@ def test_cancel_nas_job_rest_call_success(request_type): def test_cancel_nas_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27810,9 +27820,9 @@ def test_get_nas_trial_detail_rest_call_success(request_type): def test_get_nas_trial_detail_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -27937,9 +27947,9 @@ def test_list_nas_trial_details_rest_call_success(request_type): def test_list_nas_trial_details_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28392,9 +28402,9 @@ def get_message_fields(field): def test_create_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28547,9 +28557,9 @@ def test_get_batch_prediction_job_rest_call_success(request_type): def test_get_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28680,9 +28690,9 @@ def test_list_batch_prediction_jobs_rest_call_success(request_type): def test_list_batch_prediction_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28811,9 +28821,9 @@ def test_delete_batch_prediction_job_rest_call_success(request_type): def test_delete_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -28939,9 +28949,9 @@ def test_cancel_batch_prediction_job_rest_call_success(request_type): def test_cancel_batch_prediction_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29255,9 +29265,9 @@ def get_message_fields(field): def test_create_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29410,9 +29420,9 @@ def test_search_model_deployment_monitoring_stats_anomalies_rest_interceptors( ): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29580,9 +29590,9 @@ def test_get_model_deployment_monitoring_job_rest_call_success(request_type): def test_get_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -29719,9 +29729,9 @@ def test_list_model_deployment_monitoring_jobs_rest_call_success(request_type): def test_list_model_deployment_monitoring_jobs_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30030,9 +30040,9 @@ def get_message_fields(field): def test_update_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30160,9 +30170,9 @@ def test_delete_model_deployment_monitoring_job_rest_call_success(request_type): def test_delete_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30290,9 +30300,9 @@ def test_pause_model_deployment_monitoring_job_rest_call_success(request_type): def test_pause_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -30404,9 +30414,9 @@ def test_resume_model_deployment_monitoring_job_rest_call_success(request_type): def test_resume_model_deployment_monitoring_job_rest_interceptors(null_interceptor): transport = transports.JobServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.JobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.JobServiceRestInterceptor() + ), ) client = JobServiceClient(transport=transport) @@ -32128,9 +32138,9 @@ async def test_create_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32280,9 +32290,9 @@ async def test_get_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32423,9 +32433,9 @@ async def test_list_custom_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32562,9 +32572,9 @@ async def test_delete_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32702,9 +32712,9 @@ async def 
test_cancel_custom_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -32967,9 +32977,9 @@ async def test_create_data_labeling_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33136,9 +33146,9 @@ async def test_get_data_labeling_job_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33282,9 +33292,9 @@ async def test_list_data_labeling_jobs_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33429,9 +33439,9 @@ async def test_delete_data_labeling_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - 
else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -33573,9 +33583,9 @@ async def test_cancel_data_labeling_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34014,9 +34024,9 @@ async def test_create_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34187,9 +34197,9 @@ async def test_get_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34338,9 +34348,9 @@ async def test_list_hyperparameter_tuning_jobs_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34488,9 +34498,9 @@ async 
def test_delete_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34636,9 +34646,9 @@ async def test_cancel_hyperparameter_tuning_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -34990,9 +35000,9 @@ async def test_create_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35143,9 +35153,9 @@ async def test_get_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35284,9 +35294,9 @@ async def test_list_nas_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35421,9 +35431,9 @@ async def test_delete_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35560,9 +35570,9 @@ async def test_cancel_nas_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35697,9 +35707,9 @@ async def test_get_nas_trial_detail_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -35841,9 +35851,9 @@ async def test_list_nas_trial_details_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36312,9 +36322,9 @@ 
async def test_create_batch_prediction_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36483,9 +36493,9 @@ async def test_get_batch_prediction_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36632,9 +36642,9 @@ async def test_list_batch_prediction_jobs_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36779,9 +36789,9 @@ async def test_delete_batch_prediction_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -36923,9 +36933,9 @@ async def test_cancel_batch_prediction_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37259,9 +37269,9 @@ async def test_create_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37432,9 +37442,9 @@ async def test_search_model_deployment_monitoring_stats_anomalies_rest_asyncio_i ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37622,9 +37632,9 @@ async def test_get_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -37783,9 +37793,9 @@ async def test_list_model_deployment_monitoring_jobs_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = 
JobServiceAsyncClient(transport=transport) @@ -38114,9 +38124,9 @@ async def test_update_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -38264,9 +38274,9 @@ async def test_delete_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -38414,9 +38424,9 @@ async def test_pause_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) @@ -38548,9 +38558,9 @@ async def test_resume_model_deployment_monitoring_job_rest_asyncio_interceptors( ) transport = transports.AsyncJobServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncJobServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncJobServiceRestInterceptor() + ), ) client = JobServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_llm_utility_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_llm_utility_service.py index 
5c3dbbb9bc..96611308bd 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_llm_utility_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_llm_utility_service.py @@ -68,7 +68,9 @@ from google.cloud.aiplatform_v1beta1.services.llm_utility_service import ( LlmUtilityServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.llm_utility_service import transports +from google.cloud.aiplatform_v1beta1.services.llm_utility_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import llm_utility_service from google.cloud.aiplatform_v1beta1.types import tool @@ -1923,9 +1925,9 @@ def test_compute_tokens_rest_call_success(request_type): def test_compute_tokens_rest_interceptors(null_interceptor): transport = transports.LlmUtilityServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LlmUtilityServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.LlmUtilityServiceRestInterceptor() + ), ) client = LlmUtilityServiceClient(transport=transport) @@ -2731,9 +2733,11 @@ async def test_compute_tokens_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncLlmUtilityServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncLlmUtilityServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncLlmUtilityServiceRestInterceptor() + ), ) client = LlmUtilityServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_match_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_match_service.py index 888efcaec5..e484f70654 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_match_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_match_service.py @@ -65,7 +65,9 @@ from google.cloud.aiplatform_v1beta1.services.match_service import 
( MatchServiceAsyncClient, ) -from google.cloud.aiplatform_v1beta1.services.match_service import MatchServiceClient +from google.cloud.aiplatform_v1beta1.services.match_service import ( + MatchServiceClient, +) from google.cloud.aiplatform_v1beta1.services.match_service import transports from google.cloud.aiplatform_v1beta1.types import index from google.cloud.aiplatform_v1beta1.types import match_service @@ -1434,9 +1436,9 @@ def test_read_index_datapoints_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_index_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_index_datapoints] = ( + mock_rpc + ) request = {} client.read_index_datapoints(request) @@ -1741,9 +1743,9 @@ def test_read_index_datapoints_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.read_index_datapoints - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_index_datapoints] = ( + mock_rpc + ) request = {} client.read_index_datapoints(request) @@ -2136,9 +2138,9 @@ def test_find_neighbors_rest_call_success(request_type): def test_find_neighbors_rest_interceptors(null_interceptor): transport = transports.MatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MatchServiceRestInterceptor() + ), ) client = MatchServiceClient(transport=transport) @@ -2269,9 +2271,9 @@ def test_read_index_datapoints_rest_call_success(request_type): def test_read_index_datapoints_rest_interceptors(null_interceptor): transport = transports.MatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MatchServiceRestInterceptor() + ), ) client = MatchServiceClient(transport=transport) @@ -3104,9 +3106,9 @@ async def test_find_neighbors_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMatchServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncMatchServiceRestInterceptor() + ), ) client = MatchServiceAsyncClient(transport=transport) @@ -3253,9 +3255,9 @@ async def test_read_index_datapoints_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMatchServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMatchServiceRestInterceptor(), + interceptor=( + None if null_interceptor 
else transports.AsyncMatchServiceRestInterceptor() + ), ) client = MatchServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_memory_bank_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_memory_bank_service.py index 67bfe50503..c0e95d4476 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_memory_bank_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_memory_bank_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1beta1.services.memory_bank_service import ( MemoryBankServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.memory_bank_service import pagers -from google.cloud.aiplatform_v1beta1.services.memory_bank_service import transports +from google.cloud.aiplatform_v1beta1.services.memory_bank_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.memory_bank_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import memory_bank from google.cloud.aiplatform_v1beta1.types import memory_bank_service @@ -3025,9 +3029,9 @@ def test_generate_memories_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.generate_memories - ] = mock_rpc + client._transport._wrapped_methods[client._transport.generate_memories] = ( + mock_rpc + ) request = {} client.generate_memories(request) @@ -3373,9 +3377,9 @@ def test_retrieve_memories_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.retrieve_memories - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retrieve_memories] = ( + mock_rpc + ) request = {} client.retrieve_memories(request) @@ -4577,9 +4581,9 @@ def test_generate_memories_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.generate_memories - ] = mock_rpc + client._transport._wrapped_methods[client._transport.generate_memories] = ( + mock_rpc + ) request = {} client.generate_memories(request) @@ -4759,9 +4763,9 @@ def test_retrieve_memories_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.retrieve_memories - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retrieve_memories] = ( + mock_rpc + ) request = {} client.retrieve_memories(request) @@ -5540,9 +5544,9 @@ def get_message_fields(field): def test_create_memory_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceClient(transport=transport) @@ -5677,9 +5681,9 @@ def test_get_memory_rest_call_success(request_type): def test_get_memory_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = 
MemoryBankServiceClient(transport=transport) @@ -5884,9 +5888,9 @@ def get_message_fields(field): def test_update_memory_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceClient(transport=transport) @@ -6017,9 +6021,9 @@ def test_list_memories_rest_call_success(request_type): def test_list_memories_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceClient(transport=transport) @@ -6147,9 +6151,9 @@ def test_delete_memory_rest_call_success(request_type): def test_delete_memory_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceClient(transport=transport) @@ -6274,9 +6278,9 @@ def test_generate_memories_rest_call_success(request_type): def test_generate_memories_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = 
MemoryBankServiceClient(transport=transport) @@ -6410,9 +6414,9 @@ def test_retrieve_memories_rest_call_success(request_type): def test_retrieve_memories_rest_interceptors(null_interceptor): transport = transports.MemoryBankServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemoryBankServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceClient(transport=transport) @@ -7439,9 +7443,11 @@ async def test_create_memory_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) @@ -7595,9 +7601,11 @@ async def test_get_memory_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) @@ -7819,9 +7827,11 @@ async def test_update_memory_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) @@ -7969,9 +7979,11 @@ async def 
test_list_memories_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) @@ -8116,9 +8128,11 @@ async def test_delete_memory_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) @@ -8260,9 +8274,11 @@ async def test_generate_memories_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) @@ -8412,9 +8428,11 @@ async def test_retrieve_memories_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMemoryBankServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMemoryBankServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMemoryBankServiceRestInterceptor() + ), ) client = MemoryBankServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py 
index 09b7765399..b541a42b0d 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py @@ -73,7 +73,9 @@ MetadataServiceClient, ) from google.cloud.aiplatform_v1beta1.services.metadata_service import pagers -from google.cloud.aiplatform_v1beta1.services.metadata_service import transports +from google.cloud.aiplatform_v1beta1.services.metadata_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import artifact from google.cloud.aiplatform_v1beta1.types import artifact as gca_artifact from google.cloud.aiplatform_v1beta1.types import context @@ -84,10 +86,14 @@ from google.cloud.aiplatform_v1beta1.types import execution as gca_execution from google.cloud.aiplatform_v1beta1.types import lineage_subgraph from google.cloud.aiplatform_v1beta1.types import metadata_schema -from google.cloud.aiplatform_v1beta1.types import metadata_schema as gca_metadata_schema +from google.cloud.aiplatform_v1beta1.types import ( + metadata_schema as gca_metadata_schema, +) from google.cloud.aiplatform_v1beta1.types import metadata_service from google.cloud.aiplatform_v1beta1.types import metadata_store -from google.cloud.aiplatform_v1beta1.types import metadata_store as gca_metadata_store +from google.cloud.aiplatform_v1beta1.types import ( + metadata_store as gca_metadata_store, +) from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1267,9 +1273,9 @@ def test_create_metadata_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_store] = ( + mock_rpc + ) request = {} client.create_metadata_store(request) @@ -1638,9 +1644,9 @@ def test_get_metadata_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_store] = ( + mock_rpc + ) request = {} client.get_metadata_store(request) @@ -1984,9 +1990,9 @@ def test_list_metadata_stores_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_stores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_stores] = ( + mock_rpc + ) request = {} client.list_metadata_stores(request) @@ -2526,9 +2532,9 @@ def test_delete_metadata_store_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_metadata_store] = ( + mock_rpc + ) request = {} client.delete_metadata_store(request) @@ -7729,9 +7735,9 @@ def test_add_context_children_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.add_context_children - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_context_children] = ( + mock_rpc + ) request = {} client.add_context_children(request) @@ -8770,9 +8776,9 @@ def test_create_execution_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_execution] = ( + mock_rpc + ) request = {} client.create_execution(request) @@ -10002,9 +10008,9 @@ def test_update_execution_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_execution] = ( + mock_rpc + ) request = {} client.update_execution(request) @@ -10349,9 +10355,9 @@ def test_delete_execution_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_execution] = ( + mock_rpc + ) request = {} client.delete_execution(request) @@ -10681,9 +10687,9 @@ def test_purge_executions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.purge_executions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_executions] = ( + mock_rpc + ) request = {} client.purge_executions(request) @@ -11017,9 +11023,9 @@ def test_add_execution_events_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_execution_events - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_execution_events] = ( + mock_rpc + ) request = {} client.add_execution_events(request) @@ -11717,9 +11723,9 @@ def test_create_metadata_schema_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_schema] = ( + mock_rpc + ) request = {} client.create_metadata_schema(request) @@ -12101,9 +12107,9 @@ def test_get_metadata_schema_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_schema] = ( + mock_rpc + ) request = {} client.get_metadata_schema(request) @@ -12459,9 +12465,9 @@ def test_list_metadata_schemas_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_metadata_schemas - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_schemas] = ( + mock_rpc + ) request = {} client.list_metadata_schemas(request) @@ -13275,9 +13281,9 @@ def test_create_metadata_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_store] = ( + mock_rpc + ) request = {} client.create_metadata_store(request) @@ -13471,9 +13477,9 @@ def test_get_metadata_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_store] = ( + mock_rpc + ) request = {} client.get_metadata_store(request) @@ -13655,9 +13661,9 @@ def test_list_metadata_stores_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_stores - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_stores] = ( + mock_rpc + ) request = {} client.list_metadata_stores(request) @@ -13916,9 +13922,9 @@ def test_delete_metadata_store_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_metadata_store - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_metadata_store] = ( + mock_rpc + ) request = {} client.delete_metadata_store(request) @@ -16726,9 +16732,9 @@ def test_add_context_children_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_context_children - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_context_children] = ( + mock_rpc + ) request = {} client.add_context_children(request) @@ -17288,9 +17294,9 @@ def test_create_execution_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_execution] = ( + mock_rpc + ) request = {} client.create_execution(request) @@ -17929,9 +17935,9 @@ def test_update_execution_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_execution] = ( + mock_rpc + ) request = {} client.update_execution(request) @@ -18126,9 +18132,9 @@ def test_delete_execution_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_execution - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_execution] = ( + mock_rpc + ) request = {} client.delete_execution(request) @@ -18309,9 +18315,9 @@ def test_purge_executions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.purge_executions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_executions] = ( + mock_rpc + ) request = {} client.purge_executions(request) @@ -18505,9 +18511,9 @@ def test_add_execution_events_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_execution_events - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_execution_events] = ( + mock_rpc + ) request = {} client.add_execution_events(request) @@ -18882,9 +18888,9 @@ def test_create_metadata_schema_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_metadata_schema] = ( + mock_rpc + ) request = {} client.create_metadata_schema(request) @@ -19081,9 +19087,9 @@ def test_get_metadata_schema_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_metadata_schema - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_metadata_schema] = ( + mock_rpc + ) request = {} client.get_metadata_schema(request) @@ -19266,9 +19272,9 @@ def test_list_metadata_schemas_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_schemas - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_metadata_schemas] = ( + mock_rpc + ) request = {} client.list_metadata_schemas(request) @@ -21607,9 +21613,9 @@ def get_message_fields(field): def test_create_metadata_store_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -21739,9 +21745,9 @@ def test_get_metadata_store_rest_call_success(request_type): def test_get_metadata_store_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -21869,9 +21875,9 @@ def test_list_metadata_stores_rest_call_success(request_type): def test_list_metadata_stores_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -21996,9 +22002,9 @@ def test_delete_metadata_store_rest_call_success(request_type): def test_delete_metadata_store_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22225,9 +22231,9 @@ def get_message_fields(field): def test_create_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22370,9 +22376,9 @@ def test_get_artifact_rest_call_success(request_type): def test_get_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22501,9 +22507,9 @@ def test_list_artifacts_rest_call_success(request_type): def test_list_artifacts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22736,9 +22742,9 @@ def get_message_fields(field): def test_update_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22861,9 +22867,9 @@ def test_delete_artifact_rest_call_success(request_type): def test_delete_artifact_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -22988,9 +22994,9 @@ def test_purge_artifacts_rest_call_success(request_type): def test_purge_artifacts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23213,9 +23219,9 @@ def get_message_fields(field): def test_create_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -23354,9 +23360,9 @@ def test_get_context_rest_call_success(request_type): def test_get_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23485,9 +23491,9 @@ def test_list_contexts_rest_call_success(request_type): def test_list_contexts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23717,9 +23723,9 @@ def get_message_fields(field): def test_update_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23842,9 +23848,9 @@ def test_delete_context_rest_call_success(request_type): def test_delete_context_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -23969,9 +23975,9 @@ def 
test_purge_contexts_rest_call_success(request_type): def test_purge_contexts_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24103,9 +24109,9 @@ def test_add_context_artifacts_and_executions_rest_call_success(request_type): def test_add_context_artifacts_and_executions_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24241,9 +24247,9 @@ def test_add_context_children_rest_call_success(request_type): def test_add_context_children_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24375,9 +24381,9 @@ def test_remove_context_children_rest_call_success(request_type): def test_remove_context_children_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -24509,9 +24515,9 @@ def test_query_context_lineage_subgraph_rest_call_success(request_type): def test_query_context_lineage_subgraph_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24735,9 +24741,9 @@ def get_message_fields(field): def test_create_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -24878,9 +24884,9 @@ def test_get_execution_rest_call_success(request_type): def test_get_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25009,9 +25015,9 @@ def test_list_executions_rest_call_success(request_type): def test_list_executions_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -25241,9 +25247,9 @@ def get_message_fields(field): def test_update_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25366,9 +25372,9 @@ def test_delete_execution_rest_call_success(request_type): def test_delete_execution_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25493,9 +25499,9 @@ def test_purge_executions_rest_call_success(request_type): def test_purge_executions_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25623,9 +25629,9 @@ def test_add_execution_events_rest_call_success(request_type): def test_add_execution_events_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) 
@@ -25757,9 +25763,9 @@ def test_query_execution_inputs_and_outputs_rest_call_success(request_type): def test_query_execution_inputs_and_outputs_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -25981,9 +25987,9 @@ def get_message_fields(field): def test_create_metadata_schema_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -26126,9 +26132,9 @@ def test_get_metadata_schema_rest_call_success(request_type): def test_get_metadata_schema_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -26260,9 +26266,9 @@ def test_list_metadata_schemas_rest_call_success(request_type): def test_list_metadata_schemas_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = 
MetadataServiceClient(transport=transport) @@ -26394,9 +26400,9 @@ def test_query_artifact_lineage_subgraph_rest_call_success(request_type): def test_query_artifact_lineage_subgraph_rest_interceptors(null_interceptor): transport = transports.MetadataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MetadataServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MetadataServiceRestInterceptor() + ), ) client = MetadataServiceClient(transport=transport) @@ -27941,9 +27947,11 @@ async def test_create_metadata_store_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28089,9 +28097,11 @@ async def test_get_metadata_store_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28235,9 +28245,11 @@ async def test_list_metadata_stores_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28378,9 
+28390,11 @@ async def test_delete_metadata_store_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28623,9 +28637,11 @@ async def test_create_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28785,9 +28801,11 @@ async def test_get_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -28933,9 +28951,11 @@ async def test_list_artifacts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29185,9 +29205,11 @@ async def test_update_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29327,9 +29349,11 @@ async def test_delete_artifact_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29471,9 +29495,11 @@ async def test_purge_artifacts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29713,9 +29739,11 @@ async def test_create_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -29873,9 +29901,11 @@ async def test_get_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30020,9 +30050,11 @@ async def test_list_contexts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30269,9 +30301,11 @@ async def test_update_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30411,9 +30445,11 @@ async def test_delete_context_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30555,9 +30591,11 @@ async def test_purge_contexts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30710,9 +30748,11 @@ async def test_add_context_artifacts_and_executions_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -30864,9 +30904,11 @@ async def test_add_context_children_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31014,9 +31056,11 @@ async def test_remove_context_children_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31166,9 +31210,11 @@ async def test_query_context_lineage_subgraph_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ 
-31410,9 +31456,11 @@ async def test_create_execution_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31570,9 +31618,11 @@ async def test_get_execution_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31718,9 +31768,11 @@ async def test_list_executions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -31967,9 +32019,11 @@ async def test_update_execution_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32109,9 +32163,11 @@ async def test_delete_execution_rest_asyncio_interceptors(null_interceptor): ) transport = 
transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32253,9 +32309,11 @@ async def test_purge_executions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32400,9 +32458,11 @@ async def test_add_execution_events_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32554,9 +32614,11 @@ async def test_query_execution_inputs_and_outputs_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32794,9 +32856,11 @@ async def test_create_metadata_schema_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if 
null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -32955,9 +33019,11 @@ async def test_get_metadata_schema_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -33105,9 +33171,11 @@ async def test_list_metadata_schemas_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) @@ -33257,9 +33325,11 @@ async def test_query_artifact_lineage_subgraph_rest_asyncio_interceptors( ) transport = transports.AsyncMetadataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMetadataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMetadataServiceRestInterceptor() + ), ) client = MetadataServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 1069dbb2b0..3c96b72667 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -73,7 +73,9 @@ MigrationServiceClient, 
) from google.cloud.aiplatform_v1beta1.services.migration_service import pagers -from google.cloud.aiplatform_v1beta1.services.migration_service import transports +from google.cloud.aiplatform_v1beta1.services.migration_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import migratable_resource from google.cloud.aiplatform_v1beta1.types import migration_service from google.cloud.location import locations_pb2 @@ -2872,9 +2874,9 @@ def test_search_migratable_resources_rest_call_success(request_type): def test_search_migratable_resources_rest_interceptors(null_interceptor): transport = transports.MigrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MigrationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MigrationServiceRestInterceptor() + ), ) client = MigrationServiceClient(transport=transport) @@ -2999,9 +3001,9 @@ def test_batch_migrate_resources_rest_call_success(request_type): def test_batch_migrate_resources_rest_interceptors(null_interceptor): transport = transports.MigrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MigrationServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.MigrationServiceRestInterceptor() + ), ) client = MigrationServiceClient(transport=transport) @@ -3851,9 +3853,11 @@ async def test_search_migratable_resources_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncMigrationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMigrationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMigrationServiceRestInterceptor() + ), ) client = MigrationServiceAsyncClient(transport=transport) @@ -3996,9 +4000,11 @@ async def 
test_batch_migrate_resources_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncMigrationServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncMigrationServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncMigrationServiceRestInterceptor() + ), ) client = MigrationServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_model_garden_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_model_garden_service.py index 9c2fac58ff..270f4bd823 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_model_garden_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_model_garden_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1beta1.services.model_garden_service import ( ModelGardenServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.model_garden_service import pagers -from google.cloud.aiplatform_v1beta1.services.model_garden_service import transports +from google.cloud.aiplatform_v1beta1.services.model_garden_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.model_garden_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import accelerator_type from google.cloud.aiplatform_v1beta1.types import env_var from google.cloud.aiplatform_v1beta1.types import io @@ -1299,9 +1303,9 @@ def test_get_publisher_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_publisher_model] = ( + mock_rpc + ) request = {} client.get_publisher_model(request) @@ -1670,9 +1674,9 @@ def test_list_publisher_models_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_publisher_models - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_publisher_models] = ( + mock_rpc + ) request = {} client.list_publisher_models(request) @@ -2251,9 +2255,9 @@ async def test_deploy_async_use_cached_wrapped_rpc(transport: str = "grpc_asynci # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.deploy - ] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.deploy] = ( + mock_rpc + ) request = {} await client.deploy(request) @@ -2467,9 +2471,9 @@ def test_deploy_publisher_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.deploy_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.deploy_publisher_model] = ( + mock_rpc + ) request = {} client.deploy_publisher_model(request) @@ -2730,9 +2734,9 @@ def test_export_publisher_model_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.export_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_publisher_model] = ( + mock_rpc + ) request = {} client.export_publisher_model(request) @@ -3653,9 +3657,9 @@ def test_get_publisher_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_publisher_model] = ( + mock_rpc + ) request = {} client.get_publisher_model(request) @@ -3855,9 +3859,9 @@ def test_list_publisher_models_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_publisher_models - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_publisher_models] = ( + mock_rpc + ) request = {} client.list_publisher_models(request) @@ -4247,9 +4251,9 @@ def test_deploy_publisher_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.deploy_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.deploy_publisher_model] = ( + mock_rpc + ) request = {} client.deploy_publisher_model(request) @@ -4386,9 +4390,9 @@ def test_export_publisher_model_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.export_publisher_model - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_publisher_model] = ( + mock_rpc + ) request = {} client.export_publisher_model(request) @@ -5494,9 +5498,9 @@ def test_get_publisher_model_rest_call_success(request_type): def test_get_publisher_model_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -5624,9 +5628,9 @@ def test_list_publisher_models_rest_call_success(request_type): def test_list_publisher_models_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -5749,9 +5753,9 @@ def test_deploy_rest_call_success(request_type): def test_deploy_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -5872,9 +5876,9 @@ def test_deploy_publisher_model_rest_call_success(request_type): def test_deploy_publisher_model_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -6002,9 +6006,9 @@ def test_export_publisher_model_rest_call_success(request_type): def test_export_publisher_model_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -6138,9 +6142,9 @@ def test_check_publisher_model_eula_acceptance_rest_call_success(request_type): def test_check_publisher_model_eula_acceptance_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -6279,9 +6283,9 @@ def test_accept_publisher_model_eula_rest_call_success(request_type): def test_accept_publisher_model_eula_rest_interceptors(null_interceptor): transport = transports.ModelGardenServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelGardenServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceClient(transport=transport) @@ -7260,9 +7264,11 @@ async def test_get_publisher_model_rest_asyncio_interceptors(null_interceptor): ) 
transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -7406,9 +7412,11 @@ async def test_list_publisher_models_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -7549,9 +7557,11 @@ async def test_deploy_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -7688,9 +7698,11 @@ async def test_deploy_publisher_model_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -7834,9 +7846,11 @@ async def test_export_publisher_model_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncModelGardenServiceRestTransport( 
credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -7990,9 +8004,11 @@ async def test_check_publisher_model_eula_acceptance_rest_asyncio_interceptors( ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) @@ -8147,9 +8163,11 @@ async def test_accept_publisher_model_eula_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncModelGardenServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelGardenServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelGardenServiceRestInterceptor() + ), ) client = ModelGardenServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_model_monitoring_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_model_monitoring_service.py index 81f67276f0..a559adbbc1 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_model_monitoring_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_model_monitoring_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import ( ModelMonitoringServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import pagers -from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import transports +from google.cloud.aiplatform_v1beta1.services.model_monitoring_service 
import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.model_monitoring_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import accelerator_type from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import explanation @@ -82,7 +86,9 @@ from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import model_monitor -from google.cloud.aiplatform_v1beta1.types import model_monitor as gca_model_monitor +from google.cloud.aiplatform_v1beta1.types import ( + model_monitor as gca_model_monitor, +) from google.cloud.aiplatform_v1beta1.types import model_monitoring_alert from google.cloud.aiplatform_v1beta1.types import model_monitoring_job from google.cloud.aiplatform_v1beta1.types import ( @@ -1314,9 +1320,9 @@ def test_create_model_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_model_monitor] = ( + mock_rpc + ) request = {} client.create_model_monitor(request) @@ -1702,9 +1708,9 @@ def test_update_model_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_model_monitor] = ( + mock_rpc + ) request = {} client.update_model_monitor(request) @@ -2101,9 +2107,9 @@ def test_get_model_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_model_monitor] = ( + mock_rpc + ) request = {} client.get_model_monitor(request) @@ -2453,9 +2459,9 @@ def test_list_model_monitors_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_monitors - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_monitors] = ( + mock_rpc + ) request = {} client.list_model_monitors(request) @@ -2994,9 +3000,9 @@ def test_delete_model_monitor_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_model_monitor] = ( + mock_rpc + ) request = {} client.delete_model_monitor(request) @@ -6039,9 +6045,9 @@ def test_create_model_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_model_monitor] = ( + mock_rpc + ) request = {} client.create_model_monitor(request) @@ -6245,9 +6251,9 @@ def test_update_model_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_model_monitor] = ( + mock_rpc + ) request = {} client.update_model_monitor(request) @@ -6448,9 +6454,9 @@ def test_get_model_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_model_monitor] = ( + mock_rpc + ) request = {} client.get_model_monitor(request) @@ -6632,9 +6638,9 @@ def test_list_model_monitors_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_monitors - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_monitors] = ( + mock_rpc + ) request = {} client.list_model_monitors(request) @@ -6901,9 +6907,9 @@ def test_delete_model_monitor_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_model_monitor - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_model_monitor] = ( + mock_rpc + ) request = {} client.delete_model_monitor(request) @@ -9412,9 +9418,11 @@ def get_message_fields(field): def test_create_model_monitor_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -9752,9 +9760,11 @@ def get_message_fields(field): def test_update_model_monitor_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -9888,9 +9898,11 @@ def test_get_model_monitor_rest_call_success(request_type): def test_get_model_monitor_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -10018,9 +10030,11 @@ def test_list_model_monitors_rest_call_success(request_type): def test_list_model_monitors_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -10145,9 +10159,11 @@ def test_delete_model_monitor_rest_call_success(request_type): def test_delete_model_monitor_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -10504,9 +10520,11 @@ def get_message_fields(field): def test_create_model_monitoring_job_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -10649,9 +10667,11 @@ def test_get_model_monitoring_job_rest_call_success(request_type): def test_get_model_monitoring_job_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -10789,9 +10809,11 @@ def 
test_list_model_monitoring_jobs_rest_call_success(request_type): def test_list_model_monitoring_jobs_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -10922,9 +10944,11 @@ def test_delete_model_monitoring_job_rest_call_success(request_type): def test_delete_model_monitoring_job_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -11060,9 +11084,11 @@ def test_search_model_monitoring_stats_rest_call_success(request_type): def test_search_model_monitoring_stats_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -11207,9 +11233,11 @@ def test_search_model_monitoring_alerts_rest_call_success(request_type): def test_search_model_monitoring_alerts_rest_interceptors(null_interceptor): transport = transports.ModelMonitoringServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.ModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceClient(transport=transport) @@ -12466,9 +12494,11 @@ async def test_create_model_monitor_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -12824,9 +12854,11 @@ async def test_update_model_monitor_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -12978,9 +13010,11 @@ async def test_get_model_monitor_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -13124,9 +13158,11 @@ async def test_list_model_monitors_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -13268,9 +13304,11 @@ async def test_delete_model_monitor_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -13645,9 +13683,11 @@ async def test_create_model_monitoring_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -13806,9 +13846,11 @@ async def test_get_model_monitoring_job_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -13963,9 +14005,11 @@ async def test_list_model_monitoring_jobs_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -14112,9 +14156,11 @@ async def test_delete_model_monitoring_job_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -14268,9 +14314,11 @@ async def test_search_model_monitoring_stats_rest_asyncio_interceptors( ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) @@ -14433,9 +14481,11 @@ async def test_search_model_monitoring_alerts_rest_asyncio_interceptors( ) transport = transports.AsyncModelMonitoringServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelMonitoringServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncModelMonitoringServiceRestInterceptor() + ), ) client = ModelMonitoringServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py index d06a10ee42..efb5161661 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py +++ 
b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1beta1.services.model_service import ( ModelServiceAsyncClient, ) -from google.cloud.aiplatform_v1beta1.services.model_service import ModelServiceClient +from google.cloud.aiplatform_v1beta1.services.model_service import ( + ModelServiceClient, +) from google.cloud.aiplatform_v1beta1.services.model_service import pagers from google.cloud.aiplatform_v1beta1.services.model_service import transports from google.cloud.aiplatform_v1beta1.types import deployed_model_ref @@ -2492,9 +2494,9 @@ def test_list_model_versions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_versions] = ( + mock_rpc + ) request = {} client.list_model_versions(request) @@ -4666,9 +4668,9 @@ def test_delete_model_version_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_model_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_model_version] = ( + mock_rpc + ) request = {} client.delete_model_version(request) @@ -5056,9 +5058,9 @@ def test_merge_version_aliases_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.merge_version_aliases - ] = mock_rpc + client._transport._wrapped_methods[client._transport.merge_version_aliases] = ( + mock_rpc + ) request = {} client.merge_version_aliases(request) @@ -7253,9 +7255,9 @@ def test_get_model_evaluation_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_model_evaluation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_model_evaluation] = ( + mock_rpc + ) request = {} client.get_model_evaluation(request) @@ -7606,9 +7608,9 @@ def test_list_model_evaluations_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_evaluations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_evaluations] = ( + mock_rpc + ) request = {} client.list_model_evaluations(request) @@ -9848,9 +9850,9 @@ def test_list_model_versions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_model_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_versions] = ( + mock_rpc + ) request = {} client.list_model_versions(request) @@ -10932,9 +10934,9 @@ def test_delete_model_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_model_version - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_model_version] = ( + mock_rpc + ) request = {} client.delete_model_version(request) @@ -11114,9 +11116,9 @@ def test_merge_version_aliases_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.merge_version_aliases - ] = mock_rpc + client._transport._wrapped_methods[client._transport.merge_version_aliases] = ( + mock_rpc + ) request = {} client.merge_version_aliases(request) @@ -12307,9 +12309,9 @@ def test_get_model_evaluation_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_model_evaluation - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_model_evaluation] = ( + mock_rpc + ) request = {} client.get_model_evaluation(request) @@ -12492,9 +12494,9 @@ def test_list_model_evaluations_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_model_evaluations - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_model_evaluations] = ( + mock_rpc + ) request = {} client.list_model_evaluations(request) @@ -14583,9 +14585,9 @@ def test_upload_model_rest_call_success(request_type): def test_upload_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -14750,9 +14752,9 @@ def test_get_model_rest_call_success(request_type): def test_get_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -14873,9 +14875,9 @@ def test_list_models_rest_call_success(request_type): def test_list_models_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15002,9 +15004,9 @@ def test_list_model_versions_rest_call_success(request_type): def test_list_model_versions_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15136,9 +15138,9 @@ def test_list_model_version_checkpoints_rest_call_success(request_type): def test_list_model_version_checkpoints_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15513,9 +15515,9 @@ def get_message_fields(field): def test_update_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15634,9 +15636,9 @@ def test_update_explanation_dataset_rest_call_success(request_type): def test_update_explanation_dataset_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15756,9 +15758,9 @@ def test_delete_model_rest_call_success(request_type): def test_delete_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + 
None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -15879,9 +15881,9 @@ def test_delete_model_version_rest_call_success(request_type): def test_delete_model_version_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16049,9 +16051,9 @@ def test_merge_version_aliases_rest_call_success(request_type): def test_merge_version_aliases_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16169,9 +16171,9 @@ def test_export_model_rest_call_success(request_type): def test_export_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16290,9 +16292,9 @@ def test_copy_model_rest_call_success(request_type): def test_copy_model_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + 
), ) client = ModelServiceClient(transport=transport) @@ -16423,9 +16425,9 @@ def test_import_model_evaluation_rest_call_success(request_type): def test_import_model_evaluation_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16564,9 +16566,9 @@ def test_batch_import_model_evaluation_slices_rest_call_success(request_type): def test_batch_import_model_evaluation_slices_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16705,9 +16707,9 @@ def test_batch_import_evaluated_annotations_rest_call_success(request_type): def test_batch_import_evaluated_annotations_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16849,9 +16851,9 @@ def test_get_model_evaluation_rest_call_success(request_type): def test_get_model_evaluation_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -16979,9 +16981,9 @@ def test_list_model_evaluations_rest_call_success(request_type): def test_list_model_evaluations_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -17118,9 +17120,9 @@ def test_get_model_evaluation_slice_rest_call_success(request_type): def test_get_model_evaluation_slice_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -17255,9 +17257,9 @@ def test_list_model_evaluation_slices_rest_call_success(request_type): def test_list_model_evaluation_slices_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -17388,9 +17390,9 @@ def test_recommend_spec_rest_call_success(request_type): def test_recommend_spec_rest_interceptors(null_interceptor): transport = transports.ModelServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.ModelServiceRestInterceptor() + ), ) client = ModelServiceClient(transport=transport) @@ -18614,9 +18616,9 @@ async def test_upload_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -18799,9 +18801,9 @@ async def test_get_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -18940,9 +18942,9 @@ async def test_list_models_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19085,9 +19087,9 @@ async def test_list_model_versions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19238,9 +19240,9 @@ async def test_list_model_version_checkpoints_rest_asyncio_interceptors( ) transport = 
transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19635,9 +19637,9 @@ async def test_update_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19772,9 +19774,9 @@ async def test_update_explanation_dataset_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -19912,9 +19914,9 @@ async def test_delete_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20051,9 +20053,9 @@ async def test_delete_model_version_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20237,9 +20239,9 @@ async def test_merge_version_aliases_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20375,9 +20377,9 @@ async def test_export_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20514,9 +20516,9 @@ async def test_copy_model_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20663,9 +20665,9 @@ async def test_import_model_evaluation_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20824,9 +20826,9 @@ async def 
test_batch_import_model_evaluation_slices_rest_asyncio_interceptors( ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -20985,9 +20987,9 @@ async def test_batch_import_evaluated_annotations_rest_asyncio_interceptors( ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -21146,9 +21148,9 @@ async def test_get_model_evaluation_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -21292,9 +21294,9 @@ async def test_list_model_evaluations_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -21447,9 +21449,9 @@ async def test_get_model_evaluation_slice_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -21600,9 +21602,9 @@ async def test_list_model_evaluation_slices_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) @@ -21749,9 +21751,9 @@ async def test_recommend_spec_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncModelServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncModelServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncModelServiceRestInterceptor() + ), ) client = ModelServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_notebook_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_notebook_service.py index e7488e8596..15093005c1 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_notebook_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_notebook_service.py @@ -73,7 +73,9 @@ NotebookServiceClient, ) from google.cloud.aiplatform_v1beta1.services.notebook_service import pagers -from google.cloud.aiplatform_v1beta1.services.notebook_service import transports +from google.cloud.aiplatform_v1beta1.services.notebook_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import accelerator_type from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import env_var @@ -85,12 +87,16 @@ from 
google.cloud.aiplatform_v1beta1.types import ( notebook_execution_job as gca_notebook_execution_job, ) -from google.cloud.aiplatform_v1beta1.types import notebook_idle_shutdown_config +from google.cloud.aiplatform_v1beta1.types import ( + notebook_idle_shutdown_config, +) from google.cloud.aiplatform_v1beta1.types import notebook_runtime from google.cloud.aiplatform_v1beta1.types import ( notebook_runtime as gca_notebook_runtime, ) -from google.cloud.aiplatform_v1beta1.types import notebook_runtime_template_ref +from google.cloud.aiplatform_v1beta1.types import ( + notebook_runtime_template_ref, +) from google.cloud.aiplatform_v1beta1.types import notebook_service from google.cloud.aiplatform_v1beta1.types import notebook_software_config from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -3736,9 +3742,9 @@ def test_get_notebook_runtime_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_notebook_runtime] = ( + mock_rpc + ) request = {} client.get_notebook_runtime(request) @@ -4116,9 +4122,9 @@ def test_list_notebook_runtimes_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_notebook_runtimes - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_notebook_runtimes] = ( + mock_rpc + ) request = {} client.list_notebook_runtimes(request) @@ -5352,9 +5358,9 @@ def test_start_notebook_runtime_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.start_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.start_notebook_runtime] = ( + mock_rpc + ) request = {} client.start_notebook_runtime(request) @@ -5699,9 +5705,9 @@ def test_stop_notebook_runtime_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stop_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stop_notebook_runtime] = ( + mock_rpc + ) request = {} client.stop_notebook_runtime(request) @@ -8900,9 +8906,9 @@ def test_get_notebook_runtime_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_notebook_runtime] = ( + mock_rpc + ) request = {} client.get_notebook_runtime(request) @@ -9085,9 +9091,9 @@ def test_list_notebook_runtimes_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_notebook_runtimes - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_notebook_runtimes] = ( + mock_rpc + ) request = {} client.list_notebook_runtimes(request) @@ -9723,9 +9729,9 @@ def test_start_notebook_runtime_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.start_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.start_notebook_runtime] = ( + mock_rpc + ) request = {} client.start_notebook_runtime(request) @@ -9908,9 +9914,9 @@ def test_stop_notebook_runtime_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stop_notebook_runtime - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stop_notebook_runtime] = ( + mock_rpc + ) request = {} client.stop_notebook_runtime(request) @@ -12085,9 +12091,9 @@ def get_message_fields(field): def test_create_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12238,9 +12244,9 @@ def test_get_notebook_runtime_template_rest_call_success(request_type): def test_get_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12373,9 +12379,9 @@ def test_list_notebook_runtime_templates_rest_call_success(request_type): def test_list_notebook_runtime_templates_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if 
null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12505,9 +12511,9 @@ def test_delete_notebook_runtime_template_rest_call_success(request_type): def test_delete_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12789,9 +12795,9 @@ def get_message_fields(field): def test_update_notebook_runtime_template_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -12918,9 +12924,9 @@ def test_assign_notebook_runtime_rest_call_success(request_type): def test_assign_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13083,9 +13089,9 @@ def test_get_notebook_runtime_rest_call_success(request_type): def test_get_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13213,9 +13219,9 @@ def test_list_notebook_runtimes_rest_call_success(request_type): def test_list_notebook_runtimes_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13344,9 +13350,9 @@ def test_delete_notebook_runtime_rest_call_success(request_type): def test_delete_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13472,9 +13478,9 @@ def test_upgrade_notebook_runtime_rest_call_success(request_type): def test_upgrade_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13600,9 +13606,9 @@ def test_start_notebook_runtime_rest_call_success(request_type): def test_start_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13728,9 +13734,9 @@ def test_stop_notebook_runtime_rest_call_success(request_type): def test_stop_notebook_runtime_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -13980,9 +13986,9 @@ def get_message_fields(field): def test_create_notebook_execution_job_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -14125,9 +14131,9 @@ def test_get_notebook_execution_job_rest_call_success(request_type): def test_get_notebook_execution_job_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -14260,9 +14266,9 @@ def test_list_notebook_execution_jobs_rest_call_success(request_type): def test_list_notebook_execution_jobs_rest_interceptors(null_interceptor): transport = 
transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -14391,9 +14397,9 @@ def test_delete_notebook_execution_job_rest_call_success(request_type): def test_delete_notebook_execution_job_rest_interceptors(null_interceptor): transport = transports.NotebookServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.NotebookServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.NotebookServiceRestInterceptor() + ), ) client = NotebookServiceClient(transport=transport) @@ -15672,9 +15678,11 @@ async def test_create_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -15843,9 +15851,11 @@ async def test_get_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -15998,9 +16008,11 @@ async def test_list_notebook_runtime_templates_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16149,9 +16161,11 @@ async def test_delete_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16451,9 +16465,11 @@ async def test_update_notebook_runtime_template_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16596,9 +16612,11 @@ async def test_assign_notebook_runtime_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16777,9 +16795,11 @@ async def test_get_notebook_runtime_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if 
null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -16923,9 +16943,11 @@ async def test_list_notebook_runtimes_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17070,9 +17092,11 @@ async def test_delete_notebook_runtime_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17214,9 +17238,11 @@ async def test_upgrade_notebook_runtime_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17358,9 +17384,11 @@ async def test_start_notebook_runtime_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = 
NotebookServiceAsyncClient(transport=transport) @@ -17502,9 +17530,11 @@ async def test_stop_notebook_runtime_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17772,9 +17802,11 @@ async def test_create_notebook_execution_job_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -17935,9 +17967,11 @@ async def test_get_notebook_execution_job_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -18087,9 +18121,11 @@ async def test_list_notebook_execution_jobs_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) @@ -18238,9 +18274,11 @@ async def 
test_delete_notebook_execution_job_rest_asyncio_interceptors( ) transport = transports.AsyncNotebookServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncNotebookServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncNotebookServiceRestInterceptor() + ), ) client = NotebookServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_persistent_resource_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_persistent_resource_service.py index 4887220ab7..b553eec53f 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_persistent_resource_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_persistent_resource_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import ( PersistentResourceServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import pagers +from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import ( + pagers, +) from google.cloud.aiplatform_v1beta1.services.persistent_resource_service import ( transports, ) @@ -5487,9 +5489,11 @@ def get_message_fields(field): def test_create_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -5635,9 +5639,11 @@ def test_get_persistent_resource_rest_call_success(request_type): def test_get_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -5772,9 +5778,11 @@ def test_list_persistent_resources_rest_call_success(request_type): def test_list_persistent_resources_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -5909,9 +5917,11 @@ def test_delete_persistent_resource_rest_call_success(request_type): def test_delete_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -6203,9 +6213,11 @@ def get_message_fields(field): def test_update_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -6333,9 +6345,11 @@ def test_reboot_persistent_resource_rest_call_success(request_type): 
def test_reboot_persistent_resource_rest_interceptors(null_interceptor): transport = transports.PersistentResourceServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.PersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceClient(transport=transport) @@ -7427,9 +7441,11 @@ async def test_create_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -7591,9 +7607,11 @@ async def test_get_persistent_resource_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -7744,9 +7762,11 @@ async def test_list_persistent_resources_rest_asyncio_interceptors(null_intercep ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ 
-7897,9 +7917,11 @@ async def test_delete_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -8207,9 +8229,11 @@ async def test_update_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) @@ -8353,9 +8377,11 @@ async def test_reboot_persistent_resource_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPersistentResourceServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPersistentResourceServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPersistentResourceServiceRestInterceptor() + ), ) client = PersistentResourceServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py index 4f85883cac..f6fe3570a7 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py @@ -73,7 +73,9 @@ PipelineServiceClient, ) from google.cloud.aiplatform_v1beta1.services.pipeline_service import pagers -from 
google.cloud.aiplatform_v1beta1.services.pipeline_service import transports +from google.cloud.aiplatform_v1beta1.services.pipeline_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import artifact from google.cloud.aiplatform_v1beta1.types import context from google.cloud.aiplatform_v1beta1.types import deployed_model_ref @@ -87,7 +89,9 @@ from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import pipeline_failure_policy from google.cloud.aiplatform_v1beta1.types import pipeline_job -from google.cloud.aiplatform_v1beta1.types import pipeline_job as gca_pipeline_job +from google.cloud.aiplatform_v1beta1.types import ( + pipeline_job as gca_pipeline_job, +) from google.cloud.aiplatform_v1beta1.types import pipeline_service from google.cloud.aiplatform_v1beta1.types import pipeline_state from google.cloud.aiplatform_v1beta1.types import service_networking @@ -1662,9 +1666,9 @@ def test_get_training_pipeline_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_training_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_training_pipeline] = ( + mock_rpc + ) request = {} client.get_training_pipeline(request) @@ -3265,9 +3269,9 @@ def test_create_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_pipeline_job] = ( + mock_rpc + ) request = {} client.create_pipeline_job(request) @@ -3665,9 +3669,9 @@ def test_get_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_pipeline_job] = ( + mock_rpc + ) request = {} client.get_pipeline_job(request) @@ -4024,9 +4028,9 @@ def test_list_pipeline_jobs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_pipeline_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_pipeline_jobs] = ( + mock_rpc + ) request = {} client.list_pipeline_jobs(request) @@ -4565,9 +4569,9 @@ def test_delete_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_pipeline_job] = ( + mock_rpc + ) request = {} client.delete_pipeline_job(request) @@ -5268,9 +5272,9 @@ def test_cancel_pipeline_job_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_pipeline_job] = ( + mock_rpc + ) request = {} client.cancel_pipeline_job(request) @@ -6085,9 +6089,9 @@ def test_get_training_pipeline_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_training_pipeline - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_training_pipeline] = ( + mock_rpc + ) request = {} client.get_training_pipeline(request) @@ -6901,9 +6905,9 @@ def test_create_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_pipeline_job] = ( + mock_rpc + ) request = {} client.create_pipeline_job(request) @@ -7096,9 +7100,9 @@ def test_get_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_pipeline_job] = ( + mock_rpc + ) request = {} client.get_pipeline_job(request) @@ -7280,9 +7284,9 @@ def test_list_pipeline_jobs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_pipeline_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_pipeline_jobs] = ( + mock_rpc + ) request = {} client.list_pipeline_jobs(request) @@ -7546,9 +7550,9 @@ def test_delete_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_pipeline_job] = ( + mock_rpc + ) request = {} client.delete_pipeline_job(request) @@ -7926,9 +7930,9 @@ def test_cancel_pipeline_job_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.cancel_pipeline_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_pipeline_job] = ( + mock_rpc + ) request = {} client.cancel_pipeline_job(request) @@ -9388,9 +9392,9 @@ def get_message_fields(field): def test_create_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9535,9 +9539,9 @@ def test_get_training_pipeline_rest_call_success(request_type): def test_get_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9665,9 +9669,9 @@ def test_list_training_pipelines_rest_call_success(request_type): def test_list_training_pipelines_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9796,9 +9800,9 @@ def test_delete_training_pipeline_rest_call_success(request_type): def test_delete_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -9924,9 +9928,9 @@ def test_cancel_training_pipeline_rest_call_success(request_type): def test_cancel_training_pipeline_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10261,9 +10265,9 @@ def get_message_fields(field): def test_create_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10413,9 +10417,9 @@ def test_get_pipeline_job_rest_call_success(request_type): def test_get_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None 
if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10540,9 +10544,9 @@ def test_list_pipeline_jobs_rest_call_success(request_type): def test_list_pipeline_jobs_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10667,9 +10671,9 @@ def test_delete_pipeline_job_rest_call_success(request_type): def test_delete_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10791,9 +10795,9 @@ def test_batch_delete_pipeline_jobs_rest_call_success(request_type): def test_batch_delete_pipeline_jobs_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -10915,9 +10919,9 @@ def test_cancel_pipeline_job_rest_call_success(request_type): def test_cancel_pipeline_job_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -11024,9 +11028,9 @@ def test_batch_cancel_pipeline_jobs_rest_call_success(request_type): def test_batch_cancel_pipeline_jobs_rest_interceptors(null_interceptor): transport = transports.PipelineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PipelineServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PipelineServiceRestInterceptor() + ), ) client = PipelineServiceClient(transport=transport) @@ -12368,9 +12372,11 @@ async def test_create_training_pipeline_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12531,9 +12537,11 @@ async def test_get_training_pipeline_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12677,9 +12685,11 @@ async def test_list_training_pipelines_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12824,9 +12834,11 @@ async def test_delete_training_pipeline_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -12968,9 +12980,11 @@ async def test_cancel_training_pipeline_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13321,9 +13335,11 @@ async def test_create_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13489,9 +13505,11 @@ async def test_get_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ 
-13633,9 +13651,11 @@ async def test_list_pipeline_jobs_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13776,9 +13796,11 @@ async def test_delete_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -13916,9 +13938,11 @@ async def test_batch_delete_pipeline_jobs_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -14057,9 +14081,11 @@ async def test_cancel_pipeline_job_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) @@ -14182,9 +14208,11 @@ async def test_batch_cancel_pipeline_jobs_rest_asyncio_interceptors(null_interce ) transport 
= transports.AsyncPipelineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPipelineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPipelineServiceRestInterceptor() + ), ) client = PipelineServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py index f066ba5232..0699520b90 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py @@ -69,7 +69,9 @@ from google.cloud.aiplatform_v1beta1.services.prediction_service import ( PredictionServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.prediction_service import transports +from google.cloud.aiplatform_v1beta1.services.prediction_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import io @@ -1892,9 +1894,9 @@ def test_stream_raw_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stream_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stream_raw_predict] = ( + mock_rpc + ) request = {} client.stream_raw_predict(request) @@ -2476,9 +2478,9 @@ def test_direct_raw_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.direct_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.direct_raw_predict] = ( + mock_rpc + ) request = {} client.direct_raw_predict(request) @@ -2701,9 +2703,9 @@ def test_stream_direct_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stream_direct_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stream_direct_predict] = ( + mock_rpc + ) request = [{}] client.stream_direct_predict(request) @@ -3016,9 +3018,9 @@ def test_streaming_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.streaming_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.streaming_predict] = ( + mock_rpc + ) request = [{}] client.streaming_predict(request) @@ -3430,9 +3432,9 @@ def test_streaming_raw_predict_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.streaming_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.streaming_raw_predict] = ( + mock_rpc + ) request = [{}] client.streaming_raw_predict(request) @@ -4233,9 +4235,9 @@ def test_generate_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.generate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.generate_content] = ( + mock_rpc + ) request = {} client.generate_content(request) @@ -4920,9 +4922,9 @@ def test_chat_completions_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.chat_completions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.chat_completions] = ( + mock_rpc + ) request = {} client.chat_completions(request) @@ -5558,9 +5560,9 @@ def test_stream_raw_predict_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.stream_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.stream_raw_predict] = ( + mock_rpc + ) request = {} client.stream_raw_predict(request) @@ -5868,9 +5870,9 @@ def test_direct_raw_predict_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.direct_raw_predict - ] = mock_rpc + client._transport._wrapped_methods[client._transport.direct_raw_predict] = ( + mock_rpc + ) request = {} client.direct_raw_predict(request) @@ -6540,9 +6542,9 @@ def test_generate_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.generate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.generate_content] = ( + mock_rpc + ) request = {} client.generate_content(request) @@ -6935,9 +6937,9 @@ def test_chat_completions_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.chat_completions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.chat_completions] = ( + mock_rpc + ) request = {} client.chat_completions(request) @@ -7893,9 +7895,9 @@ def test_predict_rest_call_success(request_type): def test_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8021,9 +8023,9 @@ def test_raw_predict_rest_call_success(request_type): def test_raw_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8151,9 +8153,9 @@ def test_stream_raw_predict_rest_call_success(request_type): def test_stream_raw_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8276,9 +8278,9 @@ def test_direct_predict_rest_call_success(request_type): def test_direct_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8408,9 +8410,9 @@ def test_direct_raw_predict_rest_call_success(request_type): def test_direct_raw_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8581,9 +8583,9 @@ def test_server_streaming_predict_rest_call_success(request_type): def test_server_streaming_predict_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8725,9 +8727,9 @@ def test_explain_rest_call_success(request_type): def test_explain_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8856,9 +8858,9 @@ def test_count_tokens_rest_call_success(request_type): def test_count_tokens_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -8990,9 +8992,9 @@ def test_generate_content_rest_call_success(request_type): def test_generate_content_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -9129,9 +9131,9 @@ def test_stream_generate_content_rest_call_success(request_type): def test_stream_generate_content_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -9342,9 +9344,9 @@ def get_message_fields(field): def test_chat_completions_rest_interceptors(null_interceptor): transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.PredictionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.PredictionServiceRestInterceptor() + ), ) client = PredictionServiceClient(transport=transport) @@ -10363,9 +10365,11 @@ async def test_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -10507,9 +10511,11 @@ async def test_raw_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -10652,9 +10658,11 @@ async def test_stream_raw_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -10793,9 +10801,11 @@ async def test_direct_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ 
-10942,9 +10952,11 @@ async def test_direct_raw_predict_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -11144,9 +11156,11 @@ async def test_server_streaming_predict_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -11312,9 +11326,11 @@ async def test_explain_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -11459,9 +11475,11 @@ async def test_count_tokens_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -11610,9 +11628,11 @@ async def test_generate_content_rest_asyncio_interceptors(null_interceptor): ) 
transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -11763,9 +11783,11 @@ async def test_stream_generate_content_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) @@ -11990,9 +12012,11 @@ async def test_chat_completions_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncPredictionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncPredictionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncPredictionServiceRestInterceptor() + ), ) client = PredictionServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_execution_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_execution_service.py index 48f82d1086..8221205f47 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_execution_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_execution_service.py @@ -72,7 +72,9 @@ from google.cloud.aiplatform_v1beta1.services.reasoning_engine_execution_service import ( transports, ) -from google.cloud.aiplatform_v1beta1.types import reasoning_engine_execution_service +from google.cloud.aiplatform_v1beta1.types import ( + 
reasoning_engine_execution_service, +) from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore @@ -1327,9 +1329,9 @@ def test_query_reasoning_engine_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.query_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_reasoning_engine] = ( + mock_rpc + ) request = {} client.query_reasoning_engine(request) @@ -1772,9 +1774,9 @@ def test_query_reasoning_engine_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.query_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_reasoning_engine] = ( + mock_rpc + ) request = {} client.query_reasoning_engine(request) @@ -2315,9 +2317,11 @@ def test_query_reasoning_engine_rest_call_success(request_type): def test_query_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineExecutionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceClient(transport=transport) @@ -2461,9 +2465,11 @@ def test_stream_query_reasoning_engine_rest_call_success(request_type): def test_stream_query_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineExecutionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.ReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceClient(transport=transport) @@ -3305,9 +3311,11 @@ async def test_query_reasoning_engine_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncReasoningEngineExecutionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceAsyncClient(transport=transport) @@ -3467,9 +3475,11 @@ async def test_stream_query_reasoning_engine_rest_asyncio_interceptors( ) transport = transports.AsyncReasoningEngineExecutionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineExecutionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineExecutionServiceRestInterceptor() + ), ) client = ReasoningEngineExecutionServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_service.py index 7906d316f4..2f1bb53387 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_reasoning_engine_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import ( ReasoningEngineServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import pagers -from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import transports +from 
google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.reasoning_engine_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import env_var from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -1665,9 +1669,9 @@ def test_get_reasoning_engine_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_reasoning_engine] = ( + mock_rpc + ) request = {} client.get_reasoning_engine(request) @@ -2018,9 +2022,9 @@ def test_list_reasoning_engines_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_reasoning_engines - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_reasoning_engines] = ( + mock_rpc + ) request = {} client.list_reasoning_engines(request) @@ -3387,9 +3391,9 @@ def test_get_reasoning_engine_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_reasoning_engine - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_reasoning_engine] = ( + mock_rpc + ) request = {} client.get_reasoning_engine(request) @@ -3572,9 +3576,9 @@ def test_list_reasoning_engines_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_reasoning_engines - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_reasoning_engines] = ( + mock_rpc + ) request = {} client.list_reasoning_engines(request) @@ -4766,9 +4770,11 @@ def get_message_fields(field): def test_create_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -4906,9 +4912,11 @@ def test_get_reasoning_engine_rest_call_success(request_type): def test_get_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -5038,9 +5046,11 @@ def test_list_reasoning_engines_rest_call_success(request_type): def test_list_reasoning_engines_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -5304,9 +5314,11 @@ def get_message_fields(field): def test_update_reasoning_engine_rest_interceptors(null_interceptor): transport = 
transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -5432,9 +5444,11 @@ def test_delete_reasoning_engine_rest_call_success(request_type): def test_delete_reasoning_engine_rest_interceptors(null_interceptor): transport = transports.ReasoningEngineServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.ReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceClient(transport=transport) @@ -6473,9 +6487,11 @@ async def test_create_reasoning_engine_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -6631,9 +6647,11 @@ async def test_get_reasoning_engine_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -6781,9 +6799,11 @@ async def 
test_list_reasoning_engines_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -7065,9 +7085,11 @@ async def test_update_reasoning_engine_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) @@ -7211,9 +7233,11 @@ async def test_delete_reasoning_engine_rest_asyncio_interceptors(null_intercepto ) transport = transports.AsyncReasoningEngineServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncReasoningEngineServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncReasoningEngineServiceRestInterceptor() + ), ) client = ReasoningEngineServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_schedule_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_schedule_service.py index d07b5c53b8..f9508aa436 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_schedule_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_schedule_service.py @@ -73,7 +73,9 @@ ScheduleServiceClient, ) from google.cloud.aiplatform_v1beta1.services.schedule_service import pagers -from google.cloud.aiplatform_v1beta1.services.schedule_service import transports +from 
google.cloud.aiplatform_v1beta1.services.schedule_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import accelerator_type from google.cloud.aiplatform_v1beta1.types import artifact from google.cloud.aiplatform_v1beta1.types import context @@ -6087,9 +6089,9 @@ def get_message_fields(field): def test_create_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6208,9 +6210,9 @@ def test_delete_schedule_rest_call_success(request_type): def test_delete_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6352,9 +6354,9 @@ def test_get_schedule_rest_call_success(request_type): def test_get_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6479,9 +6481,9 @@ def test_list_schedules_rest_call_success(request_type): def test_list_schedules_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else 
transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6605,9 +6607,9 @@ def test_pause_schedule_rest_call_success(request_type): def test_pause_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -6714,9 +6716,9 @@ def test_resume_schedule_rest_call_success(request_type): def test_resume_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -7283,9 +7285,9 @@ def get_message_fields(field): def test_update_schedule_rest_interceptors(null_interceptor): transport = transports.ScheduleServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ScheduleServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.ScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceClient(transport=transport) @@ -8676,9 +8678,11 @@ async def test_create_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -8814,9 +8818,11 @@ async def test_delete_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -8975,9 +8981,11 @@ async def test_get_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -9119,9 +9127,11 @@ async def test_list_schedules_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -9262,9 +9272,11 @@ async def test_pause_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -9387,9 +9399,11 @@ async 
def test_resume_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) @@ -9972,9 +9986,11 @@ async def test_update_schedule_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncScheduleServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncScheduleServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncScheduleServiceRestInterceptor() + ), ) client = ScheduleServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_session_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_session_service.py index 15ef958cda..b9b8725211 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_session_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_session_service.py @@ -73,7 +73,9 @@ SessionServiceClient, ) from google.cloud.aiplatform_v1beta1.services.session_service import pagers -from google.cloud.aiplatform_v1beta1.services.session_service import transports +from google.cloud.aiplatform_v1beta1.services.session_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import session @@ -5917,9 +5919,9 @@ def get_message_fields(field): def test_create_session_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) client = SessionServiceClient(transport=transport) @@ -6052,9 +6054,9 @@ def test_get_session_rest_call_success(request_type): def test_get_session_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) client = SessionServiceClient(transport=transport) @@ -6183,9 +6185,9 @@ def test_list_sessions_rest_call_success(request_type): def test_list_sessions_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) client = SessionServiceClient(transport=transport) @@ -6404,9 +6406,9 @@ def get_message_fields(field): def test_update_session_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) client = SessionServiceClient(transport=transport) @@ -6529,9 +6531,9 @@ def test_delete_session_rest_call_success(request_type): def test_delete_session_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) 
client = SessionServiceClient(transport=transport) @@ -6660,9 +6662,9 @@ def test_list_events_rest_call_success(request_type): def test_list_events_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) client = SessionServiceClient(transport=transport) @@ -6966,9 +6968,9 @@ def get_message_fields(field): def test_append_event_rest_interceptors(null_interceptor): transport = transports.SessionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SessionServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.SessionServiceRestInterceptor() + ), ) client = SessionServiceClient(transport=transport) @@ -7989,9 +7991,11 @@ async def test_create_session_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) @@ -8143,9 +8147,11 @@ async def test_get_session_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) @@ -8290,9 +8296,11 @@ async def test_list_sessions_rest_asyncio_interceptors(null_interceptor): 
) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) @@ -8528,9 +8536,11 @@ async def test_update_session_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) @@ -8670,9 +8680,11 @@ async def test_delete_session_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) @@ -8820,9 +8832,11 @@ async def test_list_events_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) @@ -9144,9 +9158,11 @@ async def test_append_event_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSessionServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncSessionServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSessionServiceRestInterceptor() + ), ) client = SessionServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py index c1fca28224..4a8e99c711 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py @@ -72,11 +72,17 @@ from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import ( SpecialistPoolServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import pagers -from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import transports +from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import specialist_pool -from google.cloud.aiplatform_v1beta1.types import specialist_pool as gca_specialist_pool +from google.cloud.aiplatform_v1beta1.types import ( + specialist_pool as gca_specialist_pool, +) from google.cloud.aiplatform_v1beta1.types import specialist_pool_service from google.cloud.location import locations_pb2 from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1290,9 +1296,9 @@ def test_create_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_specialist_pool] = ( + mock_rpc + ) request = {} client.create_specialist_pool(request) @@ -1659,9 +1665,9 @@ def test_get_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_specialist_pool] = ( + mock_rpc + ) request = {} client.get_specialist_pool(request) @@ -2014,9 +2020,9 @@ def test_list_specialist_pools_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_specialist_pools - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_specialist_pools] = ( + mock_rpc + ) request = {} client.list_specialist_pools(request) @@ -2556,9 +2562,9 @@ def test_delete_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_specialist_pool] = ( + mock_rpc + ) request = {} client.delete_specialist_pool(request) @@ -2899,9 +2905,9 @@ def test_update_specialist_pool_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_specialist_pool] = ( + mock_rpc + ) request = {} client.update_specialist_pool(request) @@ -3191,9 +3197,9 @@ def test_create_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_specialist_pool] = ( + mock_rpc + ) request = {} client.create_specialist_pool(request) @@ -3383,9 +3389,9 @@ def test_get_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_specialist_pool] = ( + mock_rpc + ) request = {} client.get_specialist_pool(request) @@ -3568,9 +3574,9 @@ def test_list_specialist_pools_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_specialist_pools - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_specialist_pools] = ( + mock_rpc + ) request = {} client.list_specialist_pools(request) @@ -3836,9 +3842,9 @@ def test_delete_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.delete_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_specialist_pool] = ( + mock_rpc + ) request = {} client.delete_specialist_pool(request) @@ -4022,9 +4028,9 @@ def test_update_specialist_pool_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_specialist_pool - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_specialist_pool] = ( + mock_rpc + ) request = {} client.update_specialist_pool(request) @@ -4727,9 +4733,11 @@ def get_message_fields(field): def test_create_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -4871,9 +4879,11 @@ def test_get_specialist_pool_rest_call_success(request_type): def test_get_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -5003,9 +5013,11 @@ def test_list_specialist_pools_rest_call_success(request_type): def test_list_specialist_pools_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - 
if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -5134,9 +5146,11 @@ def test_delete_specialist_pool_rest_call_success(request_type): def test_delete_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -5352,9 +5366,11 @@ def get_message_fields(field): def test_update_specialist_pool_rest_interceptors(null_interceptor): transport = transports.SpecialistPoolServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.SpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceClient(transport=transport) @@ -6348,9 +6364,11 @@ async def test_create_specialist_pool_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -6510,9 +6528,11 @@ async def test_get_specialist_pool_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - 
interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -6658,9 +6678,11 @@ async def test_list_specialist_pools_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -6807,9 +6829,11 @@ async def test_delete_specialist_pool_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) @@ -7043,9 +7067,11 @@ async def test_update_specialist_pool_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncSpecialistPoolServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncSpecialistPoolServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncSpecialistPoolServiceRestInterceptor() + ), ) client = SpecialistPoolServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_tensorboard_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_tensorboard_service.py index 8128674bc8..86ee2b8553 100644 --- 
a/tests/unit/gapic/aiplatform_v1beta1/test_tensorboard_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_tensorboard_service.py @@ -72,19 +72,27 @@ from google.cloud.aiplatform_v1beta1.services.tensorboard_service import ( TensorboardServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.tensorboard_service import pagers -from google.cloud.aiplatform_v1beta1.services.tensorboard_service import transports +from google.cloud.aiplatform_v1beta1.services.tensorboard_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.tensorboard_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import tensorboard -from google.cloud.aiplatform_v1beta1.types import tensorboard as gca_tensorboard +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard as gca_tensorboard, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_data from google.cloud.aiplatform_v1beta1.types import tensorboard_experiment from google.cloud.aiplatform_v1beta1.types import ( tensorboard_experiment as gca_tensorboard_experiment, ) from google.cloud.aiplatform_v1beta1.types import tensorboard_run -from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run +from google.cloud.aiplatform_v1beta1.types import ( + tensorboard_run as gca_tensorboard_run, +) from google.cloud.aiplatform_v1beta1.types import tensorboard_service from google.cloud.aiplatform_v1beta1.types import tensorboard_time_series from google.cloud.aiplatform_v1beta1.types import ( @@ -1282,9 +1290,9 @@ def test_create_tensorboard_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.create_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard] = ( + mock_rpc + ) request = {} client.create_tensorboard(request) @@ -1990,9 +1998,9 @@ def test_update_tensorboard_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard] = ( + mock_rpc + ) request = {} client.update_tensorboard(request) @@ -2353,9 +2361,9 @@ def test_list_tensorboards_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tensorboards - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboards] = ( + mock_rpc + ) request = {} client.list_tensorboards(request) @@ -2894,9 +2902,9 @@ def test_delete_tensorboard_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard] = ( + mock_rpc + ) request = {} client.delete_tensorboard(request) @@ -3241,9 +3249,9 @@ def test_read_tensorboard_usage_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_usage - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_usage] = ( + mock_rpc + ) request = {} client.read_tensorboard_usage(request) @@ -3581,9 +3589,9 @@ def test_read_tensorboard_size_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_size - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_size] = ( + mock_rpc + ) request = {} client.read_tensorboard_size(request) @@ -5958,9 +5966,9 @@ def test_create_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard_run] = ( + mock_rpc + ) request = {} client.create_tensorboard_run(request) @@ -6691,9 +6699,9 @@ def test_get_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_tensorboard_run] = ( + mock_rpc + ) request = {} client.get_tensorboard_run(request) @@ -7042,9 +7050,9 @@ def test_update_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard_run] = ( + mock_rpc + ) request = {} client.update_tensorboard_run(request) @@ -7407,9 +7415,9 @@ def test_list_tensorboard_runs_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tensorboard_runs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboard_runs] = ( + mock_rpc + ) request = {} client.list_tensorboard_runs(request) @@ -7949,9 +7957,9 @@ def test_delete_tensorboard_run_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard_run] = ( + mock_rpc + ) request = {} client.delete_tensorboard_run(request) @@ -13020,9 +13028,9 @@ def test_create_tensorboard_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard] = ( + mock_rpc + ) request = {} client.create_tensorboard(request) @@ -13392,9 +13400,9 @@ def test_update_tensorboard_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard] = ( + mock_rpc + ) request = {} client.update_tensorboard(request) @@ -13583,9 +13591,9 @@ def test_list_tensorboards_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_tensorboards - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboards] = ( + mock_rpc + ) request = {} client.list_tensorboards(request) @@ -13849,9 +13857,9 @@ def test_delete_tensorboard_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard] = ( + mock_rpc + ) request = {} client.delete_tensorboard(request) @@ -14033,9 +14041,9 @@ def test_read_tensorboard_usage_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_usage - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_usage] = ( + mock_rpc + ) request = {} client.read_tensorboard_usage(request) @@ -14220,9 +14228,9 @@ def test_read_tensorboard_size_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.read_tensorboard_size - ] = mock_rpc + client._transport._wrapped_methods[client._transport.read_tensorboard_size] = ( + mock_rpc + ) request = {} client.read_tensorboard_size(request) @@ -15486,9 +15494,9 @@ def test_create_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_tensorboard_run] = ( + mock_rpc + ) request = {} client.create_tensorboard_run(request) @@ -15905,9 +15913,9 @@ def test_get_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_tensorboard_run] = ( + mock_rpc + ) request = {} client.get_tensorboard_run(request) @@ -16090,9 +16098,9 @@ def test_update_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_tensorboard_run] = ( + mock_rpc + ) request = {} client.update_tensorboard_run(request) @@ -16285,9 +16293,9 @@ def test_list_tensorboard_runs_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_tensorboard_runs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tensorboard_runs] = ( + mock_rpc + ) request = {} client.list_tensorboard_runs(request) @@ -16558,9 +16566,9 @@ def test_delete_tensorboard_run_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_tensorboard_run - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_tensorboard_run] = ( + mock_rpc + ) request = {} client.delete_tensorboard_run(request) @@ -21168,9 +21176,9 @@ def get_message_fields(field): def test_create_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21314,9 +21322,9 @@ def test_get_tensorboard_rest_call_success(request_type): def test_get_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21526,9 +21534,9 @@ def get_message_fields(field): def test_update_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + 
interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21656,9 +21664,9 @@ def test_list_tensorboards_rest_call_success(request_type): def test_list_tensorboards_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21783,9 +21791,9 @@ def test_delete_tensorboard_rest_call_success(request_type): def test_delete_tensorboard_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -21914,9 +21922,9 @@ def test_read_tensorboard_usage_rest_call_success(request_type): def test_read_tensorboard_usage_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22051,9 +22059,9 @@ def test_read_tensorboard_size_rest_call_success(request_type): def test_read_tensorboard_size_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - 
else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22273,9 +22281,9 @@ def get_message_fields(field): def test_create_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22420,9 +22428,9 @@ def test_get_tensorboard_experiment_rest_call_success(request_type): def test_get_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22650,9 +22658,9 @@ def get_message_fields(field): def test_update_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22787,9 +22795,9 @@ def test_list_tensorboard_experiments_rest_call_success(request_type): def test_list_tensorboard_experiments_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -22919,9 +22927,9 @@ def test_delete_tensorboard_experiment_rest_call_success(request_type): def test_delete_tensorboard_experiment_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23139,9 +23147,9 @@ def get_message_fields(field): def test_create_tensorboard_run_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23272,9 +23280,9 @@ def test_batch_create_tensorboard_runs_rest_call_success(request_type): def test_batch_create_tensorboard_runs_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23417,9 +23425,9 @@ def test_get_tensorboard_run_rest_call_success(request_type): def test_get_tensorboard_run_rest_interceptors(null_interceptor): transport = 
transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23639,9 +23647,9 @@ def get_message_fields(field): def test_update_tensorboard_run_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23773,9 +23781,9 @@ def test_list_tensorboard_runs_rest_call_success(request_type): def test_list_tensorboard_runs_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -23904,9 +23912,9 @@ def test_delete_tensorboard_run_rest_call_success(request_type): def test_delete_tensorboard_run_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24039,9 +24047,9 @@ def test_batch_create_tensorboard_time_series_rest_call_success(request_type): def 
test_batch_create_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24286,9 +24294,9 @@ def get_message_fields(field): def test_create_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24440,9 +24448,9 @@ def test_get_tensorboard_time_series_rest_call_success(request_type): def test_get_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24685,9 +24693,9 @@ def get_message_fields(field): def test_update_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24826,9 +24834,9 @@ def 
test_list_tensorboard_time_series_rest_call_success(request_type): def test_list_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -24958,9 +24966,9 @@ def test_delete_tensorboard_time_series_rest_call_success(request_type): def test_delete_tensorboard_time_series_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25097,9 +25105,9 @@ def test_batch_read_tensorboard_time_series_data_rest_call_success(request_type) def test_batch_read_tensorboard_time_series_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25241,9 +25249,9 @@ def test_read_tensorboard_time_series_data_rest_call_success(request_type): def test_read_tensorboard_time_series_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if 
null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25385,9 +25393,9 @@ def test_read_tensorboard_blob_data_rest_call_success(request_type): def test_read_tensorboard_blob_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25523,9 +25531,9 @@ def test_write_tensorboard_experiment_data_rest_call_success(request_type): def test_write_tensorboard_experiment_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25663,9 +25671,9 @@ def test_write_tensorboard_run_data_rest_call_success(request_type): def test_write_tensorboard_run_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -25802,9 +25810,9 @@ def test_export_tensorboard_time_series_data_rest_call_success(request_type): def test_export_tensorboard_time_series_data_rest_interceptors(null_interceptor): transport = transports.TensorboardServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TensorboardServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.TensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceClient(transport=transport) @@ -27351,9 +27359,11 @@ async def test_create_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -27513,9 +27523,11 @@ async def test_get_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -27741,9 +27753,11 @@ async def test_update_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -27887,9 +27901,11 @@ async def test_list_tensorboards_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - 
else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28030,9 +28046,11 @@ async def test_delete_tensorboard_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28177,9 +28195,11 @@ async def test_read_tensorboard_usage_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28330,9 +28350,11 @@ async def test_read_tensorboard_size_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28570,9 +28592,11 @@ async def test_create_tensorboard_experiment_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( 
+ None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28733,9 +28757,11 @@ async def test_get_tensorboard_experiment_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -28983,9 +29009,11 @@ async def test_update_tensorboard_experiment_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29136,9 +29164,11 @@ async def test_list_tensorboard_experiments_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29287,9 +29317,11 @@ async def test_delete_tensorboard_experiment_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else 
transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29523,9 +29555,11 @@ async def test_create_tensorboard_run_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29674,9 +29708,11 @@ async def test_batch_create_tensorboard_runs_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -29835,9 +29871,11 @@ async def test_get_tensorboard_run_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30073,9 +30111,11 @@ async def test_update_tensorboard_run_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = 
TensorboardServiceAsyncClient(transport=transport) @@ -30223,9 +30263,11 @@ async def test_list_tensorboard_runs_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30370,9 +30412,11 @@ async def test_delete_tensorboard_run_rest_asyncio_interceptors(null_interceptor ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30525,9 +30569,11 @@ async def test_batch_create_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30790,9 +30836,11 @@ async def test_create_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -30960,9 +31008,11 
@@ async def test_get_tensorboard_time_series_rest_asyncio_interceptors(null_interc ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31225,9 +31275,11 @@ async def test_update_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31382,9 +31434,11 @@ async def test_list_tensorboard_time_series_rest_asyncio_interceptors(null_inter ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31533,9 +31587,11 @@ async def test_delete_tensorboard_time_series_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31692,9 +31748,11 @@ async def 
test_batch_read_tensorboard_time_series_data_rest_asyncio_interceptors ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -31856,9 +31914,11 @@ async def test_read_tensorboard_time_series_data_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32014,9 +32074,11 @@ async def test_read_tensorboard_blob_data_rest_asyncio_interceptors(null_interce ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32174,9 +32236,11 @@ async def test_write_tensorboard_experiment_data_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32330,9 +32394,11 @@ async def test_write_tensorboard_run_data_rest_asyncio_interceptors(null_interce ) 
transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) @@ -32491,9 +32557,11 @@ async def test_export_tensorboard_time_series_data_rest_asyncio_interceptors( ) transport = transports.AsyncTensorboardServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncTensorboardServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncTensorboardServiceRestInterceptor() + ), ) client = TensorboardServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_data_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_data_service.py index 5e0521bed4..2a9db4ea70 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_data_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_data_service.py @@ -72,8 +72,12 @@ from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import ( VertexRagDataServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import pagers -from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import transports +from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import ( + pagers, +) +from google.cloud.aiplatform_v1beta1.services.vertex_rag_data_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import api_auth from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import io @@ -1288,9 +1292,9 @@ def test_create_rag_corpus_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # 
operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rag_corpus] = ( + mock_rpc + ) request = {} client.create_rag_corpus(request) @@ -1670,9 +1674,9 @@ def test_update_rag_corpus_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_rag_corpus] = ( + mock_rpc + ) request = {} client.update_rag_corpus(request) @@ -2383,9 +2387,9 @@ def test_list_rag_corpora_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_rag_corpora - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rag_corpora] = ( + mock_rpc + ) request = {} client.list_rag_corpora(request) @@ -2904,9 +2908,9 @@ def test_delete_rag_corpus_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_rag_corpus] = ( + mock_rpc + ) request = {} client.delete_rag_corpus(request) @@ -3626,9 +3630,9 @@ def test_import_rag_files_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.import_rag_files - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_rag_files] = ( + mock_rpc + ) request = {} client.import_rag_files(request) @@ -5527,9 +5531,9 @@ def test_get_rag_engine_config_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_rag_engine_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rag_engine_config] = ( + mock_rpc + ) request = {} client.get_rag_engine_config(request) @@ -5799,9 +5803,9 @@ def test_create_rag_corpus_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_rag_corpus] = ( + mock_rpc + ) request = {} client.create_rag_corpus(request) @@ -6001,9 +6005,9 @@ def test_update_rag_corpus_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_rag_corpus] = ( + mock_rpc + ) request = {} client.update_rag_corpus(request) @@ -6372,9 +6376,9 @@ def test_list_rag_corpora_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_rag_corpora - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_rag_corpora] = ( + mock_rpc + ) request = {} client.list_rag_corpora(request) @@ -6632,9 +6636,9 @@ def test_delete_rag_corpus_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_rag_corpus - ] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_rag_corpus] = ( + mock_rpc + ) request = {} client.delete_rag_corpus(request) @@ -7027,9 +7031,9 @@ def test_import_rag_files_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_rag_files - ] = mock_rpc + client._transport._wrapped_methods[client._transport.import_rag_files] = ( + mock_rpc + ) request = {} client.import_rag_files(request) @@ -8029,9 +8033,9 @@ def test_get_rag_engine_config_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_rag_engine_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_rag_engine_config] = ( + mock_rpc + ) request = {} client.get_rag_engine_config(request) @@ -9097,9 +9101,11 @@ def get_message_fields(field): def test_create_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9354,9 +9360,11 @@ def get_message_fields(field): def test_update_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9490,9 +9498,11 @@ def test_get_rag_corpus_rest_call_success(request_type): def test_get_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9618,9 +9628,11 @@ def test_list_rag_corpora_rest_call_success(request_type): def test_list_rag_corpora_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - 
interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9745,9 +9757,11 @@ def test_delete_rag_corpus_rest_call_success(request_type): def test_delete_rag_corpus_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9872,9 +9886,11 @@ def test_upload_rag_file_rest_call_success(request_type): def test_upload_rag_file_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -9999,9 +10015,11 @@ def test_import_rag_files_rest_call_success(request_type): def test_import_rag_files_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10145,9 +10163,11 @@ def test_get_rag_file_rest_call_success(request_type): def test_get_rag_file_rest_interceptors(null_interceptor): transport = 
transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10273,9 +10293,11 @@ def test_list_rag_files_rest_call_success(request_type): def test_list_rag_files_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10404,9 +10426,11 @@ def test_delete_rag_file_rest_call_success(request_type): def test_delete_rag_file_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10614,9 +10638,11 @@ def get_message_fields(field): def test_update_rag_engine_config_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -10744,9 +10770,11 @@ def test_get_rag_engine_config_rest_call_success(request_type): def 
test_get_rag_engine_config_rest_interceptors(null_interceptor): transport = transports.VertexRagDataServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.VertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceClient(transport=transport) @@ -11923,9 +11951,11 @@ async def test_create_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12196,9 +12226,11 @@ async def test_update_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12348,9 +12380,11 @@ async def test_get_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12492,9 +12526,11 @@ async def 
test_list_rag_corpora_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12635,9 +12671,11 @@ async def test_delete_rag_corpus_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12778,9 +12816,11 @@ async def test_upload_rag_file_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -12921,9 +12961,11 @@ async def test_import_rag_files_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13083,9 +13125,11 @@ async def 
test_get_rag_file_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13227,9 +13271,11 @@ async def test_list_rag_files_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13374,9 +13420,11 @@ async def test_delete_rag_file_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13600,9 +13648,11 @@ async def test_update_rag_engine_config_rest_asyncio_interceptors(null_intercept ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) @@ -13748,9 +13798,11 @@ async def 
test_get_rag_engine_config_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncVertexRagDataServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagDataServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagDataServiceRestInterceptor() + ), ) client = VertexRagDataServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_service.py index d44ef58e38..2cb8d7bb0a 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_vertex_rag_service.py @@ -68,7 +68,9 @@ from google.cloud.aiplatform_v1beta1.services.vertex_rag_service import ( VertexRagServiceClient, ) -from google.cloud.aiplatform_v1beta1.services.vertex_rag_service import transports +from google.cloud.aiplatform_v1beta1.services.vertex_rag_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import content from google.cloud.aiplatform_v1beta1.types import content as gca_content from google.cloud.aiplatform_v1beta1.types import tool @@ -1251,9 +1253,9 @@ def test_retrieve_contexts_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.retrieve_contexts - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retrieve_contexts] = ( + mock_rpc + ) request = {} client.retrieve_contexts(request) @@ -1938,9 +1940,9 @@ def test_corroborate_content_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.corroborate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.corroborate_content] = ( + mock_rpc + ) request = {} client.corroborate_content(request) @@ -2230,9 +2232,9 @@ def test_retrieve_contexts_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.retrieve_contexts - ] = mock_rpc + client._transport._wrapped_methods[client._transport.retrieve_contexts] = ( + mock_rpc + ) request = {} client.retrieve_contexts(request) @@ -2605,9 +2607,9 @@ def test_corroborate_content_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.corroborate_content - ] = mock_rpc + client._transport._wrapped_methods[client._transport.corroborate_content] = ( + mock_rpc + ) request = {} client.corroborate_content(request) @@ -3112,9 +3114,9 @@ def test_retrieve_contexts_rest_call_success(request_type): def test_retrieve_contexts_rest_interceptors(null_interceptor): transport = transports.VertexRagServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceClient(transport=transport) @@ -3242,9 +3244,9 @@ def test_augment_prompt_rest_call_success(request_type): def test_augment_prompt_rest_interceptors(null_interceptor): transport = transports.VertexRagServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagServiceRestInterceptor(), + interceptor=( + None if null_interceptor else 
transports.VertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceClient(transport=transport) @@ -3374,9 +3376,9 @@ def test_corroborate_content_rest_call_success(request_type): def test_corroborate_content_rest_interceptors(null_interceptor): transport = transports.VertexRagServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VertexRagServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceClient(transport=transport) @@ -4227,9 +4229,11 @@ async def test_retrieve_contexts_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceAsyncClient(transport=transport) @@ -4373,9 +4377,11 @@ async def test_augment_prompt_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagServiceRestInterceptor() + ), ) client = VertexRagServiceAsyncClient(transport=transport) @@ -4522,9 +4528,11 @@ async def test_corroborate_content_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVertexRagServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVertexRagServiceRestInterceptor(), + interceptor=( + None + if null_interceptor + else transports.AsyncVertexRagServiceRestInterceptor() + ), ) client = 
VertexRagServiceAsyncClient(transport=transport) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py index a9605fe1b8..1f35c39b45 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_vizier_service.py @@ -69,9 +69,13 @@ from google.cloud.aiplatform_v1beta1.services.vizier_service import ( VizierServiceAsyncClient, ) -from google.cloud.aiplatform_v1beta1.services.vizier_service import VizierServiceClient +from google.cloud.aiplatform_v1beta1.services.vizier_service import ( + VizierServiceClient, +) from google.cloud.aiplatform_v1beta1.services.vizier_service import pagers -from google.cloud.aiplatform_v1beta1.services.vizier_service import transports +from google.cloud.aiplatform_v1beta1.services.vizier_service import ( + transports, +) from google.cloud.aiplatform_v1beta1.types import study from google.cloud.aiplatform_v1beta1.types import study as gca_study from google.cloud.aiplatform_v1beta1.types import vizier_service @@ -4521,9 +4525,9 @@ def test_add_trial_measurement_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_trial_measurement - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_trial_measurement] = ( + mock_rpc + ) request = {} client.add_trial_measurement(request) @@ -5872,9 +5876,9 @@ def test_list_optimal_trials_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_optimal_trials - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_optimal_trials] = ( + mock_rpc + ) request = {} client.list_optimal_trials(request) @@ -7895,9 +7899,9 @@ def test_add_trial_measurement_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.add_trial_measurement - ] = mock_rpc + client._transport._wrapped_methods[client._transport.add_trial_measurement] = ( + mock_rpc + ) request = {} client.add_trial_measurement(request) @@ -8572,9 +8576,9 @@ def test_list_optimal_trials_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_optimal_trials - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_optimal_trials] = ( + mock_rpc + ) request = {} client.list_optimal_trials(request) @@ -9845,9 +9849,9 @@ def get_message_fields(field): def test_create_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -9976,9 +9980,9 @@ def test_get_study_rest_call_success(request_type): def test_get_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = 
VizierServiceClient(transport=transport) @@ -10099,9 +10103,9 @@ def test_list_studies_rest_call_success(request_type): def test_list_studies_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10220,9 +10224,9 @@ def test_delete_study_rest_call_success(request_type): def test_delete_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10339,9 +10343,9 @@ def test_lookup_study_rest_call_success(request_type): def test_lookup_study_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10460,9 +10464,9 @@ def test_suggest_trials_rest_call_success(request_type): def test_suggest_trials_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10694,9 +10698,9 @@ def 
get_message_fields(field): def test_create_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10833,9 +10837,9 @@ def test_get_trial_rest_call_success(request_type): def test_get_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -10956,9 +10960,9 @@ def test_list_trials_rest_call_success(request_type): def test_list_trials_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11099,9 +11103,9 @@ def test_add_trial_measurement_rest_call_success(request_type): def test_add_trial_measurement_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11241,9 +11245,9 @@ def test_complete_trial_rest_call_success(request_type): def 
test_complete_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11364,9 +11368,9 @@ def test_delete_trial_rest_call_success(request_type): def test_delete_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11477,9 +11481,9 @@ def test_check_trial_early_stopping_state_rest_call_success(request_type): def test_check_trial_early_stopping_state_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11619,9 +11623,9 @@ def test_stop_trial_rest_call_success(request_type): def test_stop_trial_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -11743,9 +11747,9 @@ def test_list_optimal_trials_rest_call_success(request_type): def 
test_list_optimal_trials_rest_interceptors(null_interceptor): transport = transports.VizierServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.VizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.VizierServiceRestInterceptor() + ), ) client = VizierServiceClient(transport=transport) @@ -13023,9 +13027,9 @@ async def test_create_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13172,9 +13176,9 @@ async def test_get_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13313,9 +13317,9 @@ async def test_list_studies_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13452,9 +13456,9 @@ async def test_delete_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else 
transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13589,9 +13593,9 @@ async def test_lookup_study_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13726,9 +13730,9 @@ async def test_suggest_trials_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -13979,9 +13983,9 @@ async def test_create_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14136,9 +14140,9 @@ async def test_get_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = 
VizierServiceAsyncClient(transport=transport) @@ -14277,9 +14281,9 @@ async def test_list_trials_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14436,9 +14440,9 @@ async def test_add_trial_measurement_rest_asyncio_interceptors(null_interceptor) ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14594,9 +14598,9 @@ async def test_complete_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14736,9 +14740,9 @@ async def test_delete_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -14867,9 +14871,9 @@ async def test_check_trial_early_stopping_state_rest_asyncio_interceptors( ) transport = 
transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -15029,9 +15033,9 @@ async def test_stop_trial_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) @@ -15169,9 +15173,9 @@ async def test_list_optimal_trials_rest_asyncio_interceptors(null_interceptor): ) transport = transports.AsyncVizierServiceRestTransport( credentials=async_anonymous_credentials(), - interceptor=None - if null_interceptor - else transports.AsyncVizierServiceRestInterceptor(), + interceptor=( + None if null_interceptor else transports.AsyncVizierServiceRestInterceptor() + ), ) client = VizierServiceAsyncClient(transport=transport) diff --git a/tests/unit/vertex_adk/test_reasoning_engine_templates_adk.py b/tests/unit/vertex_adk/test_reasoning_engine_templates_adk.py index 5579c75fb6..5bf3bba293 100644 --- a/tests/unit/vertex_adk/test_reasoning_engine_templates_adk.py +++ b/tests/unit/vertex_adk/test_reasoning_engine_templates_adk.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import asyncio import base64 import importlib import json @@ -180,6 +181,34 @@ async def run_async(self, *args, **kwargs): } ) + async def run_live(self, *args, **kwargs): + from google.adk.events import event + + yield event.Event( + **{ + "author": "currency_exchange_agent", + "content": { + "parts": [ + { + "thought_signature": b"test_signature", + "function_call": { + "args": { + "currency_date": "2025-04-03", + "currency_from": "USD", + "currency_to": "SEK", + }, + "id": "af-c5a57692-9177-4091-a3df-098f834ee849", + "name": "get_exchange_rate", + }, + } + ], + "role": "model", + }, + "id": "9aaItGK9", + "invocation_id": "e-6543c213-6417-484b-9551-b67915d1d5f7", + } + ) + @pytest.mark.usefixtures("google_auth_mock") class TestAdkApp: @@ -355,6 +384,29 @@ def test_streaming_agent_run_with_events(self): events = list(app.streaming_agent_run_with_events(request_json=request_json)) assert len(events) == 1 + @pytest.mark.asyncio + async def test_async_bidi_stream_query(self): + app = reasoning_engines.AdkApp( + agent=Agent(name=_TEST_AGENT_NAME, model=_TEST_MODEL) + ) + assert app._tmpl_attrs.get("runner") is None + app.set_up() + app._tmpl_attrs["runner"] = _MockRunner() + request_queue = asyncio.Queue() + request_dict = { + "user_id": _TEST_USER_ID, + "live_request": { + "input": "What is the exchange rate from USD to SEK?", + }, + } + + await request_queue.put(request_dict) + await request_queue.put(None) # Sentinel to end the stream. 
+ events = [] + async for event in app.bidi_stream_query(request_queue): + events.append(event) + assert len(events) == 1 + def test_create_session(self): app = reasoning_engines.AdkApp( agent=Agent(name=_TEST_AGENT_NAME, model=_TEST_MODEL) @@ -568,3 +620,35 @@ async def test_async_stream_query_invalid_message_type(self): ): async for _ in app.async_stream_query(user_id=_TEST_USER_ID, message=123): pass + + @pytest.mark.asyncio + async def test_bidi_stream_query_invalid_request_queue(self): + app = reasoning_engines.AdkApp( + agent=Agent(name=_TEST_AGENT_NAME, model=_TEST_MODEL) + ) + request_queue = [] + with pytest.raises( + TypeError, + match="request_queue must be an asyncio.Queue instance.", + ): + async for _ in app.bidi_stream_query(request_queue): + pass + + @pytest.mark.asyncio + async def test_bidi_stream_query_invalid_first_request(self): + app = reasoning_engines.AdkApp( + agent=Agent(name=_TEST_AGENT_NAME, model=_TEST_MODEL) + ) + request_queue = asyncio.Queue() + request_dict = { + "live_request": { + "input": "What is the exchange rate from USD to SEK?", + }, + } + await request_queue.put(request_dict) + with pytest.raises( + ValueError, + match="The first request must have a user_id.", + ): + async for _ in app.bidi_stream_query(request_queue): + pass diff --git a/tests/unit/vertex_langchain/test_agent_engine_templates_module.py b/tests/unit/vertex_langchain/test_agent_engine_templates_module.py index 34a4ad7681..0ad1d0e41e 100644 --- a/tests/unit/vertex_langchain/test_agent_engine_templates_module.py +++ b/tests/unit/vertex_langchain/test_agent_engine_templates_module.py @@ -13,7 +13,9 @@ # limitations under the License. 
# from vertexai import agent_engines -from test_constants import test_agent +from test_constants import ( + test_agent, +) _TEST_MODULE_NAME = "test_constants" _TEST_AGENT_NAME = "test_agent" diff --git a/tests/unit/vertex_langchain/test_agent_engines.py b/tests/unit/vertex_langchain/test_agent_engines.py index 5be65777c9..133b2d3b19 100644 --- a/tests/unit/vertex_langchain/test_agent_engines.py +++ b/tests/unit/vertex_langchain/test_agent_engines.py @@ -108,6 +108,20 @@ def clone(self): return self +class BidiStreamQueryEngine: + """A sample Agent Engine that implements `bidi_stream_query`.""" + + def set_up(self): + pass + + async def bidi_stream_query(self, unused_request_queue) -> AsyncIterable[Any]: + """Runs the bidi stream engine.""" + raise NotImplementedError() + + def clone(self): + return self + + class OperationRegistrableEngine: """Add a test class that implements OperationRegistrable.""" @@ -141,6 +155,10 @@ async def async_stream_query( for chunk in _TEST_AGENT_ENGINE_STREAM_QUERY_RESPONSE: yield chunk + async def bidi_stream_query(self, unused_request_queue) -> AsyncIterable[Any]: + """Runs the bidi stream engine.""" + raise NotImplementedError() + # Add a custom method to test the custom stream method registration. 
def custom_stream_query(self, unused_arbitrary_string_name: str) -> Iterable[Any]: """Runs the stream engine.""" @@ -158,6 +176,12 @@ async def custom_async_stream_method( for chunk in _TEST_AGENT_ENGINE_STREAM_QUERY_RESPONSE: yield chunk + async def custom_bidi_stream_method( + self, unused_request_queue + ) -> AsyncIterable[Any]: + """Runs the bidi stream engine.""" + raise NotImplementedError() + def clone(self): return self @@ -496,18 +520,18 @@ def register_operations(self) -> Dict[str, List[str]]: schema_name=_TEST_CUSTOM_METHOD_NAME, ) ) -_TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STANDARD_API_MODE +_TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STANDARD_API_MODE +) _TEST_AGENT_ENGINE_ASYNC_CUSTOM_METHOD_SCHEMA = _utils.to_proto( _utils.generate_schema( OperationRegistrableEngine().custom_async_method, schema_name=_TEST_CUSTOM_ASYNC_METHOD_NAME, ) ) -_TEST_AGENT_ENGINE_ASYNC_CUSTOM_METHOD_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_ASYNC_API_MODE +_TEST_AGENT_ENGINE_ASYNC_CUSTOM_METHOD_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_ASYNC_API_MODE +) _TEST_AGENT_ENGINE_STREAM_QUERY_SCHEMA = _utils.to_proto( _utils.generate_schema( StreamQueryEngine().stream_query, @@ -521,27 +545,27 @@ def register_operations(self) -> Dict[str, List[str]]: schema_name=_TEST_CUSTOM_STREAM_METHOD_NAME, ) ) -_TEST_AGENT_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STREAM_API_MODE +_TEST_AGENT_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STREAM_API_MODE +) _TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA = _utils.to_proto( _utils.generate_schema( AsyncStreamQueryEngine().async_stream_query, schema_name=_TEST_DEFAULT_ASYNC_STREAM_METHOD_NAME, ) ) -_TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_ASYNC_STREAM_API_MODE +_TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_ASYNC_STREAM_API_MODE +) 
_TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA = _utils.to_proto( _utils.generate_schema( OperationRegistrableEngine().custom_async_stream_method, schema_name=_TEST_CUSTOM_ASYNC_STREAM_METHOD_NAME, ) ) -_TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_ASYNC_STREAM_API_MODE +_TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_ASYNC_STREAM_API_MODE +) _TEST_OPERATION_REGISTRABLE_SCHEMAS = [ _TEST_AGENT_ENGINE_QUERY_SCHEMA, _TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA, @@ -568,9 +592,9 @@ def register_operations(self) -> Dict[str, List[str]]: schema_name=_TEST_METHOD_TO_BE_UNREGISTERED_NAME, ) ) -_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STANDARD_API_MODE +_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STANDARD_API_MODE +) _TEST_ASYNC_QUERY_SCHEMAS = [_TEST_AGENT_ENGINE_ASYNC_METHOD_SCHEMA] _TEST_STREAM_QUERY_SCHEMAS = [ _TEST_AGENT_ENGINE_STREAM_QUERY_SCHEMA, @@ -890,6 +914,17 @@ async def async_stream_query() -> str: return "RESPONSE" +class InvalidCapitalizeEngineWithoutBidiStreamQuerySelf: + """A sample Agent Engine with an invalid bidi_stream_query method.""" + + def set_up(self): + pass + + async def bidi_stream_query() -> AsyncIterable[Any]: + """Runs the engine.""" + raise NotImplementedError() + + class InvalidCapitalizeEngineWithoutRegisterOperationsSelf: """A sample Agent Engine with an invalid register_operations method.""" @@ -1625,6 +1660,23 @@ def test_get_agent_framework( ), ), ), + ( + "Update the bidi stream query engine", + {"agent_engine": BidiStreamQueryEngine()}, + types.reasoning_engine_service.UpdateReasoningEngineRequest( + reasoning_engine=_generate_agent_engine_with_class_methods_and_agent_framework( + [], + _agent_engines._DEFAULT_AGENT_FRAMEWORK, + ), + update_mask=field_mask_pb2.FieldMask( + paths=[ + "spec.package_spec.pickle_object_gcs_uri", + "spec.class_methods", + "spec.agent_framework", + 
] + ), + ), + ), ( "Update the operation registrable engine", {"agent_engine": OperationRegistrableEngine()}, @@ -2826,6 +2878,95 @@ async def test_async_stream_query_agent_engine_with_operation_schema( ) ) + # pytest does not allow absl.testing.parameterized.named_parameters. + @pytest.mark.parametrize( + "test_case_name, test_engine, test_class_method_docs, test_class_methods_spec", + [ + ( + "Default Bidi Stream Queryable (Not Operation Registrable) Engine", + BidiStreamQueryEngine(), + {}, + _TEST_ASYNC_STREAM_QUERY_SCHEMAS, + ), + ( + "Operation Registrable Engine", + OperationRegistrableEngine(), + {}, + _TEST_OPERATION_REGISTRABLE_SCHEMAS, + ), + ], + ) + @pytest.mark.asyncio + async def test_create_agent_engine_with_bidi_stream_query_operation_schema( + self, + test_case_name, + test_engine, + test_class_method_docs, + test_class_methods_spec, + ): + with mock.patch.object( + base.VertexAiResourceNoun, + "_get_gca_resource", + ) as get_gca_resource_mock: + test_spec = types.ReasoningEngineSpec() + test_spec.class_methods.extend(test_class_methods_spec) + get_gca_resource_mock.return_value = types.ReasoningEngine( + name=_TEST_AGENT_ENGINE_RESOURCE_NAME, + spec=test_spec, + ) + agent_engines.create(test_engine) + + # pytest does not allow absl.testing.parameterized.named_parameters. 
+ @pytest.mark.parametrize( + "test_case_name, test_engine, test_class_methods, test_class_methods_spec", + [ + ( + "Default Bidi Stream Queryable (Not Operation Registrable) Engine", + BidiStreamQueryEngine(), + [], + [], + ), + ( + "Operation Registrable Engine", + OperationRegistrableEngine(), + [], + _TEST_OPERATION_REGISTRABLE_SCHEMAS, + ), + ], + ) + @pytest.mark.asyncio + async def test_update_agent_engine_with_bidi_stream_query_operation_schema( + self, + test_case_name, + test_engine, + test_class_methods, + test_class_methods_spec, + update_agent_engine_mock, + ): + with mock.patch.object( + base.VertexAiResourceNoun, + "_get_gca_resource", + ) as get_gca_resource_mock: + test_spec = types.ReasoningEngineSpec() + test_spec.class_methods.append(_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA) + get_gca_resource_mock.return_value = types.ReasoningEngine( + name=_TEST_AGENT_ENGINE_RESOURCE_NAME, spec=test_spec + ) + test_agent_engine = agent_engines.create(MethodToBeUnregisteredEngine()) + assert hasattr(test_agent_engine, _TEST_METHOD_TO_BE_UNREGISTERED_NAME) + + with mock.patch.object( + base.VertexAiResourceNoun, + "_get_gca_resource", + ) as get_gca_resource_mock: + test_spec = types.ReasoningEngineSpec() + test_spec.class_methods.extend(test_class_methods_spec) + get_gca_resource_mock.return_value = types.ReasoningEngine( + name=_TEST_AGENT_ENGINE_RESOURCE_NAME, + spec=test_spec, + ) + test_agent_engine.update(agent_engine=test_engine) + @pytest.mark.usefixtures("google_auth_mock") class TestAgentEngineErrors: @@ -2887,8 +3028,8 @@ def test_create_agent_engine_no_query_method( TypeError, match=( "agent_engine has none of the following callable methods: " - "`query`, `async_query`, `stream_query`, `async_stream_query` " - "or `register_operations`." + "`query`, `async_query`, `stream_query`, `async_stream_query`, " + "`bidi_stream_query` or `register_operations`." 
), ): agent_engines.create( @@ -2911,8 +3052,8 @@ def test_create_agent_engine_noncallable_query_attribute( TypeError, match=( "agent_engine has none of the following callable methods: " - "`query`, `async_query`, `stream_query`, `async_stream_query` " - "or `register_operations`." + "`query`, `async_query`, `stream_query`, `async_stream_query`, " + "`bidi_stream_query` or `register_operations`." ), ): agent_engines.create( @@ -3024,6 +3165,23 @@ def test_create_agent_engine_with_invalid_async_stream_query_method( requirements=_TEST_AGENT_ENGINE_REQUIREMENTS, ) + def test_create_agent_engine_with_invalid_bidi_stream_query_method( + self, + create_agent_engine_mock, + cloud_storage_create_bucket_mock, + tarfile_open_mock, + cloudpickle_dump_mock, + cloudpickle_load_mock, + importlib_metadata_version_mock, + get_agent_engine_mock, + ): + with pytest.raises(ValueError, match="Invalid bidi_stream_query signature"): + agent_engines.create( + InvalidCapitalizeEngineWithoutBidiStreamQuerySelf(), + display_name=_TEST_AGENT_ENGINE_DISPLAY_NAME, + requirements=_TEST_AGENT_ENGINE_REQUIREMENTS, + ) + def test_create_agent_engine_with_invalid_register_operations_method( self, create_agent_engine_mock, @@ -3158,8 +3316,8 @@ def test_update_agent_engine_no_query_method( TypeError, match=( "agent_engine has none of the following callable methods: " - "`query`, `async_query`, `stream_query`, `async_stream_query` " - "or `register_operations`." + "`query`, `async_query`, `stream_query`, `async_stream_query`, " + "`bidi_stream_query` or `register_operations`." ), ): test_agent_engine = _generate_agent_engine_to_update() @@ -3181,8 +3339,8 @@ def test_update_agent_engine_noncallable_query_attribute( TypeError, match=( "agent_engine has none of the following callable methods: " - "`query`, `async_query`, `stream_query`, `async_stream_query` " - "or `register_operations`." 
+ "`query`, `async_query`, `stream_query`, `async_stream_query`, " + "`bidi_stream_query` or `register_operations`." ), ): test_agent_engine = _generate_agent_engine_to_update() @@ -3324,7 +3482,8 @@ def test_update_class_methods_spec_with_registered_operation_not_found(self): "register the API methods: " "https://cloud.google.com/vertex-ai/generative-ai/docs/agent-engine/develop/custom#custom-methods." "Error: {Unsupported api mode: `UNKNOWN_API_MODE`, " - "Supported modes are: ``, `async`, `async_stream`, `stream`.}" + "Supported modes are: ``, `a2a_extension`, `async`, `async_stream`, " + "`bidi_stream`, `stream`.}" ), ), ], @@ -3580,7 +3739,8 @@ class ToParsedJsonTest: ( # "unicode_data", httpbody_pb2.HttpBody( - content_type="application/json", data='{"a": "你好"}'.encode("utf-8") + content_type="application/json", + data='{"a": "你好"}'.encode("utf-8"), ), [{"a": "你好"}], ), diff --git a/tests/unit/vertex_langchain/test_reasoning_engines.py b/tests/unit/vertex_langchain/test_reasoning_engines.py index 9f7f547ead..019dc214d9 100644 --- a/tests/unit/vertex_langchain/test_reasoning_engines.py +++ b/tests/unit/vertex_langchain/test_reasoning_engines.py @@ -393,27 +393,27 @@ class ListClass: schema_name=_TEST_CUSTOM_METHOD_NAME, ) ) -_TEST_REASONING_ENGINE_CUSTOM_METHOD_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STANDARD_API_MODE +_TEST_REASONING_ENGINE_CUSTOM_METHOD_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STANDARD_API_MODE +) _TEST_REASONING_ENGINE_STREAM_QUERY_SCHEMA = _utils.to_proto( _utils.generate_schema( StreamQueryEngine().stream_query, schema_name=_TEST_DEFAULT_STREAM_METHOD_NAME, ) ) -_TEST_REASONING_ENGINE_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STREAM_API_MODE +_TEST_REASONING_ENGINE_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STREAM_API_MODE +) _TEST_REASONING_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA = _utils.to_proto( _utils.generate_schema( OperationRegistrableEngine().custom_stream_method, 
schema_name=_TEST_CUSTOM_STREAM_METHOD_NAME, ) ) -_TEST_REASONING_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STREAM_API_MODE +_TEST_REASONING_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STREAM_API_MODE +) _TEST_OPERATION_REGISTRABLE_SCHEMAS = [ _TEST_REASONING_ENGINE_QUERY_SCHEMA, _TEST_REASONING_ENGINE_CUSTOM_METHOD_SCHEMA, @@ -436,9 +436,9 @@ class ListClass: schema_name=_TEST_METHOD_TO_BE_UNREGISTERED_NAME, ) ) -_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STANDARD_API_MODE +_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STANDARD_API_MODE +) _TEST_STREAM_QUERY_SCHEMAS = [ _TEST_REASONING_ENGINE_STREAM_QUERY_SCHEMA, ] @@ -2334,7 +2334,8 @@ class ToParsedJsonTest: ( # "unicode_data", httpbody_pb2.HttpBody( - content_type="application/json", data='{"a": "你好"}'.encode("utf-8") + content_type="application/json", + data='{"a": "你好"}'.encode("utf-8"), ), [{"a": "你好"}], ), diff --git a/tests/unit/vertex_llama_index/test_reasoning_engine_templates_llama_index.py b/tests/unit/vertex_llama_index/test_reasoning_engine_templates_llama_index.py index e508b869f8..414e91d387 100644 --- a/tests/unit/vertex_llama_index/test_reasoning_engine_templates_llama_index.py +++ b/tests/unit/vertex_llama_index/test_reasoning_engine_templates_llama_index.py @@ -19,7 +19,9 @@ from google import auth import vertexai from google.cloud.aiplatform import initializer -from vertexai.preview.reasoning_engines.templates import llama_index +from vertexai.preview.reasoning_engines.templates import ( + llama_index, +) from vertexai.reasoning_engines import _utils from llama_index.core import prompts diff --git a/tests/unit/vertex_ray/test_prediction_utils.py b/tests/unit/vertex_ray/test_prediction_utils.py index 873f1c252d..d77930b8aa 100644 --- a/tests/unit/vertex_ray/test_prediction_utils.py +++ b/tests/unit/vertex_ray/test_prediction_utils.py @@ -1,5 +1,4 @@ -"""Test utils for 
Prediction Tests. -""" +"""Test utils for Prediction Tests.""" import numpy as np import sklearn diff --git a/tests/unit/vertexai/genai/replays/conftest.py b/tests/unit/vertexai/genai/replays/conftest.py index 1def7d46b6..f1c963a3ca 100644 --- a/tests/unit/vertexai/genai/replays/conftest.py +++ b/tests/unit/vertexai/genai/replays/conftest.py @@ -184,8 +184,6 @@ def client(use_vertex, replays_prefix, http_options, request): os.path.dirname(__file__), "credentials.json", ) - os.environ["GOOGLE_CLOUD_PROJECT"] = "project-id" - os.environ["GOOGLE_CLOUD_LOCATION"] = "location" os.environ["VAPO_CONFIG_PATH"] = "gs://dummy-test/dummy-config.json" os.environ["VAPO_SERVICE_ACCOUNT_PROJECT_NUMBER"] = "1234567890" os.environ["GCS_BUCKET"] = "test-bucket" diff --git a/tests/unit/vertexai/genai/replays/test_create_agent_engine_sandbox.py b/tests/unit/vertexai/genai/replays/test_create_agent_engine_sandbox.py new file mode 100644 index 0000000000..c63e8eb954 --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_create_agent_engine_sandbox.py @@ -0,0 +1,48 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types + + +def test_create_sandbox(client): + agent_engine = client.agent_engines.create() + assert isinstance(agent_engine, types.AgentEngine) + assert isinstance(agent_engine.api_resource, types.ReasoningEngine) + + operation = client.agent_engines.sandboxes.create( + name=agent_engine.api_resource.name, + spec={ + "code_execution_environment": { + "machineConfig": "MACHINE_CONFIG_VCPU4_RAM4GIB" + } + }, + config=types.CreateAgentEngineSandboxConfig(display_name="test_sandbox"), + ) + assert isinstance(operation, types.AgentEngineSandboxOperation) + assert operation.response.display_name == "test_sandbox" + assert ( + operation.response.spec.code_execution_environment.machine_config + == "MACHINE_CONFIG_VCPU4_RAM4GIB" + ) + assert operation.response.name.startswith(agent_engine.api_resource.name) + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="agent_engines.sandboxes.create", +) diff --git a/tests/unit/vertexai/genai/replays/test_create_prompt.py b/tests/unit/vertexai/genai/replays/test_create_prompt.py new file mode 100644 index 0000000000..0e563b846c --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_create_prompt.py @@ -0,0 +1,281 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types +from google.genai import types as genai_types + + +TEST_PROMPT_DATASET_ID = "8005484238453342208" +TEST_VARIABLES = [ + {"name": genai_types.Part(text="Alice")}, + {"name": genai_types.Part(text="Bob")}, +] +TEST_RESPONSE_SCHEMA = { + "type": "object", + "properties": {"response": {"type": "string"}}, +} +TEST_PROMPT = types.Prompt( + prompt_data=types.PromptData( + contents=[ + genai_types.Content( + role="user", + parts=[genai_types.Part(text="Hello, {name}! How are you?")], + ) + ], + safety_settings=[ + genai_types.SafetySetting( + category="HARM_CATEGORY_DANGEROUS_CONTENT", + threshold="BLOCK_MEDIUM_AND_ABOVE", + method="SEVERITY", + ), + ], + generation_config=genai_types.GenerationConfig( + temperature=0.1, + candidate_count=1, + top_p=0.95, + top_k=40, + response_modalities=["TEXT"], + response_schema=TEST_RESPONSE_SCHEMA, + ), + system_instruction=genai_types.Content( + parts=[genai_types.Part(text="Please answer in a short sentence.")] + ), + tools=[ + genai_types.Tool( + google_search_retrieval=genai_types.GoogleSearchRetrieval( + dynamic_retrieval_config=genai_types.DynamicRetrievalConfig( + mode="MODE_DYNAMIC" + ) + ) + ), + ], + tool_config=genai_types.ToolConfig( + retrieval_config=genai_types.RetrievalConfig( + lat_lng=genai_types.LatLng(latitude=37.7749, longitude=-122.4194) + ) + ), + model="gemini-2.0-flash-001", + variables=TEST_VARIABLES, + ), +) +TEST_CONFIG = types.CreatePromptConfig( + prompt_display_name="my_prompt", + version_display_name="my_version", +) + + +def test_create_dataset(client): + create_dataset_operation = client.prompt_management._create_dataset_resource( + name="projects/vertex-sdk-dev/locations/us-central1", + display_name="test display name", + metadata_schema_uri="gs://google-cloud-aiplatform/schema/dataset/metadata/text_prompt_1.0.0.yaml", 
+ metadata={ + "promptType": "freeform", + "promptApiSchema": { + "multimodalPrompt": { + "promptMessage": { + "contents": [ + { + "role": "user", + "parts": [{"text": "Hello, {name}! How are you?"}], + } + ], + "safety_settings": [ + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + "method": "SEVERITY", + } + ], + "generation_config": {"temperature": 0.1}, + "model": "projects/vertex-sdk-dev/locations/us-central1/publishers/google/models/gemini-2.0-flash-001", + "system_instruction": { + "role": "user", + "parts": [{"text": "Please answer in a short sentence."}], + }, + } + }, + "apiSchemaVersion": "1.0.0", + "executions": [ + { + "arguments": { + "name": {"partList": {"parts": [{"text": "Alice"}]}} + } + }, + {"arguments": {"name": {"partList": {"parts": [{"text": "Bob"}]}}}}, + ], + }, + }, + model_reference="gemini-2.0-flash-001", + ) + assert isinstance(create_dataset_operation, types.DatasetOperation) + assert create_dataset_operation + + +def test_create_dataset_version(client): + dataset_version_resource = ( + client.prompt_management._create_dataset_version_resource( + dataset_name=TEST_PROMPT_DATASET_ID, + display_name="my new version yay", + ) + ) + assert isinstance(dataset_version_resource, types.DatasetOperation) + + +def test_create_version_e2e(client): + prompt_resource = client.prompt_management.create_version( + prompt=TEST_PROMPT, + config=TEST_CONFIG, + ) + assert isinstance(prompt_resource, types.Prompt) + assert isinstance(prompt_resource.dataset, types.Dataset) + + # Test local prompt resource is the same after calling get() + retrieved_prompt = client.prompt_management.get(prompt_id=prompt_resource.prompt_id) + assert ( + retrieved_prompt.prompt_data.system_instruction + == prompt_resource.prompt_data.system_instruction + ) + assert ( + retrieved_prompt.prompt_data.variables[0]["name"].text + == TEST_VARIABLES[0]["name"].text + ) + assert ( + 
retrieved_prompt.prompt_data.generation_config.temperature + == prompt_resource.prompt_data.generation_config.temperature + ) + assert ( + retrieved_prompt.prompt_data.safety_settings + == prompt_resource.prompt_data.safety_settings + ) + assert retrieved_prompt.prompt_data.model == prompt_resource.prompt_data.model + assert ( + retrieved_prompt.prompt_data.tool_config + == prompt_resource.prompt_data.tool_config + ) + assert ( + retrieved_prompt.prompt_data.generation_config + == prompt_resource.prompt_data.generation_config + ) + + # Test calling create_version again uses dataset from local Prompt resource. + prompt_resource_2 = client.prompt_management.create_version( + prompt=TEST_PROMPT, + config=types.CreatePromptConfig( + version_display_name="my_version", + ), + ) + assert prompt_resource_2.dataset.name == prompt_resource.dataset.name + + +def test_create_version_in_existing_dataset(client): + prompt_resource = client.prompt_management.create_version( + prompt=TEST_PROMPT, + config=types.CreatePromptConfig( + prompt_id=TEST_PROMPT_DATASET_ID, + prompt_display_name=TEST_CONFIG.prompt_display_name, + version_display_name="my_version_existing_dataset", + ), + ) + assert isinstance(prompt_resource, types.Prompt) + assert isinstance(prompt_resource.dataset, types.Dataset) + assert isinstance(prompt_resource.dataset_version, types.DatasetVersion) + assert prompt_resource.dataset.name.endswith(TEST_PROMPT_DATASET_ID) + + +def test_create_version_with_version_name(client): + version_name = "a_new_version_yay" + prompt_resource = client.prompt_management.create_version( + prompt=TEST_PROMPT, + config=types.CreatePromptConfig( + version_display_name=version_name, + ), + ) + assert isinstance(prompt_resource, types.Prompt) + assert isinstance(prompt_resource.dataset, types.Dataset) + assert isinstance(prompt_resource.dataset_version, types.DatasetVersion) + assert prompt_resource.dataset_version.display_name == version_name + + +def 
test_create_version_with_file_data(client): + version_name = "prompt with file data" + + audio_file_part = genai_types.Part( + file_data=genai_types.FileData( + file_uri="https://generativelanguage.googleapis.com/v1beta/files/57w3vpfomj71", + mime_type="video/mp4", + ), + ) + + prompt_resource = client.prompt_management.create_version( + prompt=types.Prompt( + prompt_data=types.PromptData( + contents=[ + genai_types.Content( + parts=[ + audio_file_part, + genai_types.Part(text="What is this recording about?"), + ] + ) + ], + system_instruction=genai_types.Content( + parts=[genai_types.Part(text="Answer in three sentences.")] + ), + generation_config=genai_types.GenerationConfig(temperature=0.1), + safety_settings=[ + genai_types.SafetySetting( + category="HARM_CATEGORY_DANGEROUS_CONTENT", + threshold="BLOCK_MEDIUM_AND_ABOVE", + method="SEVERITY", + ) + ], + model="gemini-2.0-flash-001", + ), + ), + config=types.CreatePromptConfig( + version_display_name=version_name, + prompt_display_name="my prompt with file data", + ), + ) + assert isinstance(prompt_resource, types.Prompt) + assert isinstance(prompt_resource.dataset, types.Dataset) + assert isinstance(prompt_resource.dataset_version, types.DatasetVersion) + assert prompt_resource.dataset_version.display_name == version_name + + # Confirm file data is preserved when we retrieve the prompt. + retrieved_prompt = client.prompt_management.get( + prompt_id=prompt_resource.prompt_id, + ) + assert ( + retrieved_prompt.prompt_data.contents[0].parts[0].file_data.file_uri + == audio_file_part.file_data.file_uri + ) + assert ( + retrieved_prompt.prompt_data.contents[0].parts[0].file_data.display_name + == audio_file_part.file_data.display_name + ) + + # Test assemble_contents on the prompt works. 
+ contents = retrieved_prompt.assemble_contents() + assert contents[0] == prompt_resource.prompt_data.contents[0] + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="prompt_management.create_version", +) diff --git a/tests/unit/vertexai/genai/replays/test_delete_agent_engine_sandbox.py b/tests/unit/vertexai/genai/replays/test_delete_agent_engine_sandbox.py new file mode 100644 index 0000000000..a85a9b0f7b --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_delete_agent_engine_sandbox.py @@ -0,0 +1,46 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types + + +def test_delete_sandbox(client): + agent_engine = client.agent_engines.create() + assert isinstance(agent_engine, types.AgentEngine) + assert isinstance(agent_engine.api_resource, types.ReasoningEngine) + + operation = client.agent_engines.sandboxes.create( + name=agent_engine.api_resource.name, + spec={ + "code_execution_environment": { + "machineConfig": "MACHINE_CONFIG_VCPU4_RAM4GIB" + } + }, + config=types.CreateAgentEngineSandboxConfig(display_name="test_sandbox"), + ) + assert isinstance(operation, types.AgentEngineSandboxOperation) + delete_operation = client.agent_engines.sandboxes.delete( + name=operation.response.name, + ) + assert isinstance(delete_operation, types.DeleteAgentEngineSandboxOperation) + assert "/operations/" in delete_operation.name + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="agent_engines.sandboxes.delete", +) diff --git a/tests/unit/vertexai/genai/replays/test_execute_code_agent_engine_sandbox.py b/tests/unit/vertexai/genai/replays/test_execute_code_agent_engine_sandbox.py new file mode 100644 index 0000000000..ab51fe4904 --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_execute_code_agent_engine_sandbox.py @@ -0,0 +1,51 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types + + +def test_execute_code_sandbox(client): + agent_engine = client.agent_engines.create() + assert isinstance(agent_engine, types.AgentEngine) + assert isinstance(agent_engine.api_resource, types.ReasoningEngine) + + operation = client.agent_engines.sandboxes.create( + name=agent_engine.api_resource.name, + spec={ + "code_execution_environment": { + "machineConfig": "MACHINE_CONFIG_VCPU4_RAM4GIB" + } + }, + config=types.CreateAgentEngineSandboxConfig(display_name="test_sandbox"), + ) + assert isinstance(operation, types.AgentEngineSandboxOperation) + input_data = { + "language": "python", + "code": 'with open("hello.txt","w") as file:\n file.write("Hello, world!")', + } + response = client.agent_engines.sandboxes.execute_code( + name=operation.response.name, + input_data=input_data, + ) + assert response.outputs[0].mime_type == "application/json" + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="agent_engines.sandboxes.execute_code", +) diff --git a/tests/unit/vertexai/genai/replays/test_get_agent_engine_sandbox.py b/tests/unit/vertexai/genai/replays/test_get_agent_engine_sandbox.py new file mode 100644 index 0000000000..b82a74a397 --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_get_agent_engine_sandbox.py @@ -0,0 +1,47 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types + + +def test_get_sandbox(client): + agent_engine = client.agent_engines.create() + assert isinstance(agent_engine, types.AgentEngine) + assert isinstance(agent_engine.api_resource, types.ReasoningEngine) + + operation = client.agent_engines.sandboxes.create( + name=agent_engine.api_resource.name, + spec={ + "code_execution_environment": { + "machineConfig": "MACHINE_CONFIG_VCPU4_RAM4GIB" + } + }, + config=types.CreateAgentEngineSandboxConfig(display_name="test_sandbox"), + ) + assert isinstance(operation, types.AgentEngineSandboxOperation) + sandbox = client.agent_engines.sandboxes.get( + name=operation.response.name, + ) + assert isinstance(sandbox, types.SandboxEnvironment) + assert sandbox.name == operation.response.name + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="agent_engines.sandboxes.get", +) diff --git a/tests/unit/vertexai/genai/replays/test_get_prompt_operation.py b/tests/unit/vertexai/genai/replays/test_get_prompt_operation.py new file mode 100644 index 0000000000..a345e5513d --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_get_prompt_operation.py @@ -0,0 +1,32 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper + + +def test_get_dataset_operation(client): + dataset_operation = client.prompt_management._get_dataset_operation( + dataset_id="6550997480673116160", + operation_id="5108504762664353792", + ) + assert dataset_operation.name is not None + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="prompt_management._get_dataset_operation", +) diff --git a/tests/unit/vertexai/genai/replays/test_get_prompt_resource.py b/tests/unit/vertexai/genai/replays/test_get_prompt_resource.py new file mode 100644 index 0000000000..9d063cc88e --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_get_prompt_resource.py @@ -0,0 +1,77 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types +from google.genai import types as genai_types + +TEST_PROMPT_DATASET_ID = "6550997480673116160" +TEST_PROMPT_VERSION_ID = "2" + + +def test_get_dataset(client): + dataset = client.prompt_management._get_dataset_resource( + name=TEST_PROMPT_DATASET_ID + ) + assert isinstance(dataset, types.Dataset) + + +def test_get_prompt(client): + prompt = client.prompt_management.get(prompt_id=TEST_PROMPT_DATASET_ID) + assert isinstance(prompt, types.Prompt) + assert isinstance(prompt.dataset, types.Dataset) + assert prompt.dataset.name.endswith(TEST_PROMPT_DATASET_ID) + assert ( + prompt.prompt_data + == prompt.dataset.metadata.prompt_api_schema.multimodal_prompt.prompt_message + ) + assert isinstance(prompt.prompt_data, types.SchemaPromptSpecPromptMessage) + + contents = prompt.assemble_contents() + assert isinstance(contents[0], genai_types.Content) + + +def test_get_prompt_version(client): + prompt = client.prompt_management.get( + prompt_id=TEST_PROMPT_DATASET_ID, + config=types.GetPromptConfig( + version_id=TEST_PROMPT_VERSION_ID, + ), + ) + assert isinstance(prompt, types.Prompt) + assert isinstance(prompt.dataset, types.Dataset) + assert isinstance(prompt.dataset_version, types.DatasetVersion) + assert prompt.dataset.name.endswith(TEST_PROMPT_DATASET_ID) + assert prompt.dataset_version.name.endswith(TEST_PROMPT_VERSION_ID) + + +def test_get_prompt_with_variables_and_assemble_contents(client): + prompt = client.prompt_management.get( + prompt_id="4505721135056289792", + ) + assert isinstance(prompt.prompt_data, types.SchemaPromptSpecPromptMessage) + assembled_contents = prompt.assemble_contents() + assert isinstance(assembled_contents, list) + assert len(assembled_contents) == 1 + assert isinstance(assembled_contents[0], genai_types.Content) + assert assembled_contents[0].parts[0].text == 
"Hello, Alice! How are you?" + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + test_method="prompt_management._get_dataset_resource", +) diff --git a/tests/unit/vertexai/genai/replays/test_list_agent_engine_memories.py b/tests/unit/vertexai/genai/replays/test_list_agent_engine_memories.py index 0ada6a75d5..5ce3d77fb9 100644 --- a/tests/unit/vertexai/genai/replays/test_list_agent_engine_memories.py +++ b/tests/unit/vertexai/genai/replays/test_list_agent_engine_memories.py @@ -21,20 +21,37 @@ def test_list_memories(client): agent_engine = client.agent_engines.create() assert not list( - client.agent_engines.list_memories( + client.agent_engines.memories.list( name=agent_engine.api_resource.name, ) ) - client.agent_engines.create_memory( + client.agent_engines.memories.create( name=agent_engine.api_resource.name, fact="memory_fact", scope={"user_id": "123"}, + config={ + "wait_for_completion": True, + }, ) - memory_list = client.agent_engines.list_memories( + client.agent_engines.memories.create( name=agent_engine.api_resource.name, + fact="memory_fact_2", + scope={"user_id": "456"}, + config={ + "wait_for_completion": True, + }, + ) + memory_list = client.agent_engines.memories.list( + name=agent_engine.api_resource.name, + config=types.ListAgentEngineMemoryConfig( + page_size=1, + order_by="create_time asc", + ), ) assert len(memory_list) == 1 assert isinstance(memory_list[0], types.Memory) + assert memory_list[0].fact == "memory_fact" + assert memory_list[0].scope["user_id"] == "123" # Clean up resources. 
agent_engine.delete(force=True) diff --git a/tests/unit/vertexai/genai/replays/test_list_agent_engine_sandboxes.py b/tests/unit/vertexai/genai/replays/test_list_agent_engine_sandboxes.py new file mode 100644 index 0000000000..fd8e72421a --- /dev/null +++ b/tests/unit/vertexai/genai/replays/test_list_agent_engine_sandboxes.py @@ -0,0 +1,52 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# pylint: disable=protected-access,bad-continuation,missing-function-docstring + +from tests.unit.vertexai.genai.replays import pytest_helper +from vertexai._genai import types + + +def test_list_sandboxes(client): + agent_engine = client.agent_engines.create() + assert not list( + client.agent_engines.sandboxes.list( + name=agent_engine.api_resource.name, + ) + ) + + operation = client.agent_engines.sandboxes.create( + name=agent_engine.api_resource.name, + spec={ + "code_execution_environment": { + "machineConfig": "MACHINE_CONFIG_VCPU4_RAM4GIB" + } + }, + config=types.CreateAgentEngineSandboxConfig(display_name="test_sandbox"), + ) + assert isinstance(operation, types.AgentEngineSandboxOperation) + + sandbox_list = client.agent_engines.sandboxes.list( + name=agent_engine.api_resource.name, + ) + assert len(sandbox_list) == 1 + assert isinstance(sandbox_list[0], types.SandboxEnvironment) + assert sandbox_list[0].name == operation.response.name + + +pytestmark = pytest_helper.setup( + file=__file__, + globals_for_file=globals(), + 
test_method="agent_engines.sandboxes.list", +) diff --git a/tests/unit/vertexai/genai/replays/test_prompt_optimizer_optimize_job_state.py b/tests/unit/vertexai/genai/replays/test_prompt_optimizer_optimize_job_state.py index f320be970d..1a0477e886 100644 --- a/tests/unit/vertexai/genai/replays/test_prompt_optimizer_optimize_job_state.py +++ b/tests/unit/vertexai/genai/replays/test_prompt_optimizer_optimize_job_state.py @@ -18,6 +18,7 @@ from tests.unit.vertexai.genai.replays import pytest_helper from vertexai._genai import types +from google.genai import types as genai_types import pytest @@ -50,7 +51,7 @@ def test_optimize(client): config=config, ) assert isinstance(job, types.CustomJob) - assert job.state == types.JobState.JOB_STATE_SUCCEEDED + assert job.state == genai_types.JobState.JOB_STATE_SUCCEEDED pytestmark = pytest_helper.setup( @@ -79,7 +80,7 @@ async def test_optimize_async(client): config=config, ) assert isinstance(job, types.CustomJob) - assert job.state == types.JobState.JOB_STATE_PENDING + assert job.state == genai_types.JobState.JOB_STATE_PENDING @pytest.mark.asyncio @@ -99,5 +100,5 @@ async def test_optimize_async_with_config_wait_for_completion(client, caplog): config=config, ) assert isinstance(job, types.CustomJob) - assert job.state == types.JobState.JOB_STATE_PENDING + assert job.state == genai_types.JobState.JOB_STATE_PENDING assert "Ignoring wait_for_completion=True" in caplog.text diff --git a/tests/unit/vertexai/genai/test_agent_engines.py b/tests/unit/vertexai/genai/test_agent_engines.py index af5ef986e3..b183e97886 100644 --- a/tests/unit/vertexai/genai/test_agent_engines.py +++ b/tests/unit/vertexai/genai/test_agent_engines.py @@ -149,6 +149,20 @@ async def custom_async_stream_method( for chunk in _TEST_AGENT_ENGINE_STREAM_QUERY_RESPONSE: yield chunk + async def bidi_stream_query(self, input_queue: asyncio.Queue) -> AsyncIterable[Any]: + """Runs the bidi stream engine.""" + while True: + chunk = await input_queue.get() + yield chunk + 
+ async def custom_bidi_stream_method( + self, input_queue: asyncio.Queue + ) -> AsyncIterable[Any]: + """Runs the async bidi stream engine.""" + while True: + chunk = await input_queue.get() + yield chunk + def clone(self): return self @@ -170,6 +184,10 @@ def register_operations(self) -> Dict[str, List[str]]: _TEST_DEFAULT_ASYNC_STREAM_METHOD_NAME, _TEST_CUSTOM_ASYNC_STREAM_METHOD_NAME, ], + _TEST_BIDI_STREAM_API_MODE: [ + _TEST_DEFAULT_BIDI_STREAM_METHOD_NAME, + _TEST_CUSTOM_BIDI_STREAM_METHOD_NAME, + ], } @@ -323,21 +341,27 @@ def register_operations(self) -> Dict[str, List[str]]: _TEST_ASYNC_API_MODE = _agent_engines_utils._ASYNC_API_MODE _TEST_STREAM_API_MODE = _agent_engines_utils._STREAM_API_MODE _TEST_ASYNC_STREAM_API_MODE = _agent_engines_utils._ASYNC_STREAM_API_MODE +_TEST_BIDI_STREAM_API_MODE = _agent_engines_utils._BIDI_STREAM_API_MODE _TEST_DEFAULT_METHOD_NAME = _agent_engines_utils._DEFAULT_METHOD_NAME _TEST_DEFAULT_ASYNC_METHOD_NAME = _agent_engines_utils._DEFAULT_ASYNC_METHOD_NAME _TEST_DEFAULT_STREAM_METHOD_NAME = _agent_engines_utils._DEFAULT_STREAM_METHOD_NAME _TEST_DEFAULT_ASYNC_STREAM_METHOD_NAME = ( _agent_engines_utils._DEFAULT_ASYNC_STREAM_METHOD_NAME ) +_TEST_DEFAULT_BIDI_STREAM_METHOD_NAME = ( + _agent_engines_utils._DEFAULT_BIDI_STREAM_METHOD_NAME +) _TEST_CAPITALIZE_ENGINE_METHOD_DOCSTRING = "Runs the engine." _TEST_STREAM_METHOD_DOCSTRING = "Runs the stream engine." _TEST_ASYNC_STREAM_METHOD_DOCSTRING = "Runs the async stream engine." +_TEST_BIDI_STREAM_METHOD_DOCSTRING = "Runs the bidi stream engine." 
_TEST_MODE_KEY_IN_SCHEMA = _agent_engines_utils._MODE_KEY_IN_SCHEMA _TEST_METHOD_NAME_KEY_IN_SCHEMA = _agent_engines_utils._METHOD_NAME_KEY_IN_SCHEMA _TEST_CUSTOM_METHOD_NAME = "custom_method" _TEST_CUSTOM_ASYNC_METHOD_NAME = "custom_async_method" _TEST_CUSTOM_STREAM_METHOD_NAME = "custom_stream_method" _TEST_CUSTOM_ASYNC_STREAM_METHOD_NAME = "custom_async_stream_method" +_TEST_CUSTOM_BIDI_STREAM_METHOD_NAME = "custom_bidi_stream_method" _TEST_CUSTOM_METHOD_DEFAULT_DOCSTRING = """ Runs the Agent Engine to serve the user request. @@ -501,6 +525,7 @@ def register_operations(self) -> Dict[str, List[str]]: "memory": "4Gi", } _TEST_AGENT_ENGINE_CONTAINER_CONCURRENCY = 4 +_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT = "test-custom-service-account" _TEST_AGENT_ENGINE_ENCRYPTION_SPEC = {"kms_key_name": "test-kms-key"} _TEST_AGENT_ENGINE_SPEC = _genai_types.ReasoningEngineSpecDict( agent_framework=_TEST_AGENT_ENGINE_FRAMEWORK, @@ -526,6 +551,7 @@ def register_operations(self) -> Dict[str, List[str]]: dependency_files_gcs_uri=_TEST_AGENT_ENGINE_DEPENDENCY_FILES_GCS_URI, requirements_gcs_uri=_TEST_AGENT_ENGINE_REQUIREMENTS_GCS_URI, ), + service_account=_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, ) _TEST_AGENT_ENGINE_STREAM_QUERY_RESPONSE = [{"output": "hello"}, {"output": "world"}] _TEST_AGENT_ENGINE_OPERATION_SCHEMAS = [] @@ -539,16 +565,16 @@ def register_operations(self) -> Dict[str, List[str]]: OperationRegistrableEngine().custom_method, schema_name=_TEST_CUSTOM_METHOD_NAME, ) -_TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STANDARD_API_MODE +_TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STANDARD_API_MODE +) _TEST_AGENT_ENGINE_ASYNC_CUSTOM_METHOD_SCHEMA = _agent_engines_utils._generate_schema( OperationRegistrableEngine().custom_async_method, schema_name=_TEST_CUSTOM_ASYNC_METHOD_NAME, ) -_TEST_AGENT_ENGINE_ASYNC_CUSTOM_METHOD_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_ASYNC_API_MODE 
+_TEST_AGENT_ENGINE_ASYNC_CUSTOM_METHOD_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_ASYNC_API_MODE +) _TEST_AGENT_ENGINE_STREAM_QUERY_SCHEMA = _agent_engines_utils._generate_schema( StreamQueryEngine().stream_query, schema_name=_TEST_DEFAULT_STREAM_METHOD_NAME, @@ -558,25 +584,41 @@ def register_operations(self) -> Dict[str, List[str]]: OperationRegistrableEngine().custom_stream_method, schema_name=_TEST_CUSTOM_STREAM_METHOD_NAME, ) -_TEST_AGENT_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STREAM_API_MODE +_TEST_AGENT_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STREAM_API_MODE +) _TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA = _agent_engines_utils._generate_schema( AsyncStreamQueryEngine().async_stream_query, schema_name=_TEST_DEFAULT_ASYNC_STREAM_METHOD_NAME, ) -_TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_ASYNC_STREAM_API_MODE +_TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_ASYNC_STREAM_API_MODE +) _TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA = ( _agent_engines_utils._generate_schema( OperationRegistrableEngine().custom_async_stream_method, schema_name=_TEST_CUSTOM_ASYNC_STREAM_METHOD_NAME, ) ) -_TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_ASYNC_STREAM_API_MODE +_TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_ASYNC_STREAM_API_MODE +) +_TEST_AGENT_ENGINE_BIDI_STREAM_QUERY_SCHEMA = _agent_engines_utils._generate_schema( + OperationRegistrableEngine().bidi_stream_query, + schema_name=_TEST_DEFAULT_BIDI_STREAM_METHOD_NAME, +) +_TEST_AGENT_ENGINE_BIDI_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_BIDI_STREAM_API_MODE +) +_TEST_AGENT_ENGINE_CUSTOM_BIDI_STREAM_QUERY_SCHEMA = ( + _agent_engines_utils._generate_schema( + OperationRegistrableEngine().custom_bidi_stream_method, + schema_name=_TEST_CUSTOM_BIDI_STREAM_METHOD_NAME, + ) +) 
+_TEST_AGENT_ENGINE_CUSTOM_BIDI_STREAM_QUERY_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_BIDI_STREAM_API_MODE +) _TEST_OPERATION_REGISTRABLE_SCHEMAS = [ _TEST_AGENT_ENGINE_QUERY_SCHEMA, _TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA, @@ -586,6 +628,8 @@ def register_operations(self) -> Dict[str, List[str]]: _TEST_AGENT_ENGINE_CUSTOM_STREAM_QUERY_SCHEMA, _TEST_AGENT_ENGINE_ASYNC_STREAM_QUERY_SCHEMA, _TEST_AGENT_ENGINE_CUSTOM_ASYNC_STREAM_QUERY_SCHEMA, + _TEST_AGENT_ENGINE_BIDI_STREAM_QUERY_SCHEMA, + _TEST_AGENT_ENGINE_CUSTOM_BIDI_STREAM_QUERY_SCHEMA, ] _TEST_OPERATION_NOT_REGISTERED_SCHEMAS = [ _TEST_AGENT_ENGINE_CUSTOM_METHOD_SCHEMA, @@ -601,9 +645,9 @@ def register_operations(self) -> Dict[str, List[str]]: MethodToBeUnregisteredEngine().method_to_be_unregistered, schema_name=_TEST_METHOD_TO_BE_UNREGISTERED_NAME, ) -_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA[ - _TEST_MODE_KEY_IN_SCHEMA -] = _TEST_STANDARD_API_MODE +_TEST_METHOD_TO_BE_UNREGISTERED_SCHEMA[_TEST_MODE_KEY_IN_SCHEMA] = ( + _TEST_STANDARD_API_MODE +) _TEST_ASYNC_QUERY_SCHEMAS = [_TEST_AGENT_ENGINE_ASYNC_METHOD_SCHEMA] _TEST_STREAM_QUERY_SCHEMAS = [ _TEST_AGENT_ENGINE_STREAM_QUERY_SCHEMA, @@ -803,6 +847,7 @@ def test_create_agent_engine_config_full(self, mock_prepare): gcs_dir_name=_TEST_GCS_DIR_NAME, extra_packages=[_TEST_AGENT_ENGINE_EXTRA_PACKAGE_PATH], env_vars=_TEST_AGENT_ENGINE_ENV_VARS_INPUT, + service_account=_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, psc_interface_config=_TEST_AGENT_ENGINE_PSC_INTERFACE_CONFIG, min_instances=_TEST_AGENT_ENGINE_MIN_INSTANCES, max_instances=_TEST_AGENT_ENGINE_MAX_INSTANCES, @@ -841,6 +886,10 @@ def test_create_agent_engine_config_full(self, mock_prepare): } assert config["encryption_spec"] == _TEST_AGENT_ENGINE_ENCRYPTION_SPEC assert config["spec"]["class_methods"] == [_TEST_AGENT_ENGINE_CLASS_METHOD_1] + assert ( + config["spec"]["service_account"] + == _TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT + ) @mock.patch.object(_agent_engines_utils, "_prepare") def 
test_update_agent_engine_config_full(self, mock_prepare): @@ -854,6 +903,7 @@ def test_update_agent_engine_config_full(self, mock_prepare): gcs_dir_name=_TEST_GCS_DIR_NAME, extra_packages=[_TEST_AGENT_ENGINE_EXTRA_PACKAGE_PATH], env_vars=_TEST_AGENT_ENGINE_ENV_VARS_INPUT, + service_account=_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, ) assert config["display_name"] == _TEST_AGENT_ENGINE_DISPLAY_NAME assert config["description"] == _TEST_AGENT_ENGINE_DESCRIPTION @@ -880,6 +930,10 @@ def test_update_agent_engine_config_full(self, mock_prepare): ], } assert config["spec"]["class_methods"] == [_TEST_AGENT_ENGINE_CLASS_METHOD_1] + assert ( + config["spec"]["service_account"] + == _TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT + ) assert config["update_mask"] == ",".join( [ "display_name", @@ -889,6 +943,7 @@ def test_update_agent_engine_config_full(self, mock_prepare): "spec.package_spec.requirements_gcs_uri", "spec.deployment_spec.env", "spec.deployment_spec.secret_env", + "spec.service_account", "spec.class_methods", "spec.agent_framework", ] @@ -1289,6 +1344,7 @@ def test_create_agent_engine_with_env_vars_dict( gcs_dir_name=None, extra_packages=[_TEST_AGENT_ENGINE_EXTRA_PACKAGE_PATH], env_vars=_TEST_AGENT_ENGINE_ENV_VARS_INPUT, + service_account=None, context_spec=None, psc_interface_config=None, min_instances=None, @@ -1316,6 +1372,86 @@ def test_create_agent_engine_with_env_vars_dict( None, ) + @mock.patch.object(agent_engines.AgentEngines, "_create_config") + @mock.patch.object(_agent_engines_utils, "_await_operation") + def test_create_agent_engine_with_custom_service_account( + self, + mock_await_operation, + mock_create_config, + ): + mock_create_config.return_value = { + "display_name": _TEST_AGENT_ENGINE_DISPLAY_NAME, + "description": _TEST_AGENT_ENGINE_DESCRIPTION, + "spec": { + "package_spec": { + "python_version": _TEST_PYTHON_VERSION, + "pickle_object_gcs_uri": _TEST_AGENT_ENGINE_GCS_URI, + "requirements_gcs_uri": _TEST_AGENT_ENGINE_REQUIREMENTS_GCS_URI, + }, + 
"class_methods": [_TEST_AGENT_ENGINE_CLASS_METHOD_1], + "service_account": _TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, + "agent_framework": _TEST_AGENT_ENGINE_FRAMEWORK, + }, + } + mock_await_operation.return_value = _genai_types.AgentEngineOperation( + response=_genai_types.ReasoningEngine( + name=_TEST_AGENT_ENGINE_RESOURCE_NAME, + spec=_TEST_AGENT_ENGINE_SPEC, + ) + ) + with mock.patch.object( + self.client.agent_engines._api_client, "request" + ) as request_mock: + request_mock.return_value = genai_types.HttpResponse(body="") + self.client.agent_engines.create( + agent=self.test_agent, + config=_genai_types.AgentEngineConfig( + display_name=_TEST_AGENT_ENGINE_DISPLAY_NAME, + requirements=_TEST_AGENT_ENGINE_REQUIREMENTS, + extra_packages=[_TEST_AGENT_ENGINE_EXTRA_PACKAGE_PATH], + staging_bucket=_TEST_STAGING_BUCKET, + service_account=_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, + ), + ) + mock_create_config.assert_called_with( + mode="create", + agent=self.test_agent, + staging_bucket=_TEST_STAGING_BUCKET, + requirements=_TEST_AGENT_ENGINE_REQUIREMENTS, + display_name=_TEST_AGENT_ENGINE_DISPLAY_NAME, + description=None, + gcs_dir_name=None, + extra_packages=[_TEST_AGENT_ENGINE_EXTRA_PACKAGE_PATH], + env_vars=None, + service_account=_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, + context_spec=None, + psc_interface_config=None, + min_instances=None, + max_instances=None, + resource_limits=None, + container_concurrency=None, + encryption_spec=None, + ) + request_mock.assert_called_with( + "post", + "reasoningEngines", + { + "displayName": _TEST_AGENT_ENGINE_DISPLAY_NAME, + "description": _TEST_AGENT_ENGINE_DESCRIPTION, + "spec": { + "agentFramework": _TEST_AGENT_ENGINE_FRAMEWORK, + "classMethods": [_TEST_AGENT_ENGINE_CLASS_METHOD_1], + "packageSpec": { + "pickle_object_gcs_uri": _TEST_AGENT_ENGINE_GCS_URI, + "python_version": _TEST_PYTHON_VERSION, + "requirements_gcs_uri": _TEST_AGENT_ENGINE_REQUIREMENTS_GCS_URI, + }, + "serviceAccount": 
_TEST_AGENT_ENGINE_CUSTOM_SERVICE_ACCOUNT, + }, + }, + None, + ) + @pytest.mark.usefixtures("caplog") @mock.patch.object(_agent_engines_utils, "_prepare") @mock.patch.object(_agent_engines_utils, "_await_operation") @@ -1779,6 +1915,20 @@ async def consume(): ), _TEST_ASYNC_STREAM_API_MODE, ), + ( + _agent_engines_utils._generate_schema( + OperationRegistrableEngine().bidi_stream_query, + schema_name=_TEST_DEFAULT_BIDI_STREAM_METHOD_NAME, + ), + _TEST_BIDI_STREAM_API_MODE, + ), + ( + _agent_engines_utils._generate_schema( + OperationRegistrableEngine().custom_bidi_stream_method, + schema_name=_TEST_CUSTOM_BIDI_STREAM_METHOD_NAME, + ), + _TEST_BIDI_STREAM_API_MODE, + ), ], ), ( @@ -1923,7 +2073,7 @@ def test_update_agent_engine_description(self, mock_await_operation): "register the API methods: " "https://cloud.google.com/vertex-ai/generative-ai/docs/agent-engine/develop/custom#custom-methods." "Error: {Unsupported api mode: `UNKNOWN_API_MODE`, " - "Supported modes are: ``, `async`, `async_stream`, `stream`.}" + "Supported modes are: ``, `a2a_extension`, `async`, `async_stream`, `stream`.}" ), ), ], diff --git a/tests/unit/vertexai/genai/test_evals.py b/tests/unit/vertexai/genai/test_evals.py index d011d79aaf..122e8c09b0 100644 --- a/tests/unit/vertexai/genai/test_evals.py +++ b/tests/unit/vertexai/genai/test_evals.py @@ -25,6 +25,7 @@ from vertexai import _genai from vertexai._genai import _evals_data_converters from vertexai._genai import _evals_metric_handlers +from vertexai._genai import _observability_data_converter from vertexai._genai import evals from vertexai._genai import types as vertexai_genai_types from google.genai import client @@ -1921,6 +1922,277 @@ def test_convert_skips_missing_request_or_response(self): assert len(result_dataset.eval_cases) == 0 +class TestObservabilityDataConverter: + """Unit tests for the ObservabilityDataConverter class.""" + + def setup_method(self): + self.converter = 
_observability_data_converter.ObservabilityDataConverter() + + def test_convert_simple_request_response(self): + raw_data = [ + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Hello", "type": "text"}]} + ], + "response": [ + { + "role": "system", + "parts": [{"content": "Hi", "type": "text"}], + } + ], + } + ] + result_dataset = self.converter.convert(raw_data) + + assert isinstance(result_dataset, vertexai_genai_types.EvaluationDataset) + assert len(result_dataset.eval_cases) == 1 + + eval_case = result_dataset.eval_cases[0] + assert eval_case.prompt == genai_types.Content( + parts=[genai_types.Part(text="Hello")], role="user" + ) + assert len(eval_case.responses) == 1 + assert eval_case.responses[0].response == genai_types.Content( + parts=[genai_types.Part(text="Hi")], role="system" + ) + assert eval_case.reference is None + assert eval_case.system_instruction is None + assert not eval_case.conversation_history + + def test_convert_with_system_instruction(self): + raw_data = [ + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Hello", "type": "text"}]} + ], + "response": [ + { + "role": "system", + "parts": [{"content": "Hi", "type": "text"}], + } + ], + "system_instruction": { + "role": "user", + "parts": [{"content": "Be helpful", "type": "text"}], + }, + } + ] + result_dataset = self.converter.convert(raw_data) + eval_case = result_dataset.eval_cases[0] + assert eval_case.system_instruction == genai_types.Content( + parts=[genai_types.Part(text="Be helpful")], role="user" + ) + + def test_convert_with_conversation_history(self): + raw_data = [ + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Hello", "type": "text"}]}, + {"role": "system", "parts": [{"content": "Hi", "type": "text"}]}, + { + "role": "user", + "parts": [ + {"content": "What's the meaning of life?", "type": "text"} + ], + }, + ], + "response": [ + { + "role": "system", + 
"parts": [{"content": "42.", "type": "text"}], + } + ], + } + ] + + result_dataset = self.converter.convert(raw_data) + eval_case = result_dataset.eval_cases[0] + + assert eval_case.prompt == genai_types.Content( + parts=[genai_types.Part(text="What's the meaning of life?")], role="user" + ) + + assert len(eval_case.conversation_history) == 2 + assert eval_case.conversation_history[0] == vertexai_genai_types.Message( + content=genai_types.Content( + parts=[genai_types.Part(text="Hello")], role="user" + ), + turn_id="0", + author="user", + ) + assert eval_case.conversation_history[1] == vertexai_genai_types.Message( + content=genai_types.Content( + parts=[genai_types.Part(text="Hi")], role="system" + ), + turn_id="1", + author="system", + ) + + def test_convert_multiple_request_response(self): + raw_data = [ + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Hello", "type": "text"}]} + ], + "response": [ + { + "role": "system", + "parts": [{"content": "Hi", "type": "text"}], + } + ], + }, + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Goodbye", "type": "text"}]} + ], + "response": [ + { + "role": "system", + "parts": [{"content": "Bye", "type": "text"}], + } + ], + }, + ] + result_dataset = self.converter.convert(raw_data) + + assert isinstance(result_dataset, vertexai_genai_types.EvaluationDataset) + assert len(result_dataset.eval_cases) == 2 + + eval_case = result_dataset.eval_cases[0] + assert eval_case.prompt == genai_types.Content( + parts=[genai_types.Part(text="Hello")], role="user" + ) + assert eval_case.responses[0].response == genai_types.Content( + parts=[genai_types.Part(text="Hi")], role="system" + ) + + eval_case = result_dataset.eval_cases[1] + assert eval_case.prompt == genai_types.Content( + parts=[genai_types.Part(text="Goodbye")], role="user" + ) + assert eval_case.responses[0].response == genai_types.Content( + parts=[genai_types.Part(text="Bye")], role="system" + ) 
+ + def test_convert_skips_unknown_part_type(self): + raw_data = [ + { + "format": "observability", + "request": [ + { + "role": "user", + "parts": [ + {"content": 123, "type": ""}, + {"content": 456}, + {"content": "Hello", "type": "text"}, + ], + } + ], + "response": [ + { + "role": "system", + "parts": [{"content": "Hi", "type": "text"}], + } + ], + } + ] + + result_dataset = self.converter.convert(raw_data) + eval_case = result_dataset.eval_cases[0] + + assert eval_case.prompt == genai_types.Content( + parts=[genai_types.Part(text="Hello")], role="user" + ) + + def test_convert_skips_missing_request(self): + raw_data = [ + { + "format": "observability", + "response": [ + { + "role": "system", + "parts": [{"content": "Hi", "type": "text"}], + } + ], + } + ] + result_dataset = self.converter.convert(raw_data) + assert not result_dataset.eval_cases + + def test_convert_skips_missing_response(self): + raw_data = [ + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Hello", "type": "text"}]} + ], + } + ] + result_dataset = self.converter.convert(raw_data) + assert not result_dataset.eval_cases + + def test_convert_tool_call_parts(self): + raw_data = [ + { + "format": "observability", + "request": [ + { + "role": "user", + "parts": [ + { + "type": "tool_call", + "id": "tool_id", + "name": "tool_name", + "arguments": {"param": "1"}, + } + ], + } + ], + "response": [ + { + "role": "system", + "parts": [ + { + "type": "tool_call_response", + "id": "tool_id", + "result": {"field": "2"}, + } + ], + } + ], + } + ] + result_dataset = self.converter.convert(raw_data) + + eval_case = result_dataset.eval_cases[0] + assert eval_case.prompt == genai_types.Content( + parts=[ + genai_types.Part( + function_call=genai_types.FunctionCall( + id="tool_id", name="tool_id", args={"param": "1"} + ) + ) + ], + role="user", + ) + assert len(eval_case.responses) == 1 + assert eval_case.responses[0].response == genai_types.Content( + parts=[ + 
genai_types.Part( + function_response=genai_types.FunctionResponse( + id="tool_id", name="tool_id", response={"field": "2"} + ) + ) + ], + role="system", + ) + + class TestMetric: """Unit tests for the Metric class.""" @@ -2953,6 +3225,26 @@ def test_auto_detect_openai_schema(self): == _evals_data_converters.EvalDatasetSchema.OPENAI ) + def test_auto_detect_observability_schema(self): + raw_data = [ + { + "format": "observability", + "request": [ + {"role": "user", "parts": [{"content": "Hello", "type": "text"}]} + ], + "response": [ + { + "role": "system", + "parts": [{"content": "Hi", "type": "text"}], + } + ], + } + ] + assert ( + _evals_data_converters.auto_detect_dataset_schema(raw_data) + == _evals_data_converters.EvalDatasetSchema.OBSERVABILITY + ) + def test_auto_detect_unknown_schema(self): raw_data = [{"foo": "bar"}] assert ( @@ -3536,13 +3828,13 @@ def test_execute_evaluation_deduplicates_candidate_names( candidate_name="gemini-pro", ) - mock_eval_dependencies[ - "mock_evaluate_instances" - ].return_value = vertexai_genai_types.EvaluateInstancesResponse( - exact_match_results=vertexai_genai_types.ExactMatchResults( - exact_match_metric_values=[ - vertexai_genai_types.ExactMatchMetricValue(score=1.0) - ] + mock_eval_dependencies["mock_evaluate_instances"].return_value = ( + vertexai_genai_types.EvaluateInstancesResponse( + exact_match_results=vertexai_genai_types.ExactMatchResults( + exact_match_metric_values=[ + vertexai_genai_types.ExactMatchMetricValue(score=1.0) + ] + ) ) ) @@ -3585,3 +3877,169 @@ def test_execute_evaluation_adds_creation_timestamp( assert result.metadata is not None assert result.metadata.creation_timestamp == mock_now + + +class TestEvaluationDataset: + """Contains set of tests for the EvaluationDataset class methods.""" + + @mock.patch.object(_evals_utils, "GcsUtils") + def test_load_from_observability_eval_cases(self, mock_gcs_utils): + """Tests that load_from_observability_eval_cases reads data from GCS.""" + + def 
read_file_contents_side_effect(src: str) -> str: + if src == "gs://project/input.json": + return "input" + elif src == "gs://project/output.json": + return "output" + elif src == "gs://project/system_instruction.json": + return "system_instruction" + else: + return "" + + mock_gcs_utils.return_value.read_file_contents.side_effect = ( + read_file_contents_side_effect + ) + + eval_cases = [ + vertexai_genai_types.ObservabilityEvalCase( + input_src="/service/gs://project/input.json", + output_src="/service/gs://project/output.json", + system_instruction_src="/service/gs://project/system_instruction.json", + ) + ] + result = ( + vertexai_genai_types.EvaluationDataset.load_from_observability_eval_cases( + eval_cases + ) + ) + + mock_gcs_utils.return_value.read_file_contents.assert_has_calls( + [ + mock.call("gs://project/input.json"), + mock.call("gs://project/output.json"), + mock.call("gs://project/system_instruction.json"), + ], + any_order=True, + ) + assert result.eval_dataset_df is not None + pd.testing.assert_frame_equal( + result.eval_dataset_df, + pd.DataFrame( + { + "format": ["observability"], + "request": ["input"], + "response": ["output"], + "system_instruction": ["system_instruction"], + } + ), + ) + + @mock.patch.object(_evals_utils, "GcsUtils") + def test_load_from_observability_eval_cases_no_system_instruction( + self, mock_gcs_utils + ): + """Tests load_from_observability_eval_cases works without system_instruction.""" + + def read_file_contents_side_effect(src: str) -> str: + if src == "gs://project/input.json": + return "input" + elif src == "gs://project/output.json": + return "output" + elif src == "gs://project/system_instruction.json": + return "system_instruction" + else: + return "" + + mock_gcs_utils.return_value.read_file_contents.side_effect = ( + read_file_contents_side_effect + ) + + eval_cases = [ + vertexai_genai_types.ObservabilityEvalCase( + input_src="/service/gs://project/input.json", + 
output_src="/service/gs://project/output.json", + ) + ] + result = ( + vertexai_genai_types.EvaluationDataset.load_from_observability_eval_cases( + eval_cases + ) + ) + + mock_gcs_utils.return_value.read_file_contents.assert_has_calls( + [ + mock.call("gs://project/input.json"), + mock.call("gs://project/output.json"), + ], + any_order=True, + ) + assert result.eval_dataset_df is not None + pd.testing.assert_frame_equal( + result.eval_dataset_df, + pd.DataFrame( + { + "format": ["observability"], + "request": ["input"], + "response": ["output"], + "system_instruction": [""], + } + ), + ) + + @mock.patch.object(_evals_utils, "GcsUtils") + def test_load_from_observability_eval_cases_multiple_cases(self, mock_gcs_utils): + """Test load_from_observability_eval_cases can handle multiple cases.""" + + def read_file_contents_side_effect(src: str) -> str: + if src == "gs://project/input_1.json": + return "input_1" + elif src == "gs://project/input_2.json": + return "input_2" + elif src == "gs://project/output_1.json": + return "output_1" + elif src == "gs://project/output_2.json": + return "output_2" + elif src == "gs://project/system_instruction_1.json": + return "system_instruction_1" + elif src == "gs://project/system_instruction_2.json": + return "system_instruction_2" + else: + return "" + + mock_gcs_utils.return_value.read_file_contents.side_effect = ( + read_file_contents_side_effect + ) + + eval_cases = [ + vertexai_genai_types.ObservabilityEvalCase( + input_src="/service/gs://project/input_1.json", + output_src="/service/gs://project/output_1.json", + system_instruction_src="/service/gs://project/system_instruction_1.json", + ), + vertexai_genai_types.ObservabilityEvalCase( + input_src="/service/gs://project/input_2.json", + output_src="/service/gs://project/output_2.json", + system_instruction_src="/service/gs://project/system_instruction_2.json", + ), + ] + result = ( + vertexai_genai_types.EvaluationDataset.load_from_observability_eval_cases( + eval_cases + ) + 
) + + assert result.eval_dataset_df is not None + pd.testing.assert_frame_equal( + result.eval_dataset_df, + pd.DataFrame( + { + "format": ["observability", "observability"], + "request": ["input_1", "input_2"], + "response": ["output_1", "output_2"], + "system_instruction": [ + "system_instruction_1", + "system_instruction_2", + ], + } + ), + ) diff --git a/tests/unit/vertexai/tuning/test_tuning.py b/tests/unit/vertexai/tuning/test_tuning.py index 98414a9452..cb8105d8c3 100644 --- a/tests/unit/vertexai/tuning/test_tuning.py +++ b/tests/unit/vertexai/tuning/test_tuning.py @@ -34,7 +34,9 @@ from google.cloud.aiplatform.metadata import experiment_resources from google.cloud.aiplatform_v1beta1.services import gen_ai_tuning_service from google.cloud.aiplatform_v1beta1.types import job_state -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job, +) from vertexai.preview import tuning from vertexai.preview.tuning import ( sft as preview_supervised_tuning, @@ -43,7 +45,9 @@ from vertexai.tuning import _distillation from vertexai.tuning import sft as supervised_tuning from google.cloud import storage -from vertexai.preview.tuning._tuning import TuningJob as PreviewTuningJob +from vertexai.preview.tuning._tuning import ( + TuningJob as PreviewTuningJob, +) import pytest diff --git a/vertexai/_genai/_agent_engines_utils.py b/vertexai/_genai/_agent_engines_utils.py index ae3da1de76..f76bf88c2a 100644 --- a/vertexai/_genai/_agent_engines_utils.py +++ b/vertexai/_genai/_agent_engines_utils.py @@ -15,6 +15,7 @@ """Utility functions for agent engines.""" import abc +import asyncio from importlib import metadata as importlib_metadata import inspect import io @@ -43,6 +44,8 @@ Union, ) +import httpx + import proto from google.api_core import exceptions @@ -103,11 +106,38 @@ Session = Any +try: + from a2a.types import ( + AgentCard, + TransportProtocol, + Message, + 
TaskIdParams, + TaskQueryParams, + ) + from a2a.client import ClientConfig, ClientFactory + + AgentCard = AgentCard + TransportProtocol = TransportProtocol + Message = Message + ClientConfig = ClientConfig + ClientFactory = ClientFactory + TaskIdParams = TaskIdParams + TaskQueryParams = TaskQueryParams +except (ImportError, AttributeError): + AgentCard = None + TransportProtocol = None + Message = None + ClientConfig = None + ClientFactory = None + TaskIdParams = None + TaskQueryParams = None + _ACTIONS_KEY = "actions" _ACTION_APPEND = "append" _AGENT_FRAMEWORK_ATTR = "agent_framework" _ASYNC_API_MODE = "async" _ASYNC_STREAM_API_MODE = "async_stream" +_BIDI_STREAM_API_MODE = "bidi_stream" _BASE_MODULES = set(_BUILTIN_MODULE_NAMES + tuple(_STDLIB_MODULE_NAMES)) _BLOB_FILENAME = "agent_engine.pkl" _DEFAULT_AGENT_FRAMEWORK = "custom" @@ -132,6 +162,7 @@ _DEFAULT_STREAM_METHOD_RETURN_TYPE = "Iterable[Any]" _DEFAULT_REQUIRED_PACKAGES = frozenset(["cloudpickle", "pydantic"]) _DEFAULT_STREAM_METHOD_NAME = "stream_query" +_DEFAULT_BIDI_STREAM_METHOD_NAME = "bidi_stream_query" _EXTRA_PACKAGES_FILE = "dependencies.tar.gz" _FAILED_TO_REGISTER_API_METHODS_WARNING_TEMPLATE = ( "Failed to register API methods. 
Please follow the guide to " @@ -145,6 +176,8 @@ _REQUIREMENTS_FILE = "requirements.txt" _STANDARD_API_MODE = "" _STREAM_API_MODE = "stream" +_A2A_EXTENSION_MODE = "a2a_extension" +_A2A_AGENT_CARD = "a2a_agent_card" _WARNINGS_KEY = "warnings" _WARNING_MISSING = "missing" _WARNING_INCOMPATIBLE = "incompatible" @@ -202,6 +235,15 @@ def stream_query(self, **kwargs) -> Iterator[Any]: # type: ignore[no-untyped-de """Stream responses to serve the user query.""" +@typing.runtime_checkable +class BidiStreamQueryable(Protocol): + """Protocol for Agent Engines that can stream requests and responses.""" + + @abc.abstractmethod + async def bidi_stream_query(self, input_queue: asyncio.Queue) -> AsyncIterator[Any]: + """Stream requests and responses to serve the user queries.""" + + @typing.runtime_checkable class Cloneable(Protocol): """Protocol for Agent Engines that can be cloned.""" @@ -234,6 +276,7 @@ def register_operations(self, **kwargs) -> Dict[str, Sequence[str]]: OperationRegistrable, Queryable, StreamQueryable, + BidiStreamQueryable, ] @@ -367,14 +410,14 @@ def _await_operation( *, operation_name: str, get_operation_fn: GetOperationFunction, - poll_interval_seconds: int = 10, + poll_interval_seconds: float = 10, ) -> Any: """Waits for the operation for creating an agent engine to complete. Args: operation_name (str): Required. The name of the operation for creating the Agent Engine. - poll_interval_seconds (int): + poll_interval_seconds (float): The number of seconds to wait between each poll. get_operation_fn (Callable[[str], Any]): Optional. The function to use for getting the operation. 
If not @@ -493,11 +536,32 @@ def _generate_class_methods_spec_or_raise( class_method = _to_proto(schema_dict) class_method[_MODE_KEY_IN_SCHEMA] = mode + if hasattr(agent, "agent_card"): + class_method[_A2A_AGENT_CARD] = getattr( + agent, "agent_card" + ).model_dump_json() class_methods_spec.append(class_method) return class_methods_spec +def _is_pydantic_serializable(param: inspect.Parameter) -> bool: + """Checks if the parameter is pydantic serializable.""" + + if param.annotation == inspect.Parameter.empty: + return True + + if isinstance(param.annotation, str): + return False + + pydantic = _import_pydantic_or_raise() + try: + pydantic.TypeAdapter(param.annotation) + return True + except Exception: + return False + + def _generate_schema( f: Callable[..., Any], *, @@ -557,6 +621,9 @@ def _generate_schema( inspect.Parameter.KEYWORD_ONLY, inspect.Parameter.POSITIONAL_ONLY, ) + # For a bidi endpoint, it requires an asyncio.Queue as the input, but + # it is not JSON serializable. We hence exclude it from the schema. + and param.annotation != asyncio.Queue and _is_pydantic_serializable(param) } parameters = pydantic.create_model(f.__name__, **fields_dict).schema() # Postprocessing @@ -656,6 +723,8 @@ def _get_registered_operations( operations[_STREAM_API_MODE] = [_DEFAULT_STREAM_METHOD_NAME] if isinstance(agent, AsyncStreamQueryable): operations[_ASYNC_STREAM_API_MODE] = [_DEFAULT_ASYNC_STREAM_METHOD_NAME] + if isinstance(agent, BidiStreamQueryable): + operations[_BIDI_STREAM_API_MODE] = [_DEFAULT_BIDI_STREAM_METHOD_NAME] return operations @@ -839,6 +908,10 @@ def _register_api_methods_or_raise( f" contain an `{_MODE_KEY_IN_SCHEMA}` field." ) api_mode = operation_schema.get(_MODE_KEY_IN_SCHEMA) + # For bidi stream api mode, we don't need to wrap the operation. 
+ if api_mode == _BIDI_STREAM_API_MODE: + continue + if _METHOD_NAME_KEY_IN_SCHEMA not in operation_schema: raise ValueError( f"Operation schema {operation_schema} does not" @@ -868,6 +941,7 @@ def _register_api_methods_or_raise( _ASYNC_API_MODE: _wrap_async_query_operation, _STREAM_API_MODE: _wrap_stream_query_operation, _ASYNC_STREAM_API_MODE: _wrap_async_stream_query_operation, + _A2A_EXTENSION_MODE: _wrap_a2a_operation, } if isinstance(wrap_operation_fn, dict) and api_mode in wrap_operation_fn: # Override the default function with user-specified function if it exists. @@ -884,7 +958,13 @@ def _register_api_methods_or_raise( ) # Bind the method to the object. - method = _wrap_operation(method_name=method_name) # type: ignore[call-arg] + if api_mode == _A2A_EXTENSION_MODE: + agent_card = operation_schema.get(_A2A_AGENT_CARD) + method = _wrap_operation( + method_name=method_name, agent_card=agent_card + ) # type: ignore[call-arg] + else: + method = _wrap_operation(method_name=method_name) # type: ignore[call-arg] method.__name__ = method_name if method_description and isinstance(method_description, str): method.__doc__ = method_description @@ -1212,6 +1292,7 @@ def _validate_agent_or_raise( * a callable method named `query` * a callable method named `stream_query` * a callable method named `async_stream_query` + * a callable method named `bidi_stream_query` * a callable method named `register_operations` Args: @@ -1246,6 +1327,9 @@ def _validate_agent_or_raise( is_async_stream_queryable = isinstance(agent, AsyncStreamQueryable) and callable( agent.async_stream_query ) + is_bidi_stream_queryable = isinstance(agent, BidiStreamQueryable) and callable( + agent.bidi_stream_query + ) is_operation_registrable = isinstance(agent, OperationRegistrable) and callable( agent.register_operations ) @@ -1255,12 +1339,13 @@ def _validate_agent_or_raise( or is_async_queryable or is_stream_queryable or is_operation_registrable + or is_bidi_stream_queryable or 
is_async_stream_queryable ): raise TypeError( "agent_engine has none of the following callable methods: " - "`query`, `async_query`, `stream_query`, `async_stream_query` or " - "`register_operations`." + "`query`, `async_query`, `stream_query`, `async_stream_query`, " + "`bidi_stream_query`, or `register_operations`." ) if is_queryable: @@ -1299,6 +1384,15 @@ def _validate_agent_or_raise( " missing `self` argument in the agent.async_stream_query method." ) from err + if is_bidi_stream_queryable: + try: + inspect.signature(getattr(agent, "bidi_stream_query")) + except ValueError as err: + raise ValueError( + "Invalid bidi_stream_query signature. This might be due to a " + " missing `self` argument in the agent.bidi_stream_query method." + ) from err + if is_operation_registrable: try: inspect.signature(getattr(agent, "register_operations")) @@ -1327,6 +1421,19 @@ def _method(self, **kwargs) -> Any: # type: ignore[no-untyped-def] return _method +AgentEngineOperationUnion = Union[ + genai_types.AgentEngineOperation, + genai_types.AgentEngineMemoryOperation, + genai_types.AgentEngineGenerateMemoriesOperation, +] + + +class GetOperationFunction(Protocol): + def __call__( + self, *, operation_name: str, **kwargs + ) -> AgentEngineOperationUnion: ... + + def _wrap_query_operation(*, method_name: str) -> Callable[..., Any]: """Wraps an Agent Engine method, creating a callable for `query` API. @@ -1473,6 +1580,125 @@ async def _method(self: genai_types.AgentEngine, **kwargs) -> AsyncIterator[Any] return _method +def _wrap_a2a_operation(method_name: str, agent_card: str) -> Callable[..., list]: + """Wraps an Agent Engine method, creating a callable for A2A API. + + Args: + method_name: The name of the Agent Engine method to call. + agent_card: The agent card to use for the A2A API call. 
+ Example: + {'additionalInterfaces': None, + 'capabilities': {'extensions': None, + 'pushNotifications': None, + 'stateTransitionHistory': None, + 'streaming': False}, + 'defaultInputModes': ['text'], + 'defaultOutputModes': ['text'], + 'description': ( + 'A helpful assistant agent that can answer questions.' + ), + 'documentationUrl': None, + 'iconUrl': None, + 'name': 'Q&A Agent', + 'preferredTransport': 'JSONRPC', + 'protocolVersion': '0.3.0', + 'provider': None, + 'security': None, + 'securitySchemes': None, + 'signatures': None, + 'skills': [{ + 'description': ( + 'A helpful assistant agent that can answer questions.' + ), + 'examples': ['Who is leading 2025 F1 Standings?', + 'Where can i find an active volcano?'], + 'id': 'question_answer', + 'inputModes': None, + 'name': 'Q&A Agent', + 'outputModes': None, + 'security': None, + 'tags': ['Question-Answer']}], + 'supportsAuthenticatedExtendedCard': True, + 'url': 'http://localhost:8080/', + 'version': '1.0.0'} + Returns: + A callable object that executes the method on the Agent Engine via + the A2A API. + """ + + async def _method(self, **kwargs) -> Any: + """Wraps an Agent Engine method, creating a callable for A2A API.""" + if not self.api_client: + raise ValueError("api_client is not initialized.") + if not self.api_resource: + raise ValueError("api_resource is not initialized.") + a2a_agent_card = AgentCard(**json.loads(agent_card)) + # A2A + AE integration currently only supports Rest API. + if ( + a2a_agent_card.preferred_transport + and a2a_agent_card.preferred_transport != TransportProtocol.http_json + ): + raise ValueError( + "Only HTTP+JSON is supported for preferred transport on agent card " + ) + + # Set preferred transport to HTTP+JSON if not set. + if not hasattr(a2a_agent_card, "preferred_transport"): + a2a_agent_card.preferred_transport = TransportProtocol.http_json + + # AE cannot support streaming yet. Turn off streaming for now. 
+ if a2a_agent_card.capabilities and a2a_agent_card.capabilities.streaming: + raise ValueError( + "Streaming is not supported in Agent Engine, please change " + "a2a_agent_card.capabilities.streaming to False." + ) + + if not hasattr(a2a_agent_card.capabilities, "streaming"): + a2a_agent_card.capabilities.streaming = False + + # agent_card is set on the class_methods before set_up is invoked. + # Ensure that the agent_card url is set correctly before the client is created. + base_url = self.api_client._api_client._http_options.base_url.rstrip("/") + api_version = self.api_client._api_client._http_options.api_version + a2a_agent_card.url = f"{base_url}/{api_version}/{self.api_resource.name}/a2a" + + # Using a2a client, inject the auth token from the global config. + config = ClientConfig( + supported_transports=[ + TransportProtocol.http_json, + ], + use_client_preference=True, + httpx_client=httpx.AsyncClient( + headers={ + "Authorization": ( + f"Bearer {self.api_client._api_client._credentials.token}" + ) + } + ), + ) + factory = ClientFactory(config) + client = factory.create(a2a_agent_card) + + if method_name == "on_message_send": + response = client.send_message(Message(**kwargs)) + chunks = [] + async for chunk in response: + chunks.append(chunk) + return chunks + elif method_name == "on_get_task": + response = await client.get_task(TaskQueryParams(**kwargs)) + elif method_name == "on_cancel_task": + response = await client.cancel_task(TaskIdParams(**kwargs)) + elif method_name == "handle_authenticated_agent_card": + response = await client.get_card() + else: + raise ValueError(f"Unknown method name: {method_name}") + + return response + + return _method + + def _yield_parsed_json(http_response: google_genai_types.HttpResponse) -> Iterator[Any]: """Converts the body of the HTTP Response message to JSON format. 
diff --git a/vertexai/_genai/_evals_data_converters.py b/vertexai/_genai/_evals_data_converters.py index 85e14ad351..40b588ac58 100644 --- a/vertexai/_genai/_evals_data_converters.py +++ b/vertexai/_genai/_evals_data_converters.py @@ -14,7 +14,6 @@ # """Dataset converters for evals.""" -import abc import json import logging from typing import Any, Optional, Union @@ -23,6 +22,8 @@ from google.genai import types as genai_types from typing_extensions import override +from . import _evals_utils +from . import _observability_data_converter from . import types @@ -35,18 +36,10 @@ class EvalDatasetSchema(_common.CaseInSensitiveEnum): GEMINI = "gemini" FLATTEN = "flatten" OPENAI = "openai" + OBSERVABILITY = "observability" UNKNOWN = "unknown" -class _EvalDataConverter(abc.ABC): - """Abstract base class for dataset converters.""" - - @abc.abstractmethod - def convert(self, raw_data: Any) -> types.EvaluationDataset: - """Converts a loaded raw dataset into an EvaluationDataset.""" - raise NotImplementedError() - - _PLACEHOLDER_RESPONSE_TEXT = "Error: Missing response for this candidate" @@ -59,12 +52,10 @@ def _create_placeholder_response_candidate( ) -class _GeminiEvalDataConverter(_EvalDataConverter): +class _GeminiEvalDataConverter(_evals_utils.EvalDataConverter): """Converter for dataset in the Gemini format.""" - def _parse_request( - self, request_data: dict[str, Any] - ) -> tuple[ + def _parse_request(self, request_data: dict[str, Any]) -> tuple[ genai_types.Content, genai_types.Content, list[types.Message], @@ -185,7 +176,7 @@ def convert(self, raw_data: list[dict[str, Any]]) -> types.EvaluationDataset: return types.EvaluationDataset(eval_cases=eval_cases) -class _FlattenEvalDataConverter(_EvalDataConverter): +class _FlattenEvalDataConverter(_evals_utils.EvalDataConverter): """Converter for datasets in a structured table format.""" def convert(self, raw_data: list[dict[str, Any]]) -> types.EvaluationDataset: @@ -296,9 +287,11 @@ def convert(self, raw_data: 
list[dict[str, Any]]) -> types.EvaluationDataset: if isinstance(value, list): try: validated_rubrics = [ - types.Rubric.model_validate(r) - if isinstance(r, dict) - else r + ( + types.Rubric.model_validate(r) + if isinstance(r, dict) + else r + ) for r in value ] if all( @@ -353,12 +346,10 @@ def convert(self, raw_data: list[dict[str, Any]]) -> types.EvaluationDataset: return types.EvaluationDataset(eval_cases=eval_cases) -class _OpenAIDataConverter(_EvalDataConverter): +class _OpenAIDataConverter(_evals_utils.EvalDataConverter): """Converter for dataset in OpenAI's Chat Completion format.""" - def _parse_messages( - self, messages: list[dict[str, Any]] - ) -> tuple[ + def _parse_messages(self, messages: list[dict[str, Any]]) -> tuple[ Optional[genai_types.Content], list[types.Message], Optional[genai_types.Content], @@ -503,6 +494,11 @@ def auto_detect_dataset_schema( first_item = raw_dataset[0] keys = set(first_item.keys()) + if "format" in keys: + format_content = first_item.get("format", "") + if isinstance(format_content, str) and format_content == "observability": + return EvalDatasetSchema.OBSERVABILITY + if "request" in keys and "response" in keys: request_content = first_item.get("request", {}) if isinstance(request_content, dict) and "contents" in request_content: @@ -540,12 +536,13 @@ def auto_detect_dataset_schema( EvalDatasetSchema.GEMINI: _GeminiEvalDataConverter, EvalDatasetSchema.FLATTEN: _FlattenEvalDataConverter, EvalDatasetSchema.OPENAI: _OpenAIDataConverter, + EvalDatasetSchema.OBSERVABILITY: _observability_data_converter.ObservabilityDataConverter, } def get_dataset_converter( dataset_schema: EvalDatasetSchema, -) -> _EvalDataConverter: +) -> _evals_utils.EvalDataConverter: """Returns the appropriate dataset converter for the given schema.""" if dataset_schema in _CONVERTER_REGISTRY: return _CONVERTER_REGISTRY[dataset_schema]() # type: ignore[abstract] diff --git a/vertexai/_genai/_evals_metric_handlers.py 
b/vertexai/_genai/_evals_metric_handlers.py index 0e3133ecf3..870124d9b4 100644 --- a/vertexai/_genai/_evals_metric_handlers.py +++ b/vertexai/_genai/_evals_metric_handlers.py @@ -472,9 +472,11 @@ def _build_rubric_based_input( rubrics_list = [] rubric_enhanced_contents = { - "prompt": [eval_case.prompt.model_dump(mode="json", exclude_none=True)] - if eval_case.prompt - else None, + "prompt": ( + [eval_case.prompt.model_dump(mode="json", exclude_none=True)] + if eval_case.prompt + else None + ), "response": [response_content.model_dump(mode="json", exclude_none=True)], "rubric_groups": { self.metric.rubric_group_name: { @@ -566,9 +568,9 @@ def _build_pointwise_input( "return_raw_output": self.metric.return_raw_output } if self.metric.judge_model_system_instruction: - metric_spec_payload[ - "system_instruction" - ] = self.metric.judge_model_system_instruction + metric_spec_payload["system_instruction"] = ( + self.metric.judge_model_system_instruction + ) return { "pointwise_metric_input": { diff --git a/vertexai/_genai/_evals_utils.py b/vertexai/_genai/_evals_utils.py index 99318f5753..2c5c478e66 100644 --- a/vertexai/_genai/_evals_utils.py +++ b/vertexai/_genai/_evals_utils.py @@ -14,6 +14,7 @@ # """Utility functions for evals.""" +import abc import io import json import logging @@ -350,9 +351,9 @@ def _get_latest_version_uri(self, api_client: Any, metric_gcs_dir: str) -> str: blobs = gcs_utils.storage_client.list_blobs(bucket_name, prefix=prefix) - version_files: list[ - dict[str, Union[list[int], str]] - ] = [] # {'version_parts': [1,0,0], 'filename': 'v1.0.0.yaml'} + version_files: list[dict[str, Union[list[int], str]]] = ( + [] + ) # {'version_parts': [1,0,0], 'filename': 'v1.0.0.yaml'} version_pattern = re.compile( r"v(\d+)(?:\.(\d+))?(?:\.(\d+))?\.(yaml|yml|json)$", re.IGNORECASE @@ -822,3 +823,12 @@ def prepare_metric_payload( resolved_metrics, set_default_aggregation_metrics=True ) return request_dict + + +class EvalDataConverter(abc.ABC): + """Abstract 
base class for dataset converters.""" + + @abc.abstractmethod + def convert(self, raw_data: Any) -> types.EvaluationDataset: + """Converts a loaded raw dataset into an EvaluationDataset.""" + raise NotImplementedError() diff --git a/vertexai/_genai/_evals_visualization.py b/vertexai/_genai/_evals_visualization.py index 5134e7dffa..477b6b40ee 100644 --- a/vertexai/_genai/_evals_visualization.py +++ b/vertexai/_genai/_evals_visualization.py @@ -704,9 +704,11 @@ def display_evaluation_dataset(eval_dataset_obj: types.EvaluationDataset) -> Non if isinstance(cell_value, dict): processed_row[col_name] = { k: [ - v_item.model_dump(mode="json") - if hasattr(v_item, "model_dump") - else v_item + ( + v_item.model_dump(mode="json") + if hasattr(v_item, "model_dump") + else v_item + ) for v_item in v ] for k, v in cell_value.items() diff --git a/vertexai/_genai/_observability_data_converter.py b/vertexai/_genai/_observability_data_converter.py new file mode 100644 index 0000000000..9eb43ae411 --- /dev/null +++ b/vertexai/_genai/_observability_data_converter.py @@ -0,0 +1,203 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Dataset converter for Google Observability GenAI data.""" + +import json +import logging +from typing import Any, Optional + +from google.genai import types as genai_types +from typing_extensions import override + +from . import _evals_utils +from . 
import types + + +logger = logging.getLogger("vertexai_genai._observability_data_converters") + + +class ObservabilityDataConverter(_evals_utils.EvalDataConverter): + """Converter for dataset in GCP Observability GenAI format.""" + + def _message_to_content(self, message: dict[str, Any]) -> genai_types.Content: + """Converts Observability GenAI Message format to Content.""" + parts = [] + message_parts = message.get("parts", []) + if isinstance(message_parts, list): + for message_part in message_parts: + part = None + part_type = message_part.get("type", "") + if part_type == "text": + part = genai_types.Part(text=message_part.get("content", "")) + elif part_type == "blob": + part = genai_types.Part( + inline_data=genai_types.Blob( + data=message_part.get("data", ""), + mime_type=message_part.get("mime_type", ""), + ) + ) + elif part_type == "file_data": + part = genai_types.Part( + file_data=genai_types.FileData( + file_uri=message_part.get("file_uri", ""), + mime_type=message_part.get("mime_type", ""), + ) + ) + elif part_type == "tool_call": + # O11y format requires use of id in place of name + part = genai_types.Part( + function_call=genai_types.FunctionCall( + id=message_part.get("id", ""), + name=message_part.get("id", ""), + args=message_part.get("arguments", {}), + ) + ) + elif part_type == "tool_call_response": + # O11y format requires use of id in place of name + part = genai_types.Part( + function_response=genai_types.FunctionResponse( + id=message_part.get("id", ""), + name=message_part.get("id", ""), + response=message_part.get("result", {}), + ) + ) + else: + logger.warning( + "Skipping message part due to unrecognized message " + "part type of '%s'", + part_type, + ) + + if part is not None: + parts.append(part) + + return genai_types.Content(parts=parts, role=message.get("role", "")) + + def _parse_messages( + self, + eval_case_id: str, + request_msgs: list[Any], + response_msgs: list[Any], + system_instruction_msg: Optional[dict[str, Any]] = None, 
+ ) -> types.EvalCase: + """Parses a set of Observability messages into an EvalCase.""" + # System instruction message + system_instruction = None + if system_instruction_msg is not None: + system_instruction = self._message_to_content(system_instruction_msg) + + # Request messages + prompt = None + conversation_history = [] + if request_msgs: + # Extract latest message as prompt + prompt = self._message_to_content(request_msgs[-1]) + + # All previous messages are conversation history + if len(request_msgs) > 1: + for i, msg in enumerate(request_msgs[:-1]): + conversation_history.append( + types.Message( + turn_id=str(i), + content=self._message_to_content(msg), + author=msg.get("role", ""), + ) + ) + + # Output messages + responses = [] + for msg in response_msgs: + response = types.ResponseCandidate(response=self._message_to_content(msg)) + responses.append(response) + + return types.EvalCase( + eval_case_id=eval_case_id, + prompt=prompt, + responses=responses, + system_instruction=system_instruction, + conversation_history=conversation_history, + reference=None, + ) + + def _load_json_dict(self, data: Any, case_id: str) -> dict[str, Any]: + """Parses the raw data into a dict if possible.""" + if isinstance(data, str): + loaded_json = json.loads(data) + if isinstance(loaded_json, dict): + return loaded_json + else: + raise TypeError( + f"Decoded JSON payload is not a dictionary for case " + f"{case_id}. Type found: {type(loaded_json).__name__}" + ) + elif isinstance(data, dict): + return data + else: + raise TypeError( + f"Payload is not a dictionary for case {case_id}. Type found: " + f"{type(data).__name__}" + ) + + def _load_json_list(self, data: Any, case_id: str) -> list[Any]: + """Parses the raw data into a list if possible.""" + if isinstance(data, str): + loaded_json = json.loads(data) + if isinstance(loaded_json, list): + return loaded_json + else: + raise TypeError( + f"Decoded JSON payload is not a list for case " + f"{case_id}. 
Type found: {type(loaded_json).__name__}" + ) + elif isinstance(data, list): + return data + else: + raise TypeError( + f"Payload is not a list for case {case_id}. Type found: " + f"{type(data).__name__}" + ) + + @override + def convert(self, raw_data: list[dict[str, Any]]) -> types.EvaluationDataset: + """Converts a list of GCP Observability GenAI cases into an EvaluationDataset.""" + eval_cases = [] + + for i, case in enumerate(raw_data): + eval_case_id = f"observability_eval_case_{i}" + + if "request" not in case or "response" not in case: + logger.warning( + "Skipping case %s due to missing 'request' or 'response' key.", + eval_case_id, + ) + continue + + request_data = case.get("request", []) + request_list = self._load_json_list(request_data, eval_case_id) + + response_data = case.get("response", []) + response_list = self._load_json_list(response_data, eval_case_id) + + system_dict = None + if "system_instruction" in case: + system_data = case.get("system_instruction", {}) + system_dict = self._load_json_dict(system_data, eval_case_id) + + eval_case = self._parse_messages( + eval_case_id, request_list, response_list, system_dict + ) + eval_cases.append(eval_case) + + return types.EvaluationDataset(eval_cases=eval_cases) diff --git a/vertexai/_genai/_prompt_management_utils.py b/vertexai/_genai/_prompt_management_utils.py new file mode 100644 index 0000000000..2aa613e5fa --- /dev/null +++ b/vertexai/_genai/_prompt_management_utils.py @@ -0,0 +1,121 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Utility functions for prompt management.""" + +from typing import Optional + +from google.genai import types as genai_types + +from . import types + + +DEFAULT_API_SCHEMA_VERSION = "1.0.0" +PROMPT_SCHEMA_URI = ( + "gs://google-cloud-aiplatform/schema/dataset/metadata/text_prompt_1.0.0.yaml" +) +PROMPT_TYPE = "multimodal_freeform" + + +def _create_dataset_metadata_from_prompt( + prompt: types.Prompt, + variables: Optional[list[dict[str, genai_types.Part]]] = None, +) -> types.SchemaTextPromptDatasetMetadata: + """Convert a types.Prompt into types.SchemaTextPromptDatasetMetadata.""" + + prompt_metadata = types.SchemaTextPromptDatasetMetadata() + + prompt_api_schema = types.SchemaPromptApiSchema() + prompt_api_schema.multimodal_prompt = types.SchemaPromptSpecMultimodalPrompt( + prompt_message=prompt.prompt_data + ) + + prompt_api_schema.api_schema_version = DEFAULT_API_SCHEMA_VERSION + + prompt_metadata.has_prompt_variable = bool(variables) + + if variables: + prompt_execution_list = [] + for prompt_var in variables: + prompt_instance_execution = types.SchemaPromptInstancePromptExecution() + prompt_instance_execution.arguments = {} + for key, val in prompt_var.items(): + prompt_instance_execution.arguments[key] = ( + types.SchemaPromptInstanceVariableValue( + part_list=types.SchemaPromptSpecPartList(parts=[val]) + ) + ) + prompt_execution_list.append(prompt_instance_execution) + prompt_api_schema.executions = prompt_execution_list + + # Need to exclude variables from the prompt message as it is a client side + # only field + if prompt_api_schema.multimodal_prompt.prompt_message: + prompt_message_dict = ( + prompt_api_schema.multimodal_prompt.prompt_message.model_dump( + exclude=["variables"], exclude_none=True + ) + ) + prompt_api_schema.multimodal_prompt.prompt_message = ( + types.SchemaPromptSpecPromptMessage(**prompt_message_dict) + ) + 
prompt_metadata.prompt_api_schema = prompt_api_schema + + prompt_metadata.prompt_type = PROMPT_TYPE + + return prompt_metadata + + +def _create_prompt_from_dataset_metadata( + dataset: types.Dataset, +) -> types.Prompt: + """Constructs a types.Prompt from a types.Dataset resource returned from the API. + + Args: + dataset: The types.Dataset object containing the prompt metadata. + + Returns: + A types.Prompt object reconstructed from the dataset metadata. + """ + if ( + not hasattr(dataset, "metadata") + or dataset.metadata is None + or not isinstance(dataset.metadata, types.SchemaTextPromptDatasetMetadata) + ): + raise ValueError( + "Error retrieving prompt: prompt dataset resource is missing 'metadata'." + ) + api_schema = dataset.metadata.prompt_api_schema + prompt = types.Prompt() + + if api_schema.multimodal_prompt: + + prompt_message = api_schema.multimodal_prompt.prompt_message + prompt.prompt_data = prompt_message + + executions = api_schema.executions + if executions: + prompt.prompt_data.variables = [] + for execution in executions: + if execution.arguments: + args = execution.arguments + var_map = {} + for key, val in args.items(): + part_list = val.part_list.parts + if part_list and part_list[0].text: + var_map[key] = part_list[0] + if var_map: + prompt.prompt_data.variables.append(var_map) + + return prompt diff --git a/vertexai/_genai/agent_engines.py b/vertexai/_genai/agent_engines.py index 740d4495b5..6b58f044c0 100644 --- a/vertexai/_genai/agent_engines.py +++ b/vertexai/_genai/agent_engines.py @@ -25,6 +25,7 @@ from google.genai import _api_module from google.genai import _common +from google.genai import types as genai_types from google.genai._common import get_value_by_path as getv from google.genai._common import set_value_by_path as setv from google.genai.pagers import Pager @@ -42,31 +43,19 @@ def _ReasoningEngineSpec_to_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["agent_framework"]) is not None: - setv( 
- to_object, - ["agentFramework"], - getv(from_object, ["agent_framework"]), - ) + setv(to_object, ["agentFramework"], getv(from_object, ["agent_framework"])) if getv(from_object, ["class_methods"]) is not None: setv(to_object, ["classMethods"], getv(from_object, ["class_methods"])) if getv(from_object, ["deployment_spec"]) is not None: - setv( - to_object, - ["deploymentSpec"], - getv(from_object, ["deployment_spec"]), - ) + setv(to_object, ["deploymentSpec"], getv(from_object, ["deployment_spec"])) if getv(from_object, ["package_spec"]) is not None: setv(to_object, ["packageSpec"], getv(from_object, ["package_spec"])) if getv(from_object, ["service_account"]) is not None: - setv( - to_object, - ["serviceAccount"], - getv(from_object, ["service_account"]), - ) + setv(to_object, ["serviceAccount"], getv(from_object, ["service_account"])) return to_object @@ -77,11 +66,7 @@ def _ReasoningEngineContextSpec_to_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["memory_bank_config"]) is not None: - setv( - to_object, - ["memoryBankConfig"], - getv(from_object, ["memory_bank_config"]), - ) + setv(to_object, ["memoryBankConfig"], getv(from_object, ["memory_bank_config"])) return to_object @@ -93,32 +78,17 @@ def _PscInterfaceConfig_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["dns_peering_configs"]) is not None: setv( - to_object, - ["dnsPeeringConfigs"], - getv(from_object, ["dns_peering_configs"]), + to_object, ["dnsPeeringConfigs"], getv(from_object, ["dns_peering_configs"]) ) if getv(from_object, ["network_attachment"]) is not None: setv( - to_object, - ["networkAttachment"], - getv(from_object, ["network_attachment"]), + to_object, ["networkAttachment"], getv(from_object, ["network_attachment"]) ) return to_object -def _EncryptionSpec_to_vertex( - from_object: Union[dict[str, Any], object], - parent_object: Optional[dict[str, Any]] = None, -) -> dict[str, Any]: - to_object: dict[str, Any] = {} - if getv(from_object, 
["kms_key_name"]) is not None: - setv(to_object, ["kmsKeyName"], getv(from_object, ["kms_key_name"])) - - return to_object - - def _CreateAgentEngineConfig_to_vertex( from_object: Union[dict[str, Any], object], parent_object: Optional[dict[str, Any]] = None, @@ -157,13 +127,7 @@ def _CreateAgentEngineConfig_to_vertex( ) if getv(from_object, ["encryption_spec"]) is not None: - setv( - parent_object, - ["encryptionSpec"], - _EncryptionSpec_to_vertex( - getv(from_object, ["encryption_spec"]), to_object - ), - ) + setv(parent_object, ["encryptionSpec"], getv(from_object, ["encryption_spec"])) return to_object @@ -223,18 +187,10 @@ def _ListAgentEngineConfig_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["page_size"]) is not None: - setv( - parent_object, - ["_query", "pageSize"], - getv(from_object, ["page_size"]), - ) + setv(parent_object, ["_query", "pageSize"], getv(from_object, ["page_size"])) if getv(from_object, ["page_token"]) is not None: - setv( - parent_object, - ["_query", "pageToken"], - getv(from_object, ["page_token"]), - ) + setv(parent_object, ["_query", "pageToken"], getv(from_object, ["page_token"])) if getv(from_object, ["filter"]) is not None: setv(parent_object, ["_query", "filter"], getv(from_object, ["filter"])) @@ -264,9 +220,7 @@ def _GetAgentEngineOperationParameters_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["operation_name"]) is not None: setv( - to_object, - ["_url", "operationName"], - getv(from_object, ["operation_name"]), + to_object, ["_url", "operationName"], getv(from_object, ["operation_name"]) ) if getv(from_object, ["config"]) is not None: @@ -288,11 +242,7 @@ def _QueryAgentEngineConfig_to_vertex( setv(parent_object, ["input"], getv(from_object, ["input"])) if getv(from_object, ["include_all_fields"]) is not None: - setv( - to_object, - ["includeAllFields"], - getv(from_object, ["include_all_fields"]), - ) + setv(to_object, ["includeAllFields"], getv(from_object, ["include_all_fields"])) 
return to_object @@ -353,19 +303,11 @@ def _UpdateAgentEngineConfig_to_vertex( ) if getv(from_object, ["encryption_spec"]) is not None: - setv( - parent_object, - ["encryptionSpec"], - _EncryptionSpec_to_vertex( - getv(from_object, ["encryption_spec"]), to_object - ), - ) + setv(parent_object, ["encryptionSpec"], getv(from_object, ["encryption_spec"])) if getv(from_object, ["update_mask"]) is not None: setv( - parent_object, - ["_query", "updateMask"], - getv(from_object, ["update_mask"]), + parent_object, ["_query", "updateMask"], getv(from_object, ["update_mask"]) ) return to_object @@ -396,6 +338,9 @@ def _ReasoningEngine_from_vertex( parent_object: Optional[dict[str, Any]] = None, ) -> dict[str, Any]: to_object: dict[str, Any] = {} + if getv(from_object, ["encryptionSpec"]) is not None: + setv(to_object, ["encryption_spec"], getv(from_object, ["encryptionSpec"])) + if getv(from_object, ["contextSpec"]) is not None: setv(to_object, ["context_spec"], getv(from_object, ["contextSpec"])) @@ -408,13 +353,6 @@ def _ReasoningEngine_from_vertex( if getv(from_object, ["displayName"]) is not None: setv(to_object, ["display_name"], getv(from_object, ["displayName"])) - if getv(from_object, ["encryptionSpec"]) is not None: - setv( - to_object, - ["encryption_spec"], - getv(from_object, ["encryptionSpec"]), - ) - if getv(from_object, ["etag"]) is not None: setv(to_object, ["etag"], getv(from_object, ["etag"])) @@ -483,11 +421,7 @@ def _ListReasoningEnginesResponse_from_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["sdkHttpResponse"]) is not None: - setv( - to_object, - ["sdk_http_response"], - getv(from_object, ["sdkHttpResponse"]), - ) + setv(to_object, ["sdk_http_response"], getv(from_object, ["sdkHttpResponse"])) if getv(from_object, ["nextPageToken"]) is not None: setv(to_object, ["next_page_token"], getv(from_object, ["nextPageToken"])) @@ -517,10 +451,13 @@ def _QueryReasoningEngineResponse_from_vertex( class 
AgentEngines(_api_module.BaseModule): + def _create( self, *, config: Optional[types.CreateAgentEngineConfigOrDict] = None ) -> types.AgentEngineOperation: - """Creates a new Agent Engine.""" + """ + Creates a new Agent Engine. + """ parameter_model = types._CreateAgentEngineRequestParameters( config=config, @@ -576,19 +513,21 @@ def delete( force: Optional[bool] = None, config: Optional[types.DeleteAgentEngineConfigOrDict] = None, ) -> types.DeleteAgentEngineOperation: - """Delete an Agent Engine resource. + """ + Delete an Agent Engine resource. Args: - name (str): Required. The name of the Agent Engine to be deleted. - Format: + name (str): + Required. The name of the Agent Engine to be deleted. Format: `projects/{project}/locations/{location}/reasoningEngines/{resource_id}` or `reasoningEngines/{resource_id}`. - force (bool): Optional. If set to True, child resources will also be - deleted. Otherwise, the request will fail with FAILED_PRECONDITION - error when the Agent Engine has undeleted child resources. - Defaults to False. - config (DeleteAgentEngineConfig): Optional. Additional - configurations for deleting the Agent Engine. + force (bool): + Optional. If set to True, child resources will also be deleted. + Otherwise, the request will fail with FAILED_PRECONDITION error when + the Agent Engine has undeleted child resources. Defaults to False. + config (DeleteAgentEngineConfig): + Optional. Additional configurations for deleting the Agent Engine. + """ parameter_model = types._DeleteAgentEngineRequestParameters( @@ -641,12 +580,11 @@ def delete( return return_value def _get( - self, - *, - name: str, - config: Optional[types.GetAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.GetAgentEngineConfigOrDict] = None ) -> types.ReasoningEngine: - """Get an Agent Engine instance.""" + """ + Get an Agent Engine instance. 
+ """ parameter_model = types._GetAgentEngineRequestParameters( name=name, @@ -697,7 +635,9 @@ def _get( def _list( self, *, config: Optional[types.ListAgentEngineConfigOrDict] = None ) -> types.ListReasoningEnginesResponse: - """Lists Agent Engines.""" + """ + Lists Agent Engines. + """ parameter_model = types._ListAgentEngineRequestParameters( config=config, @@ -797,12 +737,11 @@ def _get_agent_operation( return return_value def _query( - self, - *, - name: str, - config: Optional[types.QueryAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.QueryAgentEngineConfigOrDict] = None ) -> types.QueryReasoningEngineResponse: - """Query an Agent Engine.""" + """ + Query an Agent Engine. + """ parameter_model = types._QueryAgentEngineRequestParameters( name=name, @@ -851,12 +790,11 @@ def _query( return return_value def _update( - self, - *, - name: str, - config: Optional[types.UpdateAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.UpdateAgentEngineConfigOrDict] = None ) -> types.AgentEngineOperation: - """Updates an Agent Engine.""" + """ + Updates an Agent Engine. + """ parameter_model = types._UpdateAgentEngineRequestParameters( name=name, @@ -907,13 +845,10 @@ def _update( return return_value _memories = None + _sandboxes = None _sessions = None @property - @_common.experimental_warning( - "The Vertex SDK GenAI agent_engines.memories module is experimental, " - "and may change in future versions." - ) def memories(self): if self._memories is None: try: @@ -930,9 +865,24 @@ def memories(self): @property @_common.experimental_warning( - "The Vertex SDK GenAI agent_engines.sessions module is experimental, " + "The Vertex SDK GenAI agent_engines.sandboxes module is experimental, " "and may change in future versions." ) + def sandboxes(self): + if self._sandboxes is None: + try: + # We need to lazy load the sandboxes module to handle the + # possibility of ImportError when dependencies are not installed. 
+ self._sandboxes = importlib.import_module(".sandboxes", __package__) + except ImportError as e: + raise ImportError( + "The agent_engines.sandboxes module requires additional packages. " + "Please install them using pip install " + "google-cloud-aiplatform[agent_engines]" + ) from e + return self._sandboxes.Sandboxes(self._api_client) + + @property def sessions(self): if self._sessions is None: try: @@ -941,9 +891,9 @@ def sessions(self): self._sessions = importlib.import_module(".sessions", __package__) except ImportError as e: raise ImportError( - "The agent_engines.sessions module requires additional" - " packages. Please install them using pip install" - " google-cloud-aiplatform[agent_engines]" + "The agent_engines.sessions module requires additional packages. " + "Please install them using pip install " + "google-cloud-aiplatform[agent_engines]" ) from e return self._sessions.Sessions(self._api_client) @@ -966,9 +916,10 @@ def get( """Gets an agent engine. Args: - name (str): Required. A fully-qualified resource name or ID such as - "projects/123/locations/us-central1/reasoningEngines/456" or a - shortened name such as "reasoningEngines/456". + name (str): + Required. A fully-qualified resource name or ID such as + "projects/123/locations/us-central1/reasoningEngines/456" or + a shortened name such as "reasoningEngines/456". """ api_resource = self._get(name=name, config=config) agent_engine = types.AgentEngine( @@ -1029,14 +980,14 @@ def create( ) Args: - agent (Any): Optional. The Agent to be created. If not specified, - this will correspond to a lightweight instance that cannot be - queried (but can be updated to future instances that can be - queried). - agent_engine (Any): Optional. This is deprecated. Please use `agent` - instead. - config (AgentEngineConfig): Optional. The configurations to use for - creating the Agent Engine. + agent (Any): + Optional. The Agent to be created. 
If not specified, this will + correspond to a lightweight instance that cannot be queried + (but can be updated to future instances that can be queried). + agent_engine (Any): + Optional. This is deprecated. Please use `agent` instead. + config (AgentEngineConfig): + Optional. The configurations to use for creating the Agent Engine. Returns: AgentEngine: The created Agent Engine instance. @@ -1049,10 +1000,8 @@ def create( ValueError: If `config.staging_bucket` does not start with "gs://". ValueError: If `config.extra_packages` is specified but `agent` is None. - ValueError: If `config.requirements` is specified but `agent` is - None. - ValueError: If `config.env_vars` has a dictionary entry that does - not + ValueError: If `config.requirements` is specified but `agent` is None. + ValueError: If `config.env_vars` has a dictionary entry that does not correspond to an environment variable value or a SecretRef. TypeError: If `config.env_vars` is not a dictionary. FileNotFoundError: If `config.extra_packages` includes a file or @@ -1066,8 +1015,7 @@ def create( config = types.AgentEngineConfig.model_validate(config) elif not isinstance(config, types.AgentEngineConfig): raise TypeError( - "config must be a dict or AgentEngineConfig, but got" - f" {type(config)}." + f"config must be a dict or AgentEngineConfig, but got {type(config)}." ) context_spec = config.context_spec if context_spec is not None: @@ -1077,8 +1025,7 @@ def create( raise ValueError("Please specify only one of `agent` or `agent_engine`.") elif agent_engine: raise DeprecationWarning( - "The `agent_engine` argument is deprecated. Please use `agent`" - " instead." + "The `agent_engine` argument is deprecated. Please use `agent` instead." 
) agent = agent or agent_engine api_config = self._create_config( @@ -1091,6 +1038,7 @@ def create( gcs_dir_name=config.gcs_dir_name, extra_packages=config.extra_packages, env_vars=config.env_vars, + service_account=config.service_account, context_spec=context_spec, psc_interface_config=config.psc_interface_config, min_instances=config.min_instances, @@ -1102,8 +1050,7 @@ def create( operation = self._create(config=api_config) # TODO: Use a more specific link. logger.info( - "View progress and logs at" - f" https://console.cloud.google.com/logs/query?project={self._api_client.project}." + f"View progress and logs at https://console.cloud.google.com/logs/query?project={self._api_client.project}." ) if agent is None: poll_interval_seconds = 1 # Lightweight agent engine resource creation. @@ -1147,13 +1094,14 @@ def _create_config( gcs_dir_name: Optional[str] = None, extra_packages: Optional[Sequence[str]] = None, env_vars: Optional[dict[str, Union[str, Any]]] = None, + service_account: Optional[str] = None, context_spec: Optional[types.ReasoningEngineContextSpecDict] = None, psc_interface_config: Optional[types.PscInterfaceConfigDict] = None, min_instances: Optional[int] = None, max_instances: Optional[int] = None, resource_limits: Optional[dict[str, str]] = None, container_concurrency: Optional[int] = None, - encryption_spec: Optional[types.EncryptionSpecDict] = None, + encryption_spec: Optional[genai_types.EncryptionSpecDict] = None, ) -> types.UpdateAgentEngineConfigDict: import sys @@ -1261,6 +1209,9 @@ def _create_config( ) update_masks.extend(deployment_update_masks) agent_engine_spec["deployment_spec"] = deployment_spec + if service_account is not None: + agent_engine_spec["service_account"] = service_account + update_masks.append("spec.service_account") class_methods = _agent_engines_utils._generate_class_methods_spec_or_raise( agent=agent, operations=_agent_engines_utils._get_registered_operations(agent=agent), @@ -1269,10 +1220,22 @@ def _create_config( 
_agent_engines_utils._to_dict(class_method) for class_method in class_methods ] + # Set the agent_server_mode to EXPERIMENTAL if the agent has a + # bidi_stream method. + for class_method in class_methods: + if class_method["api_mode"] == "bidi_stream": + if not agent_engine_spec.get("deployment_spec"): + agent_engine_spec["deployment_spec"] = ( + types.ReasoningEngineSpecDeploymentSpecDict() + ) + agent_engine_spec["deployment_spec"][ + "agent_server_mode" + ] = types.AgentServerMode.EXPERIMENTAL + break update_masks.append("spec.class_methods") - agent_engine_spec[ - "agent_framework" - ] = _agent_engines_utils._get_agent_framework(agent=agent) + agent_engine_spec["agent_framework"] = ( + _agent_engines_utils._get_agent_framework(agent=agent) + ) update_masks.append("spec.agent_framework") config["spec"] = agent_engine_spec if update_masks and mode == "update": @@ -1311,19 +1274,18 @@ def _generate_deployment_spec_or_raise( if min_instances is not None: if not 0 <= min_instances <= 10: raise ValueError( - "min_instances must be between 0 and 10. Got" f" {min_instances}" + f"min_instances must be between 0 and 10. Got {min_instances}" ) deployment_spec["min_instances"] = min_instances update_masks.append("spec.deployment_spec.min_instances") if max_instances is not None: if psc_interface_config and not 1 <= max_instances <= 100: raise ValueError( - "max_instances must be between 1 and 100 when PSC-I is" - f" enabled. Got {max_instances}" + f"max_instances must be between 1 and 100 when PSC-I is enabled. Got {max_instances}" ) elif not psc_interface_config and not 1 <= max_instances <= 1000: raise ValueError( - "max_instances must be between 1 and 1000. Got" f" {max_instances}" + f"max_instances must be between 1 and 1000. 
Got {max_instances}" ) deployment_spec["max_instances"] = max_instances update_masks.append("spec.deployment_spec.max_instances") @@ -1373,12 +1335,12 @@ def _register_api_methods( "async": _agent_engines_utils._wrap_async_query_operation, "stream": _agent_engines_utils._wrap_stream_query_operation, "async_stream": _agent_engines_utils._wrap_async_stream_query_operation, + "a2a_extension": _agent_engines_utils._wrap_a2a_operation, }, ) except Exception as e: logger.warning( - _agent_engines_utils._FAILED_TO_REGISTER_API_METHODS_WARNING_TEMPLATE, - e, + _agent_engines_utils._FAILED_TO_REGISTER_API_METHODS_WARNING_TEMPLATE, e ) return agent_engine @@ -1392,20 +1354,18 @@ def list( .. code-block:: python import vertexai - client = vertexai.Client(project="my_project", - location="us-central1") + client = vertexai.Client(project="my_project", location="us-central1") for agent in client.agent_engines.list( config={"filter": "'display_name="My Custom Agent"'}, ): print(agent.api_resource.name) Args: - config (ListAgentEngineConfig): Optional. The config (e.g. filter) - for the agents to be listed. + config (ListAgentEngineConfig): + Optional. The config (e.g. filter) for the agents to be listed. Returns: - Iterable[AgentEngine]: An iterable of Agent Engines matching the - filter. + Iterable[AgentEngine]: An iterable of Agent Engines matching the filter. """ for reasoning_engine in self._list_pager(config=config): @@ -1425,21 +1385,20 @@ def update( ) -> types.AgentEngine: """Updates an existing Agent Engine. - This method updates the configuration of an existing Agent Engine - running + This method updates the configuration of an existing Agent Engine running remotely, which is identified by its name. Args: name (str): Required. A fully-qualified resource name or ID such as - "projects/123/locations/us-central1/reasoningEngines/456" or a - shortened name such as "reasoningEngines/456". - agent (Any): Optional. The instance to be used as the updated Agent - Engine. 
If it is not specified, the existing instance will be - used. - agent_engine (Any): Optional. This is deprecated. Please use `agent` - instead. - config (AgentEngineConfig): Optional. The configurations to use for - updating the Agent Engine. + "projects/123/locations/us-central1/reasoningEngines/456" or a + shortened name such as "reasoningEngines/456". + agent (Any): + Optional. The instance to be used as the updated Agent Engine. + If it is not specified, the existing instance will be used. + agent_engine (Any): + Optional. This is deprecated. Please use `agent` instead. + config (AgentEngineConfig): + Optional. The configurations to use for updating the Agent Engine. Returns: AgentEngine: The updated Agent Engine. @@ -1452,8 +1411,7 @@ def update( ValueError: If `config.staging_bucket` does not start with "gs://". ValueError: If `config.extra_packages` is specified but `agent_engine` is None. - ValueError: If `config.requirements` is specified but `agent_engine` - is + ValueError: If `config.requirements` is specified but `agent_engine` is None. ValueError: If `config.env_vars` has a dictionary entry that does not correspond to an environment variable value or a SecretRef. @@ -1467,8 +1425,7 @@ def update( config = types.AgentEngineConfig.model_validate(config) elif not isinstance(config, types.AgentEngineConfig): raise TypeError( - "config must be a dict or AgentEngineConfig, but got" - f" {type(config)}." + f"config must be a dict or AgentEngineConfig, but got {type(config)}." ) context_spec = config.context_spec if context_spec is not None: @@ -1478,8 +1435,7 @@ def update( raise ValueError("Please specify only one of `agent` or `agent_engine`.") elif agent_engine: raise DeprecationWarning( - "The `agent_engine` argument is deprecated. Please use `agent`" - " instead." + "The `agent_engine` argument is deprecated. Please use `agent` instead." 
) agent = agent or agent_engine api_config = self._create_config( @@ -1492,6 +1448,7 @@ def update( gcs_dir_name=config.gcs_dir_name, extra_packages=config.extra_packages, env_vars=config.env_vars, + service_account=config.service_account, context_spec=context_spec, psc_interface_config=config.psc_interface_config, min_instances=config.min_instances, @@ -1501,8 +1458,7 @@ def update( ) operation = self._update(name=name, config=api_config) logger.info( - "View progress and logs at" - f" https://console.cloud.google.com/logs/query?project={self._api_client.project}." + f"View progress and logs at https://console.cloud.google.com/logs/query?project={self._api_client.project}." ) operation = _agent_engines_utils._await_operation( operation_name=operation.name, @@ -1525,10 +1481,7 @@ def update( return agent_engine def _stream_query( - self, - *, - name: str, - config: Optional[types.QueryAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.QueryAgentEngineConfigOrDict] = None ) -> Iterator[Any]: """Streams the response of the agent engine.""" parameter_model = types._QueryAgentEngineRequestParameters( @@ -1852,10 +1805,13 @@ def list_session_events( class AsyncAgentEngines(_api_module.BaseModule): + async def _create( self, *, config: Optional[types.CreateAgentEngineConfigOrDict] = None ) -> types.AgentEngineOperation: - """Creates a new Agent Engine.""" + """ + Creates a new Agent Engine. + """ parameter_model = types._CreateAgentEngineRequestParameters( config=config, @@ -1913,19 +1869,21 @@ async def delete( force: Optional[bool] = None, config: Optional[types.DeleteAgentEngineConfigOrDict] = None, ) -> types.DeleteAgentEngineOperation: - """Delete an Agent Engine resource. + """ + Delete an Agent Engine resource. Args: - name (str): Required. The name of the Agent Engine to be deleted. - Format: + name (str): + Required. The name of the Agent Engine to be deleted. 
Format: `projects/{project}/locations/{location}/reasoningEngines/{resource_id}` or `reasoningEngines/{resource_id}`. - force (bool): Optional. If set to True, child resources will also be - deleted. Otherwise, the request will fail with FAILED_PRECONDITION - error when the Agent Engine has undeleted child resources. - Defaults to False. - config (DeleteAgentEngineConfig): Optional. Additional - configurations for deleting the Agent Engine. + force (bool): + Optional. If set to True, child resources will also be deleted. + Otherwise, the request will fail with FAILED_PRECONDITION error when + the Agent Engine has undeleted child resources. Defaults to False. + config (DeleteAgentEngineConfig): + Optional. Additional configurations for deleting the Agent Engine. + """ parameter_model = types._DeleteAgentEngineRequestParameters( @@ -1980,12 +1938,11 @@ async def delete( return return_value async def _get( - self, - *, - name: str, - config: Optional[types.GetAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.GetAgentEngineConfigOrDict] = None ) -> types.ReasoningEngine: - """Get an Agent Engine instance.""" + """ + Get an Agent Engine instance. + """ parameter_model = types._GetAgentEngineRequestParameters( name=name, @@ -2038,7 +1995,9 @@ async def _get( async def _list( self, *, config: Optional[types.ListAgentEngineConfigOrDict] = None ) -> types.ListReasoningEnginesResponse: - """Lists Agent Engines.""" + """ + Lists Agent Engines. + """ parameter_model = types._ListAgentEngineRequestParameters( config=config, @@ -2142,12 +2101,11 @@ async def _get_agent_operation( return return_value async def _query( - self, - *, - name: str, - config: Optional[types.QueryAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.QueryAgentEngineConfigOrDict] = None ) -> types.QueryReasoningEngineResponse: - """Query an Agent Engine.""" + """ + Query an Agent Engine. 
+ """ parameter_model = types._QueryAgentEngineRequestParameters( name=name, @@ -2198,12 +2156,11 @@ async def _query( return return_value async def _update( - self, - *, - name: str, - config: Optional[types.UpdateAgentEngineConfigOrDict] = None, + self, *, name: str, config: Optional[types.UpdateAgentEngineConfigOrDict] = None ) -> types.AgentEngineOperation: - """Updates an Agent Engine.""" + """ + Updates an Agent Engine. + """ parameter_model = types._UpdateAgentEngineRequestParameters( name=name, @@ -2259,10 +2216,6 @@ async def _update( _sessions = None @property - @_common.experimental_warning( - "The Vertex SDK GenAI agent_engines.memories module is experimental, " - "and may change in future versions." - ) def memories(self): if self._memories is None: try: @@ -2278,10 +2231,6 @@ def memories(self): return self._memories.AsyncMemories(self._api_client) @property - @_common.experimental_warning( - "The Vertex SDK GenAI agent_engines.sessions module is experimental, " - "and may change in future versions." - ) def sessions(self): if self._sessions is None: try: @@ -2290,9 +2239,9 @@ def sessions(self): self._sessions = importlib.import_module(".sessions", __package__) except ImportError as e: raise ImportError( - "The agent_engines.sessions module requires additional" - " packages. Please install them using pip install" - " google-cloud-aiplatform[agent_engines]" + "The agent_engines.sessions module requires additional packages. 
" + "Please install them using pip install " + "google-cloud-aiplatform[agent_engines]" ) from e return self._sessions.AsyncSessions(self._api_client) diff --git a/vertexai/_genai/client.py b/vertexai/_genai/client.py index 249c3a5c93..83797eacda 100644 --- a/vertexai/_genai/client.py +++ b/vertexai/_genai/client.py @@ -91,10 +91,6 @@ def prompt_optimizer(self): return self._prompt_optimizer.AsyncPromptOptimizer(self._api_client) @property - @_common.experimental_warning( - "The Vertex SDK GenAI agent engines module is experimental, " - "and may change in future versions." - ) def agent_engines(self): if self._agent_engines is None: try: @@ -122,6 +118,7 @@ class Client: def __init__( self, *, + api_key: Optional[str] = None, credentials: Optional[google.auth.credentials.Credentials] = None, project: Optional[str] = None, location: Optional[str] = None, @@ -131,6 +128,9 @@ def __init__( """Initializes the client. Args: + api_key (str): The `API key + `_ + to use for authentication. Applies to Vertex AI in express mode only. credentials (google.auth.credentials.Credentials): The credentials to use for authentication when calling the Vertex AI APIs. Credentials can be obtained from environment variables and default credentials. For more @@ -156,6 +156,7 @@ def __init__( self._api_client = genai_client.Client._get_api_client( vertexai=True, + api_key=api_key, credentials=credentials, project=project, location=location, @@ -166,6 +167,7 @@ def __init__( self._evals = None self._prompt_optimizer = None self._agent_engines = None + self._prompt_management = None @property def evals(self) -> Any: @@ -230,10 +232,6 @@ def _get_api_client( ) @property - @_common.experimental_warning( - "The Vertex SDK GenAI agent engines module is experimental, " - "and may change in future versions." 
- ) def agent_engines(self): if self._agent_engines is None: try: @@ -250,3 +248,17 @@ def agent_engines(self): "google-cloud-aiplatform[agent_engines]" ) from e return self._agent_engines.AgentEngines(self._api_client) + + @property + @_common.experimental_warning( + "The Vertex SDK GenAI prompt management module is experimental, " + "and may change in future versions." + ) + def prompt_management(self): + if self._prompt_management is None: + # Lazy loading the prompt_management module + self._prompt_management = importlib.import_module( + ".prompt_management", + __package__, + ) + return self._prompt_management.PromptManagement(self._api_client) diff --git a/vertexai/_genai/evals.py b/vertexai/_genai/evals.py index 90619a722b..3235345f97 100644 --- a/vertexai/_genai/evals.py +++ b/vertexai/_genai/evals.py @@ -595,11 +595,7 @@ def _EvaluateInstancesRequestParameters_to_vertex( ) if getv(from_object, ["autorater_config"]) is not None: - setv( - to_object, - ["autoraterConfig"], - getv(from_object, ["autorater_config"]), - ) + setv(to_object, ["autoraterConfig"], getv(from_object, ["autorater_config"])) if getv(from_object, ["metrics"]) is not None: setv(to_object, ["metrics"], t.t_metrics(getv(from_object, ["metrics"]))) @@ -623,11 +619,7 @@ def _PredefinedMetricSpec_to_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["metric_spec_name"]) is not None: - setv( - to_object, - ["metricSpecName"], - getv(from_object, ["metric_spec_name"]), - ) + setv(to_object, ["metricSpecName"], getv(from_object, ["metric_spec_name"])) if getv(from_object, ["metric_spec_parameters"]) is not None: setv( @@ -645,24 +637,14 @@ def _RubricGenerationSpec_to_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["prompt_template"]) is not None: - setv( - to_object, - ["promptTemplate"], - getv(from_object, ["prompt_template"]), - ) + setv(to_object, ["promptTemplate"], getv(from_object, ["prompt_template"])) if getv(from_object, 
["generator_model_config"]) is not None: - setv( - to_object, - ["model_config"], - getv(from_object, ["generator_model_config"]), - ) + setv(to_object, ["model_config"], getv(from_object, ["generator_model_config"])) if getv(from_object, ["rubric_content_type"]) is not None: setv( - to_object, - ["rubricContentType"], - getv(from_object, ["rubric_content_type"]), + to_object, ["rubricContentType"], getv(from_object, ["rubric_content_type"]) ) if getv(from_object, ["rubric_type_ontology"]) is not None: @@ -688,8 +670,7 @@ def _GenerateInstanceRubricsRequest_to_vertex( to_object, ["predefinedRubricGenerationSpec"], _PredefinedMetricSpec_to_vertex( - getv(from_object, ["predefined_rubric_generation_spec"]), - to_object, + getv(from_object, ["predefined_rubric_generation_spec"]), to_object ), ) @@ -717,11 +698,7 @@ def _MetricResult_from_vertex( setv(to_object, ["score"], getv(from_object, ["score"])) if getv(from_object, ["rubricVerdicts"]) is not None: - setv( - to_object, - ["rubric_verdicts"], - getv(from_object, ["rubricVerdicts"]), - ) + setv(to_object, ["rubric_verdicts"], getv(from_object, ["rubricVerdicts"])) if getv(from_object, ["explanation"]) is not None: setv(to_object, ["explanation"], getv(from_object, ["explanation"])) @@ -763,9 +740,7 @@ def _EvaluateInstancesResponse_from_vertex( if getv(from_object, ["exactMatchResults"]) is not None: setv( - to_object, - ["exact_match_results"], - getv(from_object, ["exactMatchResults"]), + to_object, ["exact_match_results"], getv(from_object, ["exactMatchResults"]) ) if getv(from_object, ["metricxResult"]) is not None: @@ -890,6 +865,7 @@ def _GenerateInstanceRubricsResponse_from_vertex( class Evals(_api_module.BaseModule): + def _evaluate_instances( self, *, @@ -912,7 +888,9 @@ def _evaluate_instances( instance: Optional[types.EvaluationInstanceOrDict] = None, config: Optional[types.EvaluateInstancesConfigOrDict] = None, ) -> types.EvaluateInstancesResponse: - """Evaluates instances based on a given metric.""" + 
""" + Evaluates instances based on a given metric. + """ parameter_model = types._EvaluateInstancesRequestParameters( bleu_input=bleu_input, @@ -984,7 +962,9 @@ def _generate_rubrics( rubric_generation_spec: Optional[types.RubricGenerationSpecOrDict] = None, config: Optional[types.RubricGenerationConfigOrDict] = None, ) -> types.GenerateInstanceRubricsResponse: - """Generates rubrics for a given prompt.""" + """ + Generates rubrics for a given prompt. + """ parameter_model = types._GenerateInstanceRubricsRequest( contents=contents, @@ -1067,23 +1047,22 @@ def run_inference( """Runs inference on a dataset for evaluation. Args: - model: The model to use for inference. - For Google Gemini models, - provide the model name string (e.g., "gemini-2.5-flash"). - For - third-party models via LiteLLM, use the format "provider/model_name" - (e.g., "openai/gpt-4o"). Ensure the necessary API key (e.g., - OPENAI_API_KEY) is set as an environment variable. - For custom - logic, provide a callable function that accepts a prompt and returns - a response. + model: The model to use for inference. + - For Google Gemini models, provide the model name string (e.g., "gemini-2.5-flash"). + - For third-party models via LiteLLM, use the format "provider/model_name" + (e.g., "openai/gpt-4o"). Ensure the necessary API key (e.g., OPENAI_API_KEY) + is set as an environment variable. + - For custom logic, provide a callable function that accepts a prompt and + returns a response. src: The source of the dataset. Can be a string (path to a local file, - a GCS path, or a BigQuery table), a Pandas DataFrame, or an - EvaluationDataset object. If an EvaluationDataset is provided, it - must have `eval_dataset_df` populated. - config: The optional configuration for the inference run. Must be a - dict or `types.EvalRunInferenceConfig` type. - dest: The destination - path for storage of the inference results. - prompt_template: The - template string to use for constructing prompts. 
- - generate_content_config: The config for the Gemini generate content - call. + a GCS path, or a BigQuery table), a Pandas DataFrame, or an + EvaluationDataset object. If an EvaluationDataset is provided, + it must have `eval_dataset_df` populated. + config: The optional configuration for the inference run. Must be a dict or + `types.EvalRunInferenceConfig` type. + - dest: The destination path for storage of the inference results. + - prompt_template: The template string to use for constructing prompts. + - generate_content_config: The config for the Gemini generate content call. Returns: The evaluation dataset. @@ -1121,14 +1100,11 @@ def evaluate( """Evaluates candidate responses in the provided dataset(s) using the specified metrics. Args: - dataset: The dataset(s) to evaluate. Can be a single - `types.EvaluationDataset` or a list of `types.EvaluationDataset`. + dataset: The dataset(s) to evaluate. Can be a single `types.EvaluationDataset` or a list of `types.EvaluationDataset`. metrics: The list of metrics to use for evaluation. - config: Optional configuration for the evaluation. Can be a dictionary - or a `types.EvaluateMethodConfig` object. - dataset_schema: Schema - to use for the dataset. If not specified, the dataset schema will be - inferred from the dataset automatically. - dest: Destination path - for storing evaluation results. + config: Optional configuration for the evaluation. Can be a dictionary or a `types.EvaluateMethodConfig` object. + - dataset_schema: Schema to use for the dataset. If not specified, the dataset schema will be inferred from the dataset automatically. + - dest: Destination path for storing evaluation results. Returns: The evaluation result. 
@@ -1139,9 +1115,11 @@ def evaluate( config = types.EvaluateMethodConfig.model_validate(config) if isinstance(dataset, list): dataset = [ - types.EvaluationDataset.model_validate(ds_item) - if isinstance(ds_item, dict) - else ds_item + ( + types.EvaluationDataset.model_validate(ds_item) + if isinstance(ds_item, dict) + else ds_item + ) for ds_item in dataset ] else: @@ -1170,7 +1148,7 @@ def batch_evaluate( resolved_metrics = _evals_common._resolve_metrics(metrics, self._api_client) output_config = genai_types.OutputConfig( - gcs_destination=types.GcsDestination(output_uri_prefix=dest) + gcs_destination=genai_types.GcsDestination(output_uri_prefix=dest) ) parameter_model = types.EvaluateDatasetRequestParameters( dataset=dataset, @@ -1242,53 +1220,47 @@ def generate_rubrics( config: Optional[types.RubricGenerationConfigOrDict] = None, ) -> types.EvaluationDataset: """Generates rubrics for each prompt in the source and adds them as a new column - structured as a dictionary. You can generate rubrics by providing either: 1. A `predefined_spec_name` to use a Vertex AI backend recipe. 2. A `prompt_template` along with other configuration parameters - (`generator_model_config`, `rubric_content_type`, - `rubric_type_ontology`) + (`generator_model_config`, `rubric_content_type`, `rubric_type_ontology`) for custom rubric generation. These two modes are mutually exclusive. Args: src: The source of the prompts. Can be a string (path to a local - file, a GCS path, or a BigQuery table), a Pandas DataFrame, or an - EvaluationDataset object. The loaded data must contain either a - 'prompt' column (for text) or a 'request' column (for text or + file, a GCS path, or a BigQuery table), a Pandas DataFrame, or + an EvaluationDataset object. The loaded data must contain either + a 'prompt' column (for text) or a 'request' column (for text or multimodal Gemini Content). rubric_group_name: Name for the key within the dictionary in the new column. - prompt_template: Optional. 
Template for the rubric generation - prompt. Used for custom rubric generation. Mutually exclusive with - `predefined_spec_name`. If using a 'prompt' column, use {prompt} - as the placeholder. If using a 'request' column, this template is - passed to the service along with the content. + prompt_template: Optional. Template for the rubric generation prompt. Used for + custom rubric generation. Mutually exclusive with `predefined_spec_name`. + If using a 'prompt' column, use {prompt} as the placeholder. If using a + 'request' column, this template is passed to the service along + with the content. generator_model_config: Optional. Configuration for the model used - in custom rubric generation. Only used if `prompt_template` is - provided. e.g., {"autorater_model": "gemini-2.5-flash"}. + in custom rubric generation. Only used if `prompt_template` is provided. + e.g., {"autorater_model": "gemini-2.5-flash"}. rubric_content_type: Optional. The type of rubric content to be generated. Only used if `prompt_template` is provided. rubric_type_ontology: Optional. A pre-defined list of allowed types for generated rubrics. Only used if `prompt_template` is provided. - predefined_spec_name: Optional. The name of a Predefined Metric to - use for rubric generation (e.g., "general_quality_v1") or a - types.PrebuiltMetric object. Mutually exclusive with - `prompt_template` and its related parameters. - metric_spec_parameters: Optional. Parameters for the Predefined - Metric, used to customize rubric generation. Only used if - `predefined_spec_name` is set. + predefined_spec_name: Optional. The name of a Predefined Metric to use + for rubric generation (e.g., "general_quality_v1") or a types.PrebuiltMetric object. + Mutually exclusive with `prompt_template` and its related parameters. + metric_spec_parameters: Optional. Parameters for the Predefined Metric, + used to customize rubric generation. Only used if `predefined_spec_name` is set. 
Example: {"guidelines": ["The response must be in Japanese."]} config: Optional. Configuration for the rubric generation process. Returns: - An `EvaluationDataset` with an added column named `rubric_groups` in - its - `eval_dataset_df`. Each cell in this column contains a dictionary - like: + An `EvaluationDataset` with an added column named `rubric_groups` in its + `eval_dataset_df`. Each cell in this column contains a dictionary like: {rubric_group_name: [list[Rubric]]}. """ if isinstance(src, types.EvaluationDataset): @@ -1302,12 +1274,11 @@ def generate_rubrics( prompts_df = _evals_common._load_dataframe(self._api_client, src) except Exception as e: raise ValueError( - f"Failed to load prompt dataset from source: {src}." f" Error: {e}" + f"Failed to load prompt dataset from source: {src}. Error: {e}" ) else: raise TypeError( - "Unsupported type for src. Must be str, pd.DataFrame, or" - " types.EvaluationDataset." + "Unsupported type for src. Must be str, pd.DataFrame, or types.EvaluationDataset." ) if "prompt" not in prompts_df.columns and "request" not in prompts_df.columns: @@ -1317,7 +1288,7 @@ def generate_rubrics( input_column = "request" if "request" in prompts_df.columns else "prompt" logger.info( - "Generating rubrics for %d prompts from column '%s', group:" " '%s'...", + "Generating rubrics for %d prompts from column '%s', group: '%s'...", len(prompts_df), input_column, rubric_group_name, @@ -1330,23 +1301,19 @@ def generate_rubrics( if predefined_spec_name: if prompt_template: logger.warning( - "prompt_template is ignored when predefined_spec_name is" - " provided." + "prompt_template is ignored when predefined_spec_name is provided." ) if generator_model_config: logger.warning( - "generator_model_config is ignored when" - " predefined_spec_name is provided." + "generator_model_config is ignored when predefined_spec_name is provided." 
) if rubric_content_type: logger.warning( - "rubric_content_type is ignored when predefined_spec_name" - " is provided." + "rubric_content_type is ignored when predefined_spec_name is provided." ) if rubric_type_ontology: logger.warning( - "rubric_type_ontology is ignored when predefined_spec_name" - " is provided." + "rubric_type_ontology is ignored when predefined_spec_name is provided." ) if isinstance(predefined_spec_name, str): @@ -1361,12 +1328,12 @@ def generate_rubrics( raise ValueError(f"Failed to resolve PrebuiltMetric: {e}") else: raise TypeError( - "predefined_spec_name must be a string or" " types.PrebuiltMetric" + "predefined_spec_name must be a string or types.PrebuiltMetric" ) if not actual_predefined_spec_name: raise ValueError( - "Could not determine metric_spec_name from" " predefined_spec_name" + "Could not determine metric_spec_name from predefined_spec_name" ) predefined_spec = types.PredefinedMetricSpec( @@ -1376,8 +1343,7 @@ def generate_rubrics( elif prompt_template: if metric_spec_parameters: logger.warning( - "metric_spec_parameters is ignored when prompt_template is" - " provided." + "metric_spec_parameters is ignored when prompt_template is provided." ) spec_dict = { "prompt_template": prompt_template, @@ -1389,7 +1355,7 @@ def generate_rubrics( rubric_gen_spec = types.RubricGenerationSpec.model_validate(spec_dict) else: raise ValueError( - "Either predefined_spec_name or prompt_template must be" " provided." + "Either predefined_spec_name or prompt_template must be provided." ) for _, row in prompts_df.iterrows(): @@ -1402,8 +1368,7 @@ def generate_rubrics( contents = input_data else: logger.warning( - "Skipping row: Unexpected input format in column" - f" '{input_column}'." + f"Skipping row: Unexpected input format in column '{input_column}'." 
) all_rubric_groups.append({rubric_group_name: []}) continue @@ -1429,13 +1394,13 @@ def generate_rubrics( prompts_with_rubrics = prompts_df.copy() prompts_with_rubrics["rubric_groups"] = all_rubric_groups logger.info( - "Rubric generation complete. Added column 'rubric_groups' with key" - f" '{rubric_group_name}'." + f"Rubric generation complete. Added column 'rubric_groups' with key '{rubric_group_name}'." ) return types.EvaluationDataset(eval_dataset_df=prompts_with_rubrics) class AsyncEvals(_api_module.BaseModule): + async def _evaluate_instances( self, *, @@ -1458,7 +1423,9 @@ async def _evaluate_instances( instance: Optional[types.EvaluationInstanceOrDict] = None, config: Optional[types.EvaluateInstancesConfigOrDict] = None, ) -> types.EvaluateInstancesResponse: - """Evaluates instances based on a given metric.""" + """ + Evaluates instances based on a given metric. + """ parameter_model = types._EvaluateInstancesRequestParameters( bleu_input=bleu_input, @@ -1532,7 +1499,9 @@ async def _generate_rubrics( rubric_generation_spec: Optional[types.RubricGenerationSpecOrDict] = None, config: Optional[types.RubricGenerationConfigOrDict] = None, ) -> types.GenerateInstanceRubricsResponse: - """Generates rubrics for a given prompt.""" + """ + Generates rubrics for a given prompt. 
+ """ parameter_model = types._GenerateInstanceRubricsRequest( contents=contents, @@ -1595,7 +1564,7 @@ async def batch_evaluate( """Evaluates a dataset based on a set of given metrics.""" resolved_metrics = _evals_common._resolve_metrics(metrics, self._api_client) output_config = genai_types.OutputConfig( - gcs_destination=types.GcsDestination(output_uri_prefix=dest) + gcs_destination=genai_types.GcsDestination(output_uri_prefix=dest) ) parameter_model = types.EvaluateDatasetRequestParameters( dataset=dataset, diff --git a/vertexai/_genai/memories.py b/vertexai/_genai/memories.py index 1b2411114a..a49c1caa2c 100644 --- a/vertexai/_genai/memories.py +++ b/vertexai/_genai/memories.py @@ -149,11 +149,7 @@ def _GenerateMemoriesRequestDirectMemoriesSource_to_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["direct_memories"]) is not None: - setv( - to_object, - ["directMemories"], - getv(from_object, ["direct_memories"]), - ) + setv(to_object, ["directMemories"], getv(from_object, ["direct_memories"])) return to_object @@ -245,22 +241,17 @@ def _ListAgentEngineMemoryConfig_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["page_size"]) is not None: - setv( - parent_object, - ["_query", "pageSize"], - getv(from_object, ["page_size"]), - ) + setv(parent_object, ["_query", "pageSize"], getv(from_object, ["page_size"])) if getv(from_object, ["page_token"]) is not None: - setv( - parent_object, - ["_query", "pageToken"], - getv(from_object, ["page_token"]), - ) + setv(parent_object, ["_query", "pageToken"], getv(from_object, ["page_token"])) if getv(from_object, ["filter"]) is not None: setv(parent_object, ["_query", "filter"], getv(from_object, ["filter"])) + if getv(from_object, ["order_by"]) is not None: + setv(parent_object, ["_query", "orderBy"], getv(from_object, ["order_by"])) + return to_object @@ -291,9 +282,7 @@ def _GetAgentEngineMemoryOperationParameters_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, 
["operation_name"]) is not None: setv( - to_object, - ["_url", "operationName"], - getv(from_object, ["operation_name"]), + to_object, ["_url", "operationName"], getv(from_object, ["operation_name"]) ) if getv(from_object, ["config"]) is not None: @@ -309,9 +298,7 @@ def _GetAgentEngineGenerateMemoriesOperationParameters_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["operation_name"]) is not None: setv( - to_object, - ["_url", "operationName"], - getv(from_object, ["operation_name"]), + to_object, ["_url", "operationName"], getv(from_object, ["operation_name"]) ) if getv(from_object, ["config"]) is not None: @@ -403,9 +390,7 @@ def _UpdateAgentEngineMemoryConfig_to_vertex( if getv(from_object, ["update_mask"]) is not None: setv( - parent_object, - ["_query", "updateMask"], - getv(from_object, ["update_mask"]), + parent_object, ["_query", "updateMask"], getv(from_object, ["update_mask"]) ) return to_object @@ -590,11 +575,7 @@ def _ListReasoningEnginesMemoriesResponse_from_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["sdkHttpResponse"]) is not None: - setv( - to_object, - ["sdk_http_response"], - getv(from_object, ["sdkHttpResponse"]), - ) + setv(to_object, ["sdk_http_response"], getv(from_object, ["sdkHttpResponse"])) if getv(from_object, ["nextPageToken"]) is not None: setv(to_object, ["next_page_token"], getv(from_object, ["nextPageToken"])) @@ -622,15 +603,14 @@ def _RetrieveMemoriesResponse_from_vertex( if getv(from_object, ["retrievedMemories"]) is not None: setv( - to_object, - ["retrieved_memories"], - getv(from_object, ["retrievedMemories"]), + to_object, ["retrieved_memories"], getv(from_object, ["retrievedMemories"]) ) return to_object class Memories(_api_module.BaseModule): + def _create( self, *, @@ -639,7 +619,9 @@ def _create( scope: dict[str, str], config: Optional[types.AgentEngineMemoryConfigOrDict] = None, ) -> types.AgentEngineMemoryOperation: - """Creates a new memory in the Agent Engine.""" 
+ """ + Creates a new memory in the Agent Engine. + """ parameter_model = types._CreateAgentEngineMemoryRequestParameters( name=name, @@ -697,14 +679,16 @@ def delete( name: str, config: Optional[types.DeleteAgentEngineMemoryConfigOrDict] = None, ) -> types.DeleteAgentEngineMemoryOperation: - """Delete an Agent Engine memory. + """ + Delete an Agent Engine memory. Args: - name (str): Required. The name of the Agent Engine memory to be - deleted. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/memories/{memory}`. - config (DeleteAgentEngineMemoryConfig): Optional. Additional - configurations for deleting the Agent Engine. + name (str): + Required. The name of the Agent Engine memory to be deleted. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/memories/{memory}`. + config (DeleteAgentEngineMemoryConfig): + Optional. Additional configurations for deleting the Agent Engine. + """ parameter_model = types._DeleteAgentEngineMemoryRequestParameters( @@ -771,7 +755,9 @@ def _generate( scope: Optional[dict[str, str]] = None, config: Optional[types.GenerateAgentEngineMemoriesConfigOrDict] = None, ) -> types.AgentEngineGenerateMemoriesOperation: - """Generates memories for an Agent Engine.""" + """ + Generates memories for an Agent Engine. + """ parameter_model = types._GenerateAgentEngineMemoriesRequestParameters( name=name, @@ -833,12 +819,14 @@ def get( name: str, config: Optional[types.GetAgentEngineMemoryConfigOrDict] = None, ) -> types.Memory: - """Gets an agent engine memory. + """ + Gets an agent engine memory. Args: name (str): Required. A fully-qualified resource name or ID such as "projects/123/locations/us-central1/reasoningEngines/456/memories/789" or a shortened name such as "reasoningEngines/456/memories/789". 
+ """ parameter_model = types._GetAgentEngineMemoryRequestParameters( @@ -895,7 +883,9 @@ def _list( name: str, config: Optional[types.ListAgentEngineMemoryConfigOrDict] = None, ) -> types.ListReasoningEnginesMemoriesResponse: - """Lists Agent Engine memories.""" + """ + Lists Agent Engine memories. + """ parameter_model = types._ListAgentEngineMemoryRequestParameters( name=name, @@ -1070,7 +1060,9 @@ def _retrieve( ] = None, config: Optional[types.RetrieveAgentEngineMemoriesConfigOrDict] = None, ) -> types.RetrieveMemoriesResponse: - """Retrieves memories for an Agent Engine.""" + """ + Retrieves memories for an Agent Engine. + """ parameter_model = types._RetrieveAgentEngineMemoriesRequestParameters( name=name, @@ -1131,7 +1123,9 @@ def _update( scope: Optional[dict[str, str]] = None, config: Optional[types.UpdateAgentEngineMemoryConfigOrDict] = None, ) -> types.AgentEngineMemoryOperation: - """Updates an Agent Engine memory.""" + """ + Updates an Agent Engine memory. + """ parameter_model = types._UpdateAgentEngineMemoryRequestParameters( name=name, @@ -1194,12 +1188,14 @@ def create( """Creates a new memory in the Agent Engine. Args: - name (str): Required. The name of the memory to create. - fact (str): Required. The fact to be stored in the memory. - scope (dict[str, str]): Required. The scope of the memory. For - example, {"user_id": "123"}. - config (AgentEngineMemoryConfigOrDict): Optional. The configuration - for the memory. + name (str): + Required. The name of the memory to create. + fact (str): + Required. The fact to be stored in the memory. + scope (dict[str, str]): + Required. The scope of the memory. For example, {"user_id": "123"}. + config (AgentEngineMemoryConfigOrDict): + Optional. The configuration for the memory. Returns: AgentEngineMemoryOperation: The operation for creating the memory. @@ -1249,27 +1245,26 @@ def generate( """Generates memories for the agent engine. Args: - name (str): Required. 
The name of the agent engine to generate - memories for. + name (str): + Required. The name of the agent engine to generate memories for. vertex_session_source (GenerateMemoriesRequestVertexSessionSource): - Optional. The vertex session source to use for generating - memories. Only one of vertex_session_source, - direct_contents_source, or direct_memories_source can be - specified. - direct_contents_source(GenerateMemoriesRequestDirectContentsSource): - Optional. The direct contents source to use for generating - memories. Only one of vertex_session_source, - direct_contents_source, or direct_memories_source can be - specified. direct_memories_source - (GenerateMemoriesRequestDirectMemoriesSource): Optional. The - direct memories source to use for generating memories. Only one of - vertex_session_source, direct_contents_source, or - direct_memories_source can be specified. - scope (dict[str, str]): Optional. The scope of the memories to - generate. This is optional if vertex_session_source is used, - otherwise it must be specified. - config (GenerateMemoriesConfig): Optional. The configuration for the - memories to generate. + Optional. The vertex session source to use for generating + memories. Only one of vertex_session_source, + direct_contents_source, or direct_memories_source can be + specified. + direct_contents_source(GenerateMemoriesRequestDirectContentsSource): + Optional. The direct contents source to use for generating + memories. Only one of vertex_session_source, direct_contents_source, + or direct_memories_source can be specified. + direct_memories_source (GenerateMemoriesRequestDirectMemoriesSource): + Optional. The direct memories source to use for generating + memories. Only one of vertex_session_source, direct_contents_source, + or direct_memories_source can be specified. + scope (dict[str, str]): + Optional. The scope of the memories to generate. This is optional + if vertex_session_source is used, otherwise it must be specified. 
+ config (GenerateMemoriesConfig): + Optional. The configuration for the memories to generate. Returns: AgentEngineGenerateMemoriesOperation: @@ -1291,11 +1286,10 @@ def generate( operation = _agent_engines_utils._await_operation( operation_name=operation.name, get_operation_fn=self._get_generate_memories_operation, + poll_interval_seconds=0.5, ) - if not operation.response: - if operation.error: - raise RuntimeError(f"Failed to generate memory: {operation.error}") - raise RuntimeError(f"Error generating memory: {operation}") + if operation.error: + raise RuntimeError(f"Failed to generate memory: {operation.error}") return operation def list( @@ -1307,10 +1301,10 @@ def list( """Lists Agent Engine memories. Args: - name (str): Required. The name of the agent engine to list memories - for. - config (ListAgentEngineMemoryConfig): Optional. The configuration - for the memories to list. + name (str): + Required. The name of the agent engine to list memories for. + config (ListAgentEngineMemoryConfig): + Optional. The configuration for the memories to list. Returns: Iterable[Memory]: An iterable of memories. @@ -1339,18 +1333,19 @@ def retrieve( """Retrieves memories for the agent. Args: - name (str): Required. The name of the agent engine to retrieve - memories for. - scope (dict[str, str]): Required. The scope of the memories to - retrieve. For example, {"user_id": "123"}. - similarity_search_params - (RetrieveMemoriesRequestSimilaritySearchParams): Optional. The - similarity search parameters to use for retrieving memories. - simple_retrieval_params - (RetrieveMemoriesRequestSimpleRetrievalParams): Optional. The - simple retrieval parameters to use for retrieving memories. - config (RetrieveAgentEngineMemoriesConfig): Optional. The - configuration for the memories to retrieve. + name (str): + Required. The name of the agent engine to retrieve memories for. + scope (dict[str, str]): + Required. The scope of the memories to retrieve. For example, + {"user_id": "123"}. 
+ similarity_search_params (RetrieveMemoriesRequestSimilaritySearchParams): + Optional. The similarity search parameters to use for retrieving + memories. + simple_retrieval_params (RetrieveMemoriesRequestSimpleRetrievalParams): + Optional. The simple retrieval parameters to use for retrieving + memories. + config (RetrieveAgentEngineMemoriesConfig): + Optional. The configuration for the memories to retrieve. Returns: Iterator[RetrieveMemoriesResponseRetrievedMemory]: An iterable of @@ -1377,6 +1372,7 @@ def retrieve( class AsyncMemories(_api_module.BaseModule): + async def _create( self, *, @@ -1385,7 +1381,9 @@ async def _create( scope: dict[str, str], config: Optional[types.AgentEngineMemoryConfigOrDict] = None, ) -> types.AgentEngineMemoryOperation: - """Creates a new memory in the Agent Engine.""" + """ + Creates a new memory in the Agent Engine. + """ parameter_model = types._CreateAgentEngineMemoryRequestParameters( name=name, @@ -1445,14 +1443,16 @@ async def delete( name: str, config: Optional[types.DeleteAgentEngineMemoryConfigOrDict] = None, ) -> types.DeleteAgentEngineMemoryOperation: - """Delete an Agent Engine memory. + """ + Delete an Agent Engine memory. Args: - name (str): Required. The name of the Agent Engine memory to be - deleted. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/memories/{memory}`. - config (DeleteAgentEngineMemoryConfig): Optional. Additional - configurations for deleting the Agent Engine. + name (str): + Required. The name of the Agent Engine memory to be deleted. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/memories/{memory}`. + config (DeleteAgentEngineMemoryConfig): + Optional. Additional configurations for deleting the Agent Engine. 
+ """ parameter_model = types._DeleteAgentEngineMemoryRequestParameters( @@ -1521,7 +1521,9 @@ async def _generate( scope: Optional[dict[str, str]] = None, config: Optional[types.GenerateAgentEngineMemoriesConfigOrDict] = None, ) -> types.AgentEngineGenerateMemoriesOperation: - """Generates memories for an Agent Engine.""" + """ + Generates memories for an Agent Engine. + """ parameter_model = types._GenerateAgentEngineMemoriesRequestParameters( name=name, @@ -1585,12 +1587,14 @@ async def get( name: str, config: Optional[types.GetAgentEngineMemoryConfigOrDict] = None, ) -> types.Memory: - """Gets an agent engine memory. + """ + Gets an agent engine memory. Args: name (str): Required. A fully-qualified resource name or ID such as "projects/123/locations/us-central1/reasoningEngines/456/memories/789" or a shortened name such as "reasoningEngines/456/memories/789". + """ parameter_model = types._GetAgentEngineMemoryRequestParameters( @@ -1649,7 +1653,9 @@ async def _list( name: str, config: Optional[types.ListAgentEngineMemoryConfigOrDict] = None, ) -> types.ListReasoningEnginesMemoriesResponse: - """Lists Agent Engine memories.""" + """ + Lists Agent Engine memories. + """ parameter_model = types._ListAgentEngineMemoryRequestParameters( name=name, @@ -1830,7 +1836,9 @@ async def _retrieve( ] = None, config: Optional[types.RetrieveAgentEngineMemoriesConfigOrDict] = None, ) -> types.RetrieveMemoriesResponse: - """Retrieves memories for an Agent Engine.""" + """ + Retrieves memories for an Agent Engine. + """ parameter_model = types._RetrieveAgentEngineMemoriesRequestParameters( name=name, @@ -1893,7 +1901,9 @@ async def _update( scope: Optional[dict[str, str]] = None, config: Optional[types.UpdateAgentEngineMemoryConfigOrDict] = None, ) -> types.AgentEngineMemoryOperation: - """Updates an Agent Engine memory.""" + """ + Updates an Agent Engine memory. 
+ """ parameter_model = types._UpdateAgentEngineMemoryRequestParameters( name=name, diff --git a/vertexai/_genai/prompt_management.py b/vertexai/_genai/prompt_management.py new file mode 100644 index 0000000000..25ff9f78f7 --- /dev/null +++ b/vertexai/_genai/prompt_management.py @@ -0,0 +1,1341 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Code generated by the Google Gen AI SDK generator DO NOT EDIT. + +import json +import logging +import time +from typing import Any, Optional, Union +from urllib.parse import urlencode + +from google.genai import _api_module +from google.genai import _common +from google.genai import types as genai_types +from google.genai._common import get_value_by_path as getv +from google.genai._common import set_value_by_path as setv + +from . import _prompt_management_utils +from . 
import types + + +logger = logging.getLogger("vertexai_genai.promptmanagement") + + +def _SchemaTextPromptDatasetMetadata_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["candidate_count"]) is not None: + setv(to_object, ["candidateCount"], getv(from_object, ["candidate_count"])) + + if getv(from_object, ["gcs_uri"]) is not None: + setv(to_object, ["gcsUri"], getv(from_object, ["gcs_uri"])) + + if getv(from_object, ["grounding_config"]) is not None: + setv(to_object, ["groundingConfig"], getv(from_object, ["grounding_config"])) + + if getv(from_object, ["has_prompt_variable"]) is not None: + setv( + to_object, ["hasPromptVariable"], getv(from_object, ["has_prompt_variable"]) + ) + + if getv(from_object, ["logprobs"]) is not None: + setv(to_object, ["logprobs"], getv(from_object, ["logprobs"])) + + if getv(from_object, ["max_output_tokens"]) is not None: + setv(to_object, ["maxOutputTokens"], getv(from_object, ["max_output_tokens"])) + + if getv(from_object, ["note"]) is not None: + setv(to_object, ["note"], getv(from_object, ["note"])) + + if getv(from_object, ["prompt_api_schema"]) is not None: + setv(to_object, ["promptApiSchema"], getv(from_object, ["prompt_api_schema"])) + + if getv(from_object, ["prompt_type"]) is not None: + setv(to_object, ["promptType"], getv(from_object, ["prompt_type"])) + + if getv(from_object, ["seed_enabled"]) is not None: + setv(to_object, ["seedEnabled"], getv(from_object, ["seed_enabled"])) + + if getv(from_object, ["seed_value"]) is not None: + setv(to_object, ["seedValue"], getv(from_object, ["seed_value"])) + + if getv(from_object, ["stop_sequences"]) is not None: + setv(to_object, ["stopSequences"], getv(from_object, ["stop_sequences"])) + + if getv(from_object, ["system_instruction"]) is not None: + setv( + to_object, ["systemInstruction"], getv(from_object, ["system_instruction"]) + ) + + if 
getv(from_object, ["system_instruction_gcs_uri"]) is not None: + setv( + to_object, + ["systemInstructionGcsUri"], + getv(from_object, ["system_instruction_gcs_uri"]), + ) + + if getv(from_object, ["temperature"]) is not None: + setv(to_object, ["temperature"], getv(from_object, ["temperature"])) + + if getv(from_object, ["text"]) is not None: + setv(to_object, ["text"], getv(from_object, ["text"])) + + if getv(from_object, ["top_k"]) is not None: + setv(to_object, ["topK"], getv(from_object, ["top_k"])) + + if getv(from_object, ["top_p"]) is not None: + setv(to_object, ["topP"], getv(from_object, ["top_p"])) + + return to_object + + +def _CreateDatasetParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["display_name"]) is not None: + setv(to_object, ["displayName"], getv(from_object, ["display_name"])) + + if getv(from_object, ["metadata_schema_uri"]) is not None: + setv( + to_object, ["metadataSchemaUri"], getv(from_object, ["metadata_schema_uri"]) + ) + + if getv(from_object, ["metadata"]) is not None: + setv( + to_object, + ["metadata"], + _SchemaTextPromptDatasetMetadata_to_vertex( + getv(from_object, ["metadata"]), to_object + ), + ) + + if getv(from_object, ["description"]) is not None: + setv(to_object, ["description"], getv(from_object, ["description"])) + + if getv(from_object, ["encryption_spec"]) is not None: + setv(to_object, ["encryptionSpec"], getv(from_object, ["encryption_spec"])) + + if getv(from_object, ["model_reference"]) is not None: + setv(to_object, ["modelReference"], getv(from_object, ["model_reference"])) + + return to_object + + +def _DatasetVersion_to_vertex( + from_object: 
Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["metadata"]) is not None: + setv( + to_object, + ["metadata"], + _SchemaTextPromptDatasetMetadata_to_vertex( + getv(from_object, ["metadata"]), to_object + ), + ) + + if getv(from_object, ["big_query_dataset_name"]) is not None: + setv( + to_object, + ["bigQueryDatasetName"], + getv(from_object, ["big_query_dataset_name"]), + ) + + if getv(from_object, ["create_time"]) is not None: + setv(to_object, ["createTime"], getv(from_object, ["create_time"])) + + if getv(from_object, ["display_name"]) is not None: + setv(to_object, ["displayName"], getv(from_object, ["display_name"])) + + if getv(from_object, ["etag"]) is not None: + setv(to_object, ["etag"], getv(from_object, ["etag"])) + + if getv(from_object, ["model_reference"]) is not None: + setv(to_object, ["modelReference"], getv(from_object, ["model_reference"])) + + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["satisfies_pzi"]) is not None: + setv(to_object, ["satisfiesPzi"], getv(from_object, ["satisfies_pzi"])) + + if getv(from_object, ["satisfies_pzs"]) is not None: + setv(to_object, ["satisfiesPzs"], getv(from_object, ["satisfies_pzs"])) + + if getv(from_object, ["update_time"]) is not None: + setv(to_object, ["updateTime"], getv(from_object, ["update_time"])) + + return to_object + + +def _CreateDatasetVersionParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["dataset_name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["dataset_name"])) + + if getv(from_object, ["dataset_version"]) is not None: + 
setv( + to_object, + ["datasetVersion"], + _DatasetVersion_to_vertex( + getv(from_object, ["dataset_version"]), to_object + ), + ) + + if getv(from_object, ["parent"]) is not None: + setv(to_object, ["parent"], getv(from_object, ["parent"])) + + if getv(from_object, ["display_name"]) is not None: + setv(to_object, ["displayName"], getv(from_object, ["display_name"])) + + return to_object + + +def _GetDatasetParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["name"])) + + return to_object + + +def _GetDatasetVersionParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["dataset_id"]) is not None: + setv(to_object, ["_url", "dataset_id"], getv(from_object, ["dataset_id"])) + + if getv(from_object, ["dataset_version_id"]) is not None: + setv( + to_object, + ["_url", "dataset_version_id"], + getv(from_object, ["dataset_version_id"]), + ) + + return to_object + + +def _GetDatasetOperationParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + if getv(from_object, ["dataset_id"]) is not None: + setv(to_object, ["_url", "dataset_id"], getv(from_object, ["dataset_id"])) + + if getv(from_object, ["operation_id"]) is not None: + setv(to_object, ["_url", "operation_id"], 
getv(from_object, ["operation_id"])) + + return to_object + + +def _DatasetOperation_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["metadata"]) is not None: + setv(to_object, ["metadata"], getv(from_object, ["metadata"])) + + if getv(from_object, ["done"]) is not None: + setv(to_object, ["done"], getv(from_object, ["done"])) + + if getv(from_object, ["error"]) is not None: + setv(to_object, ["error"], getv(from_object, ["error"])) + + if getv(from_object, ["response"]) is not None: + setv(to_object, ["response"], getv(from_object, ["response"])) + + return to_object + + +def _SchemaTextPromptDatasetMetadata_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["candidateCount"]) is not None: + setv(to_object, ["candidate_count"], getv(from_object, ["candidateCount"])) + + if getv(from_object, ["gcsUri"]) is not None: + setv(to_object, ["gcs_uri"], getv(from_object, ["gcsUri"])) + + if getv(from_object, ["groundingConfig"]) is not None: + setv(to_object, ["grounding_config"], getv(from_object, ["groundingConfig"])) + + if getv(from_object, ["hasPromptVariable"]) is not None: + setv( + to_object, ["has_prompt_variable"], getv(from_object, ["hasPromptVariable"]) + ) + + if getv(from_object, ["logprobs"]) is not None: + setv(to_object, ["logprobs"], getv(from_object, ["logprobs"])) + + if getv(from_object, ["maxOutputTokens"]) is not None: + setv(to_object, ["max_output_tokens"], getv(from_object, ["maxOutputTokens"])) + + if getv(from_object, ["note"]) is not None: + setv(to_object, ["note"], getv(from_object, ["note"])) + + if getv(from_object, ["promptApiSchema"]) is not None: + 
setv(to_object, ["prompt_api_schema"], getv(from_object, ["promptApiSchema"])) + + if getv(from_object, ["promptType"]) is not None: + setv(to_object, ["prompt_type"], getv(from_object, ["promptType"])) + + if getv(from_object, ["seedEnabled"]) is not None: + setv(to_object, ["seed_enabled"], getv(from_object, ["seedEnabled"])) + + if getv(from_object, ["seedValue"]) is not None: + setv(to_object, ["seed_value"], getv(from_object, ["seedValue"])) + + if getv(from_object, ["stopSequences"]) is not None: + setv(to_object, ["stop_sequences"], getv(from_object, ["stopSequences"])) + + if getv(from_object, ["systemInstruction"]) is not None: + setv( + to_object, ["system_instruction"], getv(from_object, ["systemInstruction"]) + ) + + if getv(from_object, ["systemInstructionGcsUri"]) is not None: + setv( + to_object, + ["system_instruction_gcs_uri"], + getv(from_object, ["systemInstructionGcsUri"]), + ) + + if getv(from_object, ["temperature"]) is not None: + setv(to_object, ["temperature"], getv(from_object, ["temperature"])) + + if getv(from_object, ["text"]) is not None: + setv(to_object, ["text"], getv(from_object, ["text"])) + + if getv(from_object, ["topK"]) is not None: + setv(to_object, ["top_k"], getv(from_object, ["topK"])) + + if getv(from_object, ["topP"]) is not None: + setv(to_object, ["top_p"], getv(from_object, ["topP"])) + + return to_object + + +def _Dataset_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["metadata"]) is not None: + setv( + to_object, + ["metadata"], + _SchemaTextPromptDatasetMetadata_from_vertex( + getv(from_object, ["metadata"]), to_object + ), + ) + + if getv(from_object, ["encryptionSpec"]) is not None: + setv(to_object, ["encryption_spec"], getv(from_object, ["encryptionSpec"])) + + if getv(from_object, ["createTime"]) is not None: + setv(to_object, ["create_time"], getv(from_object, 
["createTime"])) + + if getv(from_object, ["dataItemCount"]) is not None: + setv(to_object, ["data_item_count"], getv(from_object, ["dataItemCount"])) + + if getv(from_object, ["description"]) is not None: + setv(to_object, ["description"], getv(from_object, ["description"])) + + if getv(from_object, ["displayName"]) is not None: + setv(to_object, ["display_name"], getv(from_object, ["displayName"])) + + if getv(from_object, ["etag"]) is not None: + setv(to_object, ["etag"], getv(from_object, ["etag"])) + + if getv(from_object, ["labels"]) is not None: + setv(to_object, ["labels"], getv(from_object, ["labels"])) + + if getv(from_object, ["metadataArtifact"]) is not None: + setv(to_object, ["metadata_artifact"], getv(from_object, ["metadataArtifact"])) + + if getv(from_object, ["metadataSchemaUri"]) is not None: + setv( + to_object, ["metadata_schema_uri"], getv(from_object, ["metadataSchemaUri"]) + ) + + if getv(from_object, ["modelReference"]) is not None: + setv(to_object, ["model_reference"], getv(from_object, ["modelReference"])) + + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["satisfiesPzi"]) is not None: + setv(to_object, ["satisfies_pzi"], getv(from_object, ["satisfiesPzi"])) + + if getv(from_object, ["satisfiesPzs"]) is not None: + setv(to_object, ["satisfies_pzs"], getv(from_object, ["satisfiesPzs"])) + + if getv(from_object, ["savedQueries"]) is not None: + setv(to_object, ["saved_queries"], getv(from_object, ["savedQueries"])) + + if getv(from_object, ["updateTime"]) is not None: + setv(to_object, ["update_time"], getv(from_object, ["updateTime"])) + + return to_object + + +def _DatasetVersion_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["metadata"]) is not None: + setv( + to_object, + ["metadata"], + 
_SchemaTextPromptDatasetMetadata_from_vertex( + getv(from_object, ["metadata"]), to_object + ), + ) + + if getv(from_object, ["bigQueryDatasetName"]) is not None: + setv( + to_object, + ["big_query_dataset_name"], + getv(from_object, ["bigQueryDatasetName"]), + ) + + if getv(from_object, ["createTime"]) is not None: + setv(to_object, ["create_time"], getv(from_object, ["createTime"])) + + if getv(from_object, ["displayName"]) is not None: + setv(to_object, ["display_name"], getv(from_object, ["displayName"])) + + if getv(from_object, ["etag"]) is not None: + setv(to_object, ["etag"], getv(from_object, ["etag"])) + + if getv(from_object, ["modelReference"]) is not None: + setv(to_object, ["model_reference"], getv(from_object, ["modelReference"])) + + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["satisfiesPzi"]) is not None: + setv(to_object, ["satisfies_pzi"], getv(from_object, ["satisfiesPzi"])) + + if getv(from_object, ["satisfiesPzs"]) is not None: + setv(to_object, ["satisfies_pzs"], getv(from_object, ["satisfiesPzs"])) + + if getv(from_object, ["updateTime"]) is not None: + setv(to_object, ["update_time"], getv(from_object, ["updateTime"])) + + return to_object + + +class PromptManagement(_api_module.BaseModule): + + def _create_dataset_resource( + self, + *, + config: Optional[types.CreateDatasetConfigOrDict] = None, + name: Optional[str] = None, + display_name: Optional[str] = None, + metadata_schema_uri: Optional[str] = None, + metadata: Optional[types.SchemaTextPromptDatasetMetadataOrDict] = None, + description: Optional[str] = None, + encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None, + model_reference: Optional[str] = None, + ) -> types.DatasetOperation: + """ + Creates a dataset resource to store prompts. 
+ """ + + parameter_model = types._CreateDatasetParameters( + config=config, + name=name, + display_name=display_name, + metadata_schema_uri=metadata_schema_uri, + metadata=metadata, + description=description, + encryption_spec=encryption_spec, + model_reference=model_reference, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateDatasetParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets".format_map(request_url_dict) + else: + path = "datasets" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("post", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetOperation_from_vertex(response_dict) + + return_value = types.DatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _create_dataset_version_resource( + self, + *, + config: Optional[types.CreateDatasetVersionConfigOrDict] = None, + dataset_name: Optional[str] = None, + dataset_version: Optional[types.DatasetVersionOrDict] = None, + parent: Optional[str] = None, + display_name: Optional[str] = None, + ) -> types.DatasetOperation: + """ + Creates a dataset version 
resource to store prompts. + """ + + parameter_model = types._CreateDatasetVersionParameters( + config=config, + dataset_name=dataset_name, + dataset_version=dataset_version, + parent=parent, + display_name=display_name, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateDatasetVersionParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{name}/datasetVersions".format_map(request_url_dict) + else: + path = "datasets/{name}/datasetVersions" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("post", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetOperation_from_vertex(response_dict) + + return_value = types.DatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _get_dataset_resource( + self, + *, + config: Optional[types.VertexBaseConfigOrDict] = None, + name: Optional[str] = None, + ) -> types.Dataset: + """ + Gets a dataset resource to store prompts. 
+ """ + + parameter_model = types._GetDatasetParameters( + config=config, + name=name, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetDatasetParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{name}".format_map(request_url_dict) + else: + path = "datasets/{name}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _Dataset_from_vertex(response_dict) + + return_value = types.Dataset._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _get_dataset_version_resource( + self, + *, + config: Optional[types.VertexBaseConfigOrDict] = None, + dataset_id: Optional[str] = None, + dataset_version_id: Optional[str] = None, + ) -> types.DatasetVersion: + """ + Gets a dataset version resource to store prompts. 
+ """ + + parameter_model = types._GetDatasetVersionParameters( + config=config, + dataset_id=dataset_id, + dataset_version_id=dataset_version_id, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetDatasetVersionParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{dataset_id}/datasetVersions/{dataset_version_id}".format_map( + request_url_dict + ) + else: + path = "datasets/{dataset_id}/datasetVersions/{dataset_version_id}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetVersion_from_vertex(response_dict) + + return_value = types.DatasetVersion._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _get_dataset_operation( + self, + *, + config: Optional[types.GetDatasetOperationConfigOrDict] = None, + dataset_id: Optional[str] = None, + operation_id: Optional[str] = None, + ) -> types.DatasetOperation: + """ + Gets the operation from creating a dataset version. 
+ """ + + parameter_model = types._GetDatasetOperationParameters( + config=config, + dataset_id=dataset_id, + operation_id=operation_id, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetDatasetOperationParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{dataset_id}/operations/{operation_id}".format_map( + request_url_dict + ) + else: + path = "datasets/{dataset_id}/operations/{operation_id}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetOperation_from_vertex(response_dict) + + return_value = types.DatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def create_version( + self, + *, + prompt: types.PromptOrDict, + config: Optional[types.CreatePromptConfigOrDict] = None, + ) -> types.Prompt: + """Creates a new version of a prompt in a Vertex Dataset resource. + + If config.prompt_id is not provided, this method creates a new Dataset + resource for the prompt and a new Dataset Version resource under that + Dataset. 
+ If config.prompt_id is provided, this method creates a new Dataset + Version resource under the existing Dataset resource with the provided + prompt_id. + + When creating new Dataset and Dataset Version resources, this waits for + the Dataset operations to complete before returning. + + Args: + prompt: The prompt to create a version for. + config: Optional configuration for creating the prompt version. + + Returns: + A types.Prompt object representing the prompt with its associated + Dataset and Dataset Version resources. + """ + if isinstance(prompt, dict): + prompt = types.Prompt(**prompt) + if isinstance(config, dict): + config = types.CreatePromptConfig(**config) + elif not config: + config = types.CreatePromptConfig() + + if config.encryption_spec and config.prompt_id: + raise ValueError( + "Encryption spec can only be used for creating new prompts, not for creating new prompt versions." + ) + + if not prompt.prompt_data: + raise ValueError("Prompt data must be provided.") + if not prompt.prompt_data.contents: + raise ValueError("Prompt contents must be provided.") + if not prompt.prompt_data.model: + raise ValueError("Model name must be provided.") + if ( + prompt.prompt_data + and prompt.prompt_data.contents + and len(prompt.prompt_data.contents) > 1 + ): + raise ValueError("Multi-turn prompts are not currently supported.") + + prompt_metadata = _prompt_management_utils._create_dataset_metadata_from_prompt( + prompt, + variables=( + prompt.prompt_data.variables + if prompt.prompt_data and prompt.prompt_data.variables + else None + ), + ) + + if config and config.prompt_id: + prompt_id = config.prompt_id + else: + prompt_id = None + + if config and config.version_display_name: + version_name = config.version_display_name + else: + version_name = None + + dataset_id = prompt_id + if ( + dataset_id + and prompt._dataset + and dataset_id != prompt._dataset.name.split("/")[-1] + ): + # prompt_id takes precedence over existing prompt resource if provided. 
+ logger.info( + f"The provided prompt_id {prompt_id} is different from the" + f" existing prompt resource {prompt._dataset.name} and will" + " take precedence. Creating a new prompt version for prompt" + f" with id: {prompt_id}." + ) + if not dataset_id and prompt._dataset and prompt._dataset.name: + dataset_id = prompt._dataset.name.split("/")[-1] + + # Step 1: Create the dataset resource for the prompt if it doesn't exist. + if not dataset_id: + create_prompt_dataset_operation = self._create_dataset_resource( + display_name=( + config.prompt_display_name + if config and config.prompt_display_name + else f"prompt_{time.strftime('%Y%m%d-%H%M%S')}" + ), + name=f"projects/{self._api_client.project}/locations/{self._api_client.location}", + metadata_schema_uri=_prompt_management_utils.PROMPT_SCHEMA_URI, + metadata=prompt_metadata, + model_reference=prompt.prompt_data.model, + encryption_spec=( + config.encryption_spec + if config and config.encryption_spec + else None + ), + ) + dataset_resource_name = self._wait_for_operation( + operation=create_prompt_dataset_operation, + timeout=config.timeout if config else 90, + ) + dataset_id = dataset_resource_name.split("/")[-1] + + # Step 2: Get the dataset resource + dataset_resource = self._get_dataset_resource( + name=dataset_id, + ) + prompt._dataset = dataset_resource + + # Step 3: Create the dataset version + create_dataset_version_operation = self._create_dataset_version_resource( + dataset_name=dataset_id, + display_name=( + version_name + if version_name + else f"prompt_version_{time.strftime('%Y%m%d-%H%M%S')}" + ), + ) + dataset_version_resource_name = self._wait_for_operation( + operation=create_dataset_version_operation, + timeout=config.timeout if config else 90, + ) + + # Step 4: Get the dataset version resource and return it with the prompt + dataset_version_resource = self._get_dataset_version_resource( + dataset_id=dataset_id, + dataset_version_id=dataset_version_resource_name.split("/")[-1], + ) + 
prompt._dataset_version = dataset_version_resource + return prompt + + def _wait_for_operation( + self, + operation: types.DatasetOperation, + timeout: int, + ) -> str: + """Waits for a dataset operation to complete. + + Args: + operation: The dataset operation to wait for. + timeout: The maximum time to wait for the operation to complete. + + Returns: + The name of the Dataset resource from the operation result. + + Raises: + TimeoutError: If the operation does not complete within the timeout. + ValueError: If the operation fails. + """ + done = False + prompt_dataset_operation: Optional[types.DatasetOperation] = None + + response_operation_name = operation.name + dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0] + operation_id = response_operation_name.split("/")[-1] + + start_time = time.time() + sleep_duration = 5 + wait_multiplier = 2 + max_wait_time = 60 + previous_time = time.time() + + while not done: + if (time.time() - start_time) > timeout: + raise TimeoutError( + "Create prompt operation did not complete within the" + f" specified timeout of {timeout} seconds." 
+ ) + current_time = time.time() + if current_time - previous_time >= sleep_duration: + sleep_duration = min(sleep_duration * wait_multiplier, max_wait_time) + previous_time = current_time + time.sleep(sleep_duration) + prompt_dataset_operation = self._get_dataset_operation( + dataset_id=dataset_id, + operation_id=operation_id, + ) + done = ( + prompt_dataset_operation.done + if hasattr(prompt_dataset_operation, "done") + else False + ) + if ( + not prompt_dataset_operation + or prompt_dataset_operation.response is None + or prompt_dataset_operation.response.get("name") is None + ): + raise ValueError("Error creating prompt version resource.") + if ( + hasattr(prompt_dataset_operation, "error") + and prompt_dataset_operation.error is not None + ): + raise ValueError( + f"Error creating prompt version resource: {prompt_dataset_operation.error}" + ) + return prompt_dataset_operation.response.get("name") + + def get( + self, + *, + prompt_id: str, + config: Optional[types.GetPromptConfig] = None, + ) -> types.Prompt: + """Gets a prompt resource from a Vertex Dataset. + + Args: + prompt_id: The id of the Vertex Dataset resource containing the prompt. For example, if the prompt resource name is "projects/123/locations/us-central1/datasets/456", then the prompt_id is "456". + config: Optional configuration for getting the prompt. + + Returns: + A types.Prompt object representing the prompt with its associated Dataset and Dataset Version resources. 
+ """ + + prompt_dataset_resource = self._get_dataset_resource(name=prompt_id) + prompt = _prompt_management_utils._create_prompt_from_dataset_metadata( + prompt_dataset_resource, + ) + prompt._dataset = prompt_dataset_resource + + if config and config.version_id: + prompt_version_resource = self._get_dataset_version_resource( + dataset_id=prompt_id, + dataset_version_id=config.version_id, + ) + prompt._dataset_version = prompt_version_resource + + return prompt + + +class AsyncPromptManagement(_api_module.BaseModule): + + async def _create_dataset_resource( + self, + *, + config: Optional[types.CreateDatasetConfigOrDict] = None, + name: Optional[str] = None, + display_name: Optional[str] = None, + metadata_schema_uri: Optional[str] = None, + metadata: Optional[types.SchemaTextPromptDatasetMetadataOrDict] = None, + description: Optional[str] = None, + encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None, + model_reference: Optional[str] = None, + ) -> types.DatasetOperation: + """ + Creates a dataset resource to store prompts. + """ + + parameter_model = types._CreateDatasetParameters( + config=config, + name=name, + display_name=display_name, + metadata_schema_uri=metadata_schema_uri, + metadata=metadata, + description=description, + encryption_spec=encryption_spec, + model_reference=model_reference, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateDatasetParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets".format_map(request_url_dict) + else: + path = "datasets" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "post", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetOperation_from_vertex(response_dict) + + return_value = types.DatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _create_dataset_version_resource( + self, + *, + config: Optional[types.CreateDatasetVersionConfigOrDict] = None, + dataset_name: Optional[str] = None, + dataset_version: Optional[types.DatasetVersionOrDict] = None, + parent: Optional[str] = None, + display_name: Optional[str] = None, + ) -> types.DatasetOperation: + """ + Creates a dataset version resource to store prompts. 
+ """ + + parameter_model = types._CreateDatasetVersionParameters( + config=config, + dataset_name=dataset_name, + dataset_version=dataset_version, + parent=parent, + display_name=display_name, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateDatasetVersionParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{name}/datasetVersions".format_map(request_url_dict) + else: + path = "datasets/{name}/datasetVersions" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "post", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetOperation_from_vertex(response_dict) + + return_value = types.DatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _get_dataset_resource( + self, + *, + config: Optional[types.VertexBaseConfigOrDict] = None, + name: Optional[str] = None, + ) -> types.Dataset: + """ + Gets a dataset resource to store prompts. 
+ """ + + parameter_model = types._GetDatasetParameters( + config=config, + name=name, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetDatasetParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{name}".format_map(request_url_dict) + else: + path = "datasets/{name}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _Dataset_from_vertex(response_dict) + + return_value = types.Dataset._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _get_dataset_version_resource( + self, + *, + config: Optional[types.VertexBaseConfigOrDict] = None, + dataset_id: Optional[str] = None, + dataset_version_id: Optional[str] = None, + ) -> types.DatasetVersion: + """ + Gets a dataset version resource to store prompts. 
+ """ + + parameter_model = types._GetDatasetVersionParameters( + config=config, + dataset_id=dataset_id, + dataset_version_id=dataset_version_id, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetDatasetVersionParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{dataset_id}/datasetVersions/{dataset_version_id}".format_map( + request_url_dict + ) + else: + path = "datasets/{dataset_id}/datasetVersions/{dataset_version_id}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetVersion_from_vertex(response_dict) + + return_value = types.DatasetVersion._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _get_dataset_operation( + self, + *, + config: Optional[types.GetDatasetOperationConfigOrDict] = None, + dataset_id: Optional[str] = None, + operation_id: Optional[str] = None, + ) -> types.DatasetOperation: + """ + Gets the operation from creating a dataset version. 
+ """ + + parameter_model = types._GetDatasetOperationParameters( + config=config, + dataset_id=dataset_id, + operation_id=operation_id, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetDatasetOperationParameters_to_vertex(parameter_model) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "datasets/{dataset_id}/operations/{operation_id}".format_map( + request_url_dict + ) + else: + path = "datasets/{dataset_id}/operations/{operation_id}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DatasetOperation_from_vertex(response_dict) + + return_value = types.DatasetOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value diff --git a/vertexai/_genai/prompt_optimizer.py b/vertexai/_genai/prompt_optimizer.py index 865b7afb94..1ac01e68c2 100644 --- a/vertexai/_genai/prompt_optimizer.py +++ b/vertexai/_genai/prompt_optimizer.py @@ -69,11 +69,7 @@ def _CustomJobSpec_to_vertex( ) if getv(from_object, ["enable_web_access"]) is not None: - setv( - to_object, - ["enableWebAccess"], - getv(from_object, 
["enable_web_access"]), - ) + setv(to_object, ["enableWebAccess"], getv(from_object, ["enable_web_access"])) if getv(from_object, ["experiment"]) is not None: setv(to_object, ["experiment"], getv(from_object, ["experiment"])) @@ -109,31 +105,19 @@ def _CustomJobSpec_to_vertex( ) if getv(from_object, ["reserved_ip_ranges"]) is not None: - setv( - to_object, - ["reservedIpRanges"], - getv(from_object, ["reserved_ip_ranges"]), - ) + setv(to_object, ["reservedIpRanges"], getv(from_object, ["reserved_ip_ranges"])) if getv(from_object, ["scheduling"]) is not None: setv(to_object, ["scheduling"], getv(from_object, ["scheduling"])) if getv(from_object, ["service_account"]) is not None: - setv( - to_object, - ["serviceAccount"], - getv(from_object, ["service_account"]), - ) + setv(to_object, ["serviceAccount"], getv(from_object, ["service_account"])) if getv(from_object, ["tensorboard"]) is not None: setv(to_object, ["tensorboard"], getv(from_object, ["tensorboard"])) if getv(from_object, ["worker_pool_specs"]) is not None: - setv( - to_object, - ["workerPoolSpecs"], - getv(from_object, ["worker_pool_specs"]), - ) + setv(to_object, ["workerPoolSpecs"], getv(from_object, ["worker_pool_specs"])) return to_object @@ -153,22 +137,21 @@ def _CustomJob_to_vertex( _CustomJobSpec_to_vertex(getv(from_object, ["job_spec"]), to_object), ) + if getv(from_object, ["encryption_spec"]) is not None: + setv(parent_object, ["encryptionSpec"], getv(from_object, ["encryption_spec"])) + + if getv(from_object, ["state"]) is not None: + setv(to_object, ["state"], getv(from_object, ["state"])) + + if getv(from_object, ["error"]) is not None: + setv(parent_object, ["error"], getv(from_object, ["error"])) + if getv(from_object, ["create_time"]) is not None: setv(to_object, ["createTime"], getv(from_object, ["create_time"])) - if getv(from_object, ["encryption_spec"]) is not None: - setv( - to_object, - ["encryptionSpec"], - getv(from_object, ["encryption_spec"]), - ) - if getv(from_object, 
["end_time"]) is not None: setv(to_object, ["endTime"], getv(from_object, ["end_time"])) - if getv(from_object, ["error"]) is not None: - setv(to_object, ["error"], getv(from_object, ["error"])) - if getv(from_object, ["labels"]) is not None: setv(to_object, ["labels"], getv(from_object, ["labels"])) @@ -184,9 +167,6 @@ def _CustomJob_to_vertex( if getv(from_object, ["start_time"]) is not None: setv(to_object, ["startTime"], getv(from_object, ["start_time"])) - if getv(from_object, ["state"]) is not None: - setv(to_object, ["state"], getv(from_object, ["state"])) - if getv(from_object, ["update_time"]) is not None: setv(to_object, ["updateTime"], getv(from_object, ["update_time"])) @@ -259,11 +239,7 @@ def _CustomJobSpec_from_vertex( ) if getv(from_object, ["enableWebAccess"]) is not None: - setv( - to_object, - ["enable_web_access"], - getv(from_object, ["enableWebAccess"]), - ) + setv(to_object, ["enable_web_access"], getv(from_object, ["enableWebAccess"])) if getv(from_object, ["experiment"]) is not None: setv(to_object, ["experiment"], getv(from_object, ["experiment"])) @@ -299,31 +275,19 @@ def _CustomJobSpec_from_vertex( ) if getv(from_object, ["reservedIpRanges"]) is not None: - setv( - to_object, - ["reserved_ip_ranges"], - getv(from_object, ["reservedIpRanges"]), - ) + setv(to_object, ["reserved_ip_ranges"], getv(from_object, ["reservedIpRanges"])) if getv(from_object, ["scheduling"]) is not None: setv(to_object, ["scheduling"], getv(from_object, ["scheduling"])) if getv(from_object, ["serviceAccount"]) is not None: - setv( - to_object, - ["service_account"], - getv(from_object, ["serviceAccount"]), - ) + setv(to_object, ["service_account"], getv(from_object, ["serviceAccount"])) if getv(from_object, ["tensorboard"]) is not None: setv(to_object, ["tensorboard"], getv(from_object, ["tensorboard"])) if getv(from_object, ["workerPoolSpecs"]) is not None: - setv( - to_object, - ["worker_pool_specs"], - getv(from_object, ["workerPoolSpecs"]), - ) + 
setv(to_object, ["worker_pool_specs"], getv(from_object, ["workerPoolSpecs"])) return to_object @@ -343,22 +307,21 @@ def _CustomJob_from_vertex( _CustomJobSpec_from_vertex(getv(parent_object, ["jobSpec"]), to_object), ) + if getv(parent_object, ["encryptionSpec"]) is not None: + setv(to_object, ["encryption_spec"], getv(parent_object, ["encryptionSpec"])) + + if getv(from_object, ["state"]) is not None: + setv(to_object, ["state"], getv(from_object, ["state"])) + + if getv(parent_object, ["error"]) is not None: + setv(to_object, ["error"], getv(parent_object, ["error"])) + if getv(from_object, ["createTime"]) is not None: setv(to_object, ["create_time"], getv(from_object, ["createTime"])) - if getv(from_object, ["encryptionSpec"]) is not None: - setv( - to_object, - ["encryption_spec"], - getv(from_object, ["encryptionSpec"]), - ) - if getv(from_object, ["endTime"]) is not None: setv(to_object, ["end_time"], getv(from_object, ["endTime"])) - if getv(from_object, ["error"]) is not None: - setv(to_object, ["error"], getv(from_object, ["error"])) - if getv(from_object, ["labels"]) is not None: setv(to_object, ["labels"], getv(from_object, ["labels"])) @@ -374,9 +337,6 @@ def _CustomJob_from_vertex( if getv(from_object, ["startTime"]) is not None: setv(to_object, ["start_time"], getv(from_object, ["startTime"])) - if getv(from_object, ["state"]) is not None: - setv(to_object, ["state"], getv(from_object, ["state"])) - if getv(from_object, ["updateTime"]) is not None: setv(to_object, ["update_time"], getv(from_object, ["updateTime"])) @@ -395,7 +355,9 @@ def _optimize_prompt( content: Optional[genai_types.ContentOrDict] = None, config: Optional[types.OptimizeConfigOrDict] = None, ) -> types.OptimizeResponseEndpoint: - """Optimize a single prompt.""" + """ + Optimize a single prompt. 
+ """ parameter_model = types._OptimizeRequestParameters( content=content, @@ -447,9 +409,11 @@ def _create_custom_job_resource( self, *, custom_job: types.CustomJobOrDict, - config: Optional[types.BaseConfigOrDict] = None, + config: Optional[types.VertexBaseConfigOrDict] = None, ) -> types.CustomJob: - """Creates a custom job.""" + """ + Creates a custom job. + """ parameter_model = types._CustomJobParameters( custom_job=custom_job, @@ -498,9 +462,11 @@ def _create_custom_job_resource( return return_value def _get_custom_job( - self, *, name: str, config: Optional[types.BaseConfigOrDict] = None + self, *, name: str, config: Optional[types.VertexBaseConfigOrDict] = None ) -> types.CustomJob: - """Gets a custom job.""" + """ + Gets a custom job. + """ parameter_model = types._GetCustomJobParameters( name=name, @@ -553,14 +519,14 @@ def _get_custom_job( def _wait_for_completion(self, job_name: str) -> types.CustomJob: JOB_COMPLETE_STATES = [ - types.JobState.JOB_STATE_SUCCEEDED, - types.JobState.JOB_STATE_FAILED, - types.JobState.JOB_STATE_CANCELLED, - types.JobState.JOB_STATE_PAUSED, + genai_types.JobState.JOB_STATE_SUCCEEDED, + genai_types.JobState.JOB_STATE_FAILED, + genai_types.JobState.JOB_STATE_CANCELLED, + genai_types.JobState.JOB_STATE_PAUSED, ] JOB_ERROR_STATES = [ - types.JobState.JOB_STATE_FAILED, - types.JobState.JOB_STATE_CANCELLED, + genai_types.JobState.JOB_STATE_FAILED, + genai_types.JobState.JOB_STATE_CANCELLED, ] log_wait = 5 @@ -597,8 +563,7 @@ def optimize( Args: method: The method for optimizing multiple prompts. config: PromptOptimizerVAPOConfig instance containing the - configuration for prompt optimization. - + configuration for prompt optimization. Returns: The custom job that was created. 
""" @@ -642,7 +607,7 @@ def optimize( job_spec = types.CustomJobSpec( worker_pool_specs=worker_pool_specs, - base_output_directory=types.GcsDestination(output_uri_prefix=bucket), + base_output_directory=genai_types.GcsDestination(output_uri_prefix=bucket), service_account=service_account, ) @@ -683,16 +648,14 @@ def optimize_prompt( Args: prompt: The prompt to optimize. - config: The configuration for prompt optimization. Currently, config - is not supported for a single prompt optimization. - + config: The configuration for prompt optimization. Currently, config is + not supported for a single prompt optimization. Returns: The parsed response from the API request. """ if config is not None: raise ValueError( - "Currently, config is not supported for a single prompt" - " optimization." + "Currently, config is not supported for a single prompt optimization." ) prompt = genai_types.Content(parts=[genai_types.Part(text=prompt)], role="user") @@ -734,7 +697,7 @@ def _custom_optimize_prompt( # TODO: remove the hack that pops config. request_dict.pop("config", None) - http_options: Optional[types.HttpOptions] = None + http_options: Optional[genai_types.HttpOptions] = None if ( parameter_model.config is not None and parameter_model.config.http_options is not None @@ -778,7 +741,9 @@ async def _optimize_prompt( content: Optional[genai_types.ContentOrDict] = None, config: Optional[types.OptimizeConfigOrDict] = None, ) -> types.OptimizeResponseEndpoint: - """Optimize a single prompt.""" + """ + Optimize a single prompt. + """ parameter_model = types._OptimizeRequestParameters( content=content, @@ -832,9 +797,11 @@ async def _create_custom_job_resource( self, *, custom_job: types.CustomJobOrDict, - config: Optional[types.BaseConfigOrDict] = None, + config: Optional[types.VertexBaseConfigOrDict] = None, ) -> types.CustomJob: - """Creates a custom job.""" + """ + Creates a custom job. 
+ """ parameter_model = types._CustomJobParameters( custom_job=custom_job, @@ -885,9 +852,11 @@ async def _create_custom_job_resource( return return_value async def _get_custom_job( - self, *, name: str, config: Optional[types.BaseConfigOrDict] = None + self, *, name: str, config: Optional[types.VertexBaseConfigOrDict] = None ) -> types.CustomJob: - """Gets a custom job.""" + """ + Gets a custom job. + """ parameter_model = types._GetCustomJobParameters( name=name, @@ -945,6 +914,7 @@ async def optimize( ) -> types.CustomJob: """Call async Vertex AI Prompt Optimizer (VAPO). + Note: The `wait_for_completion` parameter in the config will be ignored when using the AsyncClient, as it is not supported. @@ -962,7 +932,6 @@ async def optimize( vapo is supported). config: PromptOptimizerVAPOConfig instance containing the configuration for prompt optimization. - Returns: The custom job that was created. """ @@ -974,8 +943,7 @@ async def optimize( if config.wait_for_completion: logger.info( - "Ignoring wait_for_completion=True since the AsyncClient does" - " not support it." + "Ignoring wait_for_completion=True since the AsyncClient does not support it." ) if config.optimizer_job_display_name: @@ -1011,7 +979,7 @@ async def optimize( job_spec = types.CustomJobSpec( worker_pool_specs=worker_pool_specs, - base_output_directory=types.GcsDestination(output_uri_prefix=bucket), + base_output_directory=genai_types.GcsDestination(output_uri_prefix=bucket), service_account=service_account, ) @@ -1111,21 +1079,18 @@ async def optimize_prompt( Example usage: client = vertexai.Client(project=PROJECT_NAME, location='us-central1') prompt = "Generate system instructions for analyzing medical articles" - response = await - client.aio.prompt_optimizer.optimize_prompt(prompt=prompt) + response = await client.aio.prompt_optimizer.optimize_prompt(prompt=prompt) Args: prompt: The prompt to optimize. - config: The configuration for prompt optimization. 
Currently, config - is not supported for a single prompt optimization. - + config: The configuration for prompt optimization. Currently, config is + not supported for a single prompt optimization. Returns: The parsed response from the API request. """ if config is not None: raise ValueError( - "Currently, config is not supported for a single prompt" - " optimization." + "Currently, config is not supported for a single prompt optimization." ) prompt = genai_types.Content(parts=[genai_types.Part(text=prompt)], role="user") diff --git a/vertexai/_genai/sandboxes.py b/vertexai/_genai/sandboxes.py new file mode 100644 index 0000000000..f69d2e8441 --- /dev/null +++ b/vertexai/_genai/sandboxes.py @@ -0,0 +1,1233 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Code generated by the Google Gen AI SDK generator DO NOT EDIT. + +import base64 +import functools +import json +import logging +from typing import Any, Iterator, Optional, Union +from urllib.parse import urlencode + +from google.genai import _api_module +from google.genai import _common +from google.genai._common import get_value_by_path as getv +from google.genai._common import set_value_by_path as setv +from google.genai.pagers import Pager + +from . import _agent_engines_utils +from . 
import types + + +logger = logging.getLogger("vertexai_genai.sandboxes") + + +def _SandboxEnvironmentSpec_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["code_execution_environment"]) is not None: + setv( + to_object, + ["codeExecutionEnvironment"], + getv(from_object, ["code_execution_environment"]), + ) + + return to_object + + +def _CreateAgentEngineSandboxConfig_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + + if getv(from_object, ["display_name"]) is not None: + setv(parent_object, ["displayName"], getv(from_object, ["display_name"])) + + if getv(from_object, ["description"]) is not None: + setv(parent_object, ["description"], getv(from_object, ["description"])) + + return to_object + + +def _CreateAgentEngineSandboxRequestParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["name"])) + + if getv(from_object, ["spec"]) is not None: + setv( + to_object, + ["spec"], + _SandboxEnvironmentSpec_to_vertex(getv(from_object, ["spec"]), to_object), + ) + + if getv(from_object, ["config"]) is not None: + setv( + to_object, + ["config"], + _CreateAgentEngineSandboxConfig_to_vertex( + getv(from_object, ["config"]), to_object + ), + ) + + return to_object + + +def _DeleteAgentEngineSandboxRequestParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["name"])) + + if getv(from_object, 
["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + return to_object + + +def _Chunk_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["mime_type"]) is not None: + setv(to_object, ["mimeType"], getv(from_object, ["mime_type"])) + + if getv(from_object, ["data"]) is not None: + setv(to_object, ["data"], getv(from_object, ["data"])) + + if getv(from_object, ["metadata"]) is not None: + setv(to_object, ["metadata"], getv(from_object, ["metadata"])) + + return to_object + + +def _ExecuteCodeAgentEngineSandboxRequestParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["name"])) + + if getv(from_object, ["inputs"]) is not None: + setv( + to_object, + ["inputs"], + [ + _Chunk_to_vertex(item, to_object) + for item in getv(from_object, ["inputs"]) + ], + ) + + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + return to_object + + +def _GetAgentEngineSandboxRequestParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["name"])) + + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + return to_object + + +def _ListAgentEngineSandboxesConfig_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + + if getv(from_object, ["page_size"]) is not None: + 
setv(parent_object, ["_query", "pageSize"], getv(from_object, ["page_size"])) + + if getv(from_object, ["page_token"]) is not None: + setv(parent_object, ["_query", "pageToken"], getv(from_object, ["page_token"])) + + if getv(from_object, ["filter"]) is not None: + setv(parent_object, ["_query", "filter"], getv(from_object, ["filter"])) + + return to_object + + +def _ListAgentEngineSandboxesRequestParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["_url", "name"], getv(from_object, ["name"])) + + if getv(from_object, ["config"]) is not None: + setv( + to_object, + ["config"], + _ListAgentEngineSandboxesConfig_to_vertex( + getv(from_object, ["config"]), to_object + ), + ) + + return to_object + + +def _GetAgentEngineSandboxOperationParameters_to_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["operation_name"]) is not None: + setv( + to_object, ["_url", "operationName"], getv(from_object, ["operation_name"]) + ) + + if getv(from_object, ["config"]) is not None: + setv(to_object, ["config"], getv(from_object, ["config"])) + + return to_object + + +def _SandboxEnvironment_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["createTime"]) is not None: + setv(to_object, ["create_time"], getv(from_object, ["createTime"])) + + if getv(from_object, ["displayName"]) is not None: + setv(to_object, ["display_name"], getv(from_object, ["displayName"])) + + if getv(from_object, ["metadata"]) is not None: + setv(to_object, ["metadata"], getv(from_object, ["metadata"])) + + if getv(from_object, ["name"]) is not None: + 
setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["spec"]) is not None: + setv(to_object, ["spec"], getv(from_object, ["spec"])) + + if getv(from_object, ["state"]) is not None: + setv(to_object, ["state"], getv(from_object, ["state"])) + + if getv(from_object, ["updateTime"]) is not None: + setv(to_object, ["update_time"], getv(from_object, ["updateTime"])) + + return to_object + + +def _AgentEngineSandboxOperation_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["metadata"]) is not None: + setv(to_object, ["metadata"], getv(from_object, ["metadata"])) + + if getv(from_object, ["done"]) is not None: + setv(to_object, ["done"], getv(from_object, ["done"])) + + if getv(from_object, ["error"]) is not None: + setv(to_object, ["error"], getv(from_object, ["error"])) + + if getv(from_object, ["response"]) is not None: + setv( + to_object, + ["response"], + _SandboxEnvironment_from_vertex(getv(from_object, ["response"]), to_object), + ) + + return to_object + + +def _DeleteAgentEngineSandboxOperation_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["name"]) is not None: + setv(to_object, ["name"], getv(from_object, ["name"])) + + if getv(from_object, ["metadata"]) is not None: + setv(to_object, ["metadata"], getv(from_object, ["metadata"])) + + if getv(from_object, ["done"]) is not None: + setv(to_object, ["done"], getv(from_object, ["done"])) + + if getv(from_object, ["error"]) is not None: + setv(to_object, ["error"], getv(from_object, ["error"])) + + return to_object + + +def _ExecuteSandboxEnvironmentResponse_from_vertex( + from_object: Union[dict[str, Any], 
object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["outputs"]) is not None: + setv(to_object, ["outputs"], getv(from_object, ["outputs"])) + + return to_object + + +def _ListAgentEngineSandboxesResponse_from_vertex( + from_object: Union[dict[str, Any], object], + parent_object: Optional[dict[str, Any]] = None, +) -> dict[str, Any]: + to_object: dict[str, Any] = {} + if getv(from_object, ["sdkHttpResponse"]) is not None: + setv(to_object, ["sdk_http_response"], getv(from_object, ["sdkHttpResponse"])) + + if getv(from_object, ["nextPageToken"]) is not None: + setv(to_object, ["next_page_token"], getv(from_object, ["nextPageToken"])) + + if getv(from_object, ["sandboxEnvironments"]) is not None: + setv( + to_object, + ["sandbox_environments"], + [ + _SandboxEnvironment_from_vertex(item, to_object) + for item in getv(from_object, ["sandboxEnvironments"]) + ], + ) + + return to_object + + +class Sandboxes(_api_module.BaseModule): + + def _create( + self, + *, + name: str, + spec: Optional[types.SandboxEnvironmentSpecOrDict] = None, + config: Optional[types.CreateAgentEngineSandboxConfigOrDict] = None, + ) -> types.AgentEngineSandboxOperation: + """ + Creates a new sandbox in the Agent Engine. 
+ """ + + parameter_model = types._CreateAgentEngineSandboxRequestParameters( + name=name, + spec=spec, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}/sandboxEnvironments".format_map(request_url_dict) + else: + path = "{name}/sandboxEnvironments" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("post", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _AgentEngineSandboxOperation_from_vertex(response_dict) + + return_value = types.AgentEngineSandboxOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _delete( + self, + *, + name: str, + config: Optional[types.DeleteAgentEngineSandboxConfigOrDict] = None, + ) -> types.DeleteAgentEngineSandboxOperation: + """ + Delete an Agent Engine sandbox. + + Args: + name (str): + Required. The name of the Agent Engine sandbox to be deleted. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sandboxEnvironments/{sandbox}`. 
+ + """ + + parameter_model = types._DeleteAgentEngineSandboxRequestParameters( + name=name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _DeleteAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}".format_map(request_url_dict) + else: + path = "{name}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("delete", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DeleteAgentEngineSandboxOperation_from_vertex( + response_dict + ) + + return_value = types.DeleteAgentEngineSandboxOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _execute_code( + self, + *, + name: str, + inputs: Optional[list[types.ChunkOrDict]] = None, + config: Optional[types.ExecuteCodeAgentEngineSandboxConfigOrDict] = None, + ) -> types.ExecuteSandboxEnvironmentResponse: + """ + Execute code in an Agent Engine sandbox. 
+ """ + + parameter_model = types._ExecuteCodeAgentEngineSandboxRequestParameters( + name=name, + inputs=inputs, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _ExecuteCodeAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}/:execute".format_map(request_url_dict) + else: + path = "{name}/:execute" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("post", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _ExecuteSandboxEnvironmentResponse_from_vertex( + response_dict + ) + + return_value = types.ExecuteSandboxEnvironmentResponse._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _get( + self, + *, + name: str, + config: Optional[types.GetAgentEngineSandboxConfigOrDict] = None, + ) -> types.SandboxEnvironment: + """ + Gets an agent engine sandbox. + + Args: + name (str): Required. A fully-qualified resource name or ID such as + "projects/123/locations/us-central1/reasoningEngines/456/sandboxEnvironments/789" + or a shortened name such as "reasoningEngines/456/sandboxEnvironments/789". 
+ + """ + + parameter_model = types._GetAgentEngineSandboxRequestParameters( + name=name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}".format_map(request_url_dict) + else: + path = "{name}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _SandboxEnvironment_from_vertex(response_dict) + + return_value = types.SandboxEnvironment._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _list( + self, + *, + name: str, + config: Optional[types.ListAgentEngineSandboxesConfigOrDict] = None, + ) -> types.ListAgentEngineSandboxesResponse: + """ + Lists Agent Engine sandboxes. + + Args: + name (str): Required. The name of the Agent Engine to list sessions for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (ListAgentEngineSandboxesConfig): + Optional. Additional configurations for listing the Agent Engine sandboxes. 
+ + Returns: + ListReasoningEnginesSandboxesResponse: The requested Agent Engine sandboxes. + + """ + + parameter_model = types._ListAgentEngineSandboxesRequestParameters( + name=name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _ListAgentEngineSandboxesRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}/sandboxEnvironments".format_map(request_url_dict) + else: + path = "{name}/sandboxEnvironments" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _ListAgentEngineSandboxesResponse_from_vertex(response_dict) + + return_value = types.ListAgentEngineSandboxesResponse._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def _get_sandbox_operation( + self, + *, + operation_name: str, + config: Optional[types.GetAgentEngineOperationConfigOrDict] = None, + ) -> types.AgentEngineSandboxOperation: + parameter_model = types._GetAgentEngineSandboxOperationParameters( + operation_name=operation_name, + config=config, + ) + + request_url_dict: Optional[dict[str, 
str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetAgentEngineSandboxOperationParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{operationName}".format_map(request_url_dict) + else: + path = "{operationName}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = self._api_client.request("get", path, request_dict, http_options) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _AgentEngineSandboxOperation_from_vertex(response_dict) + + return_value = types.AgentEngineSandboxOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + def create( + self, + *, + name: str, + spec: Optional[types.SandboxEnvironmentSpecOrDict] = None, + config: Optional[types.CreateAgentEngineSandboxConfigOrDict] = None, + ) -> types.AgentEngineSandboxOperation: + """Creates a new sandbox in the Agent Engine. + + Args: + name (str): + Required. The name of the agent engine to create sandbox for. + projects/{project}/locations/{location}/reasoningEngines/{resource_id} + spec (SandboxEnvironmentSpec): + Optional. The specification for the sandbox to create. + config (CreateAgentEngineSandboxConfigOrDict): + Optional. The configuration for the sandbox. 
+ + Returns: + AgentEngineSandboxOperation: The operation for creating the sandbox. + """ + operation = self._create( + name=name, + spec=spec, + config=config, + ) + if config is None: + config = types.CreateAgentEngineSandboxConfig() + elif isinstance(config, dict): + config = types.CreateAgentEngineSandboxConfig.model_validate(config) + if config.wait_for_completion: + if not operation.done: + operation = _agent_engines_utils._await_operation( + operation_name=operation.name, + get_operation_fn=self._get_sandbox_operation, + poll_interval_seconds=0.1, + ) + # We need to make a call to get the sandbox because the operation + # response might not contain the relevant fields. + if not operation.response: + raise ValueError("Error retrieving sandbox.") + operation.response = self.get(name=operation.response.name) + return operation + + def list( + self, + *, + name: str, + config: Optional[types.ListAgentEngineSandboxesConfigOrDict] = None, + ) -> Iterator[types.SandboxEnvironment]: + """Lists Agent Engine sandboxes. + + Args: + name (str): + Required. The name of the agent engine to list sandboxes for. + projects/{project}/locations/{location}/reasoningEngines/{resource_id}/SandboxEnvironments/{sandbox_id} + config (ListAgentEngineSandboxConfig): + Optional. The configuration for the sandboxes to list. + + Returns: + Iterable[SandboxEnvironment]: An iterable of agent engine sandboxes. + """ + return Pager( + "sandbox_environments", + functools.partial(self._list, name=name), + self._list(name=name, config=config), + config, + ) + + def execute_code( + self, + *, + name: str, + input_data: dict[str, Any], + config: Optional[types.ExecuteCodeAgentEngineSandboxConfigOrDict] = None, + ) -> types.ExecuteSandboxEnvironmentResponse: + """Executes code in the Agent Engine sandbox. + + Args: + name (str): + Required. The name of the agent engine sandbox to run code in. 
+ projects/{project}/locations/{location}/reasoningEngines/{resource_id}/SandboxEnvironments/{sandbox_id} + input_data (dict[str, Any]): + Required. The input to the code to execute. + config (ExecuteCodeAgentEngineSandboxConfigOrDict): + Optional. The configuration for the sandboxes to run code in. + + Returns: + ExecuteSandboxEnvironmentResponse: The response from executing the code. + """ + json_string = json.dumps(input_data) + + base64_bytes = base64.b64encode(json_string.encode("utf-8")) + base64_string = base64_bytes.decode("utf-8") + + # Only single JSON input is supported for now. + inputs = [{"mime_type": "application/json", "data": base64_string}] + + response = self._execute_code( + name=name, + inputs=inputs, + config=config, + ) + + return response + + def get( + self, + *, + name: str, + config: Optional[types.GetAgentEngineSandboxConfigOrDict] = None, + ) -> types.SandboxEnvironment: + """Gets an agent engine sandbox. + Args: + name (str): + Required. A fully-qualified resource name or ID such as + projects/{project}/locations/{location}/reasoningEngines/{resource_id}/SandboxEnvironments/{sandbox_id} + or a shortened name such as "reasoningEngines/{resource_id}/sandboxEnvironments/{sandbox_id}". + config (GetAgentEngineSandboxConfigOrDict): + Optional. The configuration for the sandbox to get. + + """ + return self._get(name=name, config=config) + + def delete( + self, + *, + name: str, + config: Optional[types.DeleteAgentEngineSandboxConfigOrDict] = None, + ) -> types.DeleteAgentEngineSandboxOperation: + """Deletes an agent engine sandbox. + Args: + name (str): + Required. A fully-qualified resource name or ID such as + projects/{project}/locations/{location}/reasoningEngines/{resource_id}/SandboxEnvironments/{sandbox_id} + or a shortened name such as "reasoningEngines/{resource_id}/sandboxEnvironments/{sandbox_id}". + config (DeleteAgentEngineSandboxConfigOrDict): + Optional. The configuration for the sandbox to delete. 
+ """ + return self._delete(name=name, config=config) + + +class AsyncSandboxes(_api_module.BaseModule): + + async def _create( + self, + *, + name: str, + spec: Optional[types.SandboxEnvironmentSpecOrDict] = None, + config: Optional[types.CreateAgentEngineSandboxConfigOrDict] = None, + ) -> types.AgentEngineSandboxOperation: + """ + Creates a new sandbox in the Agent Engine. + """ + + parameter_model = types._CreateAgentEngineSandboxRequestParameters( + name=name, + spec=spec, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _CreateAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}/sandboxEnvironments".format_map(request_url_dict) + else: + path = "{name}/sandboxEnvironments" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "post", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _AgentEngineSandboxOperation_from_vertex(response_dict) + + return_value = types.AgentEngineSandboxOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _delete( + self, + *, + name: str, + config: Optional[types.DeleteAgentEngineSandboxConfigOrDict] = None, + ) -> types.DeleteAgentEngineSandboxOperation: + """ + Delete an Agent Engine sandbox. + + Args: + name (str): + Required. The name of the Agent Engine sandbox to be deleted. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sandboxEnvironments/{sandbox}`. + + """ + + parameter_model = types._DeleteAgentEngineSandboxRequestParameters( + name=name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _DeleteAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}".format_map(request_url_dict) + else: + path = "{name}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "delete", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _DeleteAgentEngineSandboxOperation_from_vertex( + response_dict + ) + + return_value = types.DeleteAgentEngineSandboxOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _execute_code( + self, + *, + name: str, + inputs: Optional[list[types.ChunkOrDict]] = None, + config: Optional[types.ExecuteCodeAgentEngineSandboxConfigOrDict] = None, + ) -> types.ExecuteSandboxEnvironmentResponse: + """ + Execute code in an Agent Engine sandbox. + """ + + parameter_model = types._ExecuteCodeAgentEngineSandboxRequestParameters( + name=name, + inputs=inputs, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _ExecuteCodeAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}/:execute".format_map(request_url_dict) + else: + path = "{name}/:execute" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "post", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _ExecuteSandboxEnvironmentResponse_from_vertex( + response_dict + ) + + return_value = types.ExecuteSandboxEnvironmentResponse._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _get( + self, + *, + name: str, + config: Optional[types.GetAgentEngineSandboxConfigOrDict] = None, + ) -> types.SandboxEnvironment: + """ + Gets an agent engine sandbox. + + Args: + name (str): Required. A fully-qualified resource name or ID such as + "projects/123/locations/us-central1/reasoningEngines/456/sandboxEnvironments/789" + or a shortened name such as "reasoningEngines/456/sandboxEnvironments/789". + + """ + + parameter_model = types._GetAgentEngineSandboxRequestParameters( + name=name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _GetAgentEngineSandboxRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}".format_map(request_url_dict) + else: + path = "{name}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. 
+ request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _SandboxEnvironment_from_vertex(response_dict) + + return_value = types.SandboxEnvironment._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _list( + self, + *, + name: str, + config: Optional[types.ListAgentEngineSandboxesConfigOrDict] = None, + ) -> types.ListAgentEngineSandboxesResponse: + """ + Lists Agent Engine sandboxes. + + Args: + name (str): Required. The name of the Agent Engine to list sessions for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (ListAgentEngineSandboxesConfig): + Optional. Additional configurations for listing the Agent Engine sandboxes. + + Returns: + ListReasoningEnginesSandboxesResponse: The requested Agent Engine sandboxes. 
+ + """ + + parameter_model = types._ListAgentEngineSandboxesRequestParameters( + name=name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This method is only supported in the Vertex AI client.") + else: + request_dict = _ListAgentEngineSandboxesRequestParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{name}/sandboxEnvironments".format_map(request_url_dict) + else: + path = "{name}/sandboxEnvironments" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _ListAgentEngineSandboxesResponse_from_vertex(response_dict) + + return_value = types.ListAgentEngineSandboxesResponse._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value + + async def _get_sandbox_operation( + self, + *, + operation_name: str, + config: Optional[types.GetAgentEngineOperationConfigOrDict] = None, + ) -> types.AgentEngineSandboxOperation: + parameter_model = types._GetAgentEngineSandboxOperationParameters( + operation_name=operation_name, + config=config, + ) + + request_url_dict: Optional[dict[str, str]] + if not self._api_client.vertexai: + raise ValueError("This 
method is only supported in the Vertex AI client.") + else: + request_dict = _GetAgentEngineSandboxOperationParameters_to_vertex( + parameter_model + ) + request_url_dict = request_dict.get("_url") + if request_url_dict: + path = "{operationName}".format_map(request_url_dict) + else: + path = "{operationName}" + + query_params = request_dict.get("_query") + if query_params: + path = f"{path}?{urlencode(query_params)}" + # TODO: remove the hack that pops config. + request_dict.pop("config", None) + + http_options: Optional[types.HttpOptions] = None + if ( + parameter_model.config is not None + and parameter_model.config.http_options is not None + ): + http_options = parameter_model.config.http_options + + request_dict = _common.convert_to_dict(request_dict) + request_dict = _common.encode_unserializable_types(request_dict) + + response = await self._api_client.async_request( + "get", path, request_dict, http_options + ) + + response_dict = "" if not response.body else json.loads(response.body) + + if self._api_client.vertexai: + response_dict = _AgentEngineSandboxOperation_from_vertex(response_dict) + + return_value = types.AgentEngineSandboxOperation._from_response( + response=response_dict, kwargs=parameter_model.model_dump() + ) + + self._api_client._verify_response(return_value) + return return_value diff --git a/vertexai/_genai/session_events.py b/vertexai/_genai/session_events.py index 080d818ed1..f982a737fe 100644 --- a/vertexai/_genai/session_events.py +++ b/vertexai/_genai/session_events.py @@ -54,9 +54,7 @@ def _EventActions_to_vertex( if getv(from_object, ["skip_summarization"]) is not None: setv( - to_object, - ["skipSummarization"], - getv(from_object, ["skip_summarization"]), + to_object, ["skipSummarization"], getv(from_object, ["skip_summarization"]) ) if getv(from_object, ["state_delta"]) is not None: @@ -66,11 +64,7 @@ def _EventActions_to_vertex( setv(to_object, ["transferAgent"], getv(from_object, ["transfer_agent"])) if getv(from_object, 
["transfer_to_agent"]) is not None: - setv( - to_object, - ["transferToAgent"], - getv(from_object, ["transfer_to_agent"]), - ) + setv(to_object, ["transferToAgent"], getv(from_object, ["transfer_to_agent"])) return to_object @@ -80,22 +74,16 @@ def _EventMetadata_to_vertex( parent_object: Optional[dict[str, Any]] = None, ) -> dict[str, Any]: to_object: dict[str, Any] = {} + if getv(from_object, ["grounding_metadata"]) is not None: + setv( + to_object, ["groundingMetadata"], getv(from_object, ["grounding_metadata"]) + ) + if getv(from_object, ["branch"]) is not None: setv(to_object, ["branch"], getv(from_object, ["branch"])) if getv(from_object, ["custom_metadata"]) is not None: - setv( - to_object, - ["customMetadata"], - getv(from_object, ["custom_metadata"]), - ) - - if getv(from_object, ["grounding_metadata"]) is not None: - setv( - to_object, - ["groundingMetadata"], - getv(from_object, ["grounding_metadata"]), - ) + setv(to_object, ["customMetadata"], getv(from_object, ["custom_metadata"])) if getv(from_object, ["interrupted"]) is not None: setv(to_object, ["interrupted"], getv(from_object, ["interrupted"])) @@ -136,11 +124,7 @@ def _AppendAgentEngineSessionEventConfig_to_vertex( setv(parent_object, ["errorCode"], getv(from_object, ["error_code"])) if getv(from_object, ["error_message"]) is not None: - setv( - parent_object, - ["errorMessage"], - getv(from_object, ["error_message"]), - ) + setv(parent_object, ["errorMessage"], getv(from_object, ["error_message"])) if getv(from_object, ["event_metadata"]) is not None: setv( @@ -188,18 +172,10 @@ def _ListAgentEngineSessionEventsConfig_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["page_size"]) is not None: - setv( - parent_object, - ["_query", "pageSize"], - getv(from_object, ["page_size"]), - ) + setv(parent_object, ["_query", "pageSize"], getv(from_object, ["page_size"])) if getv(from_object, ["page_token"]) is not None: - setv( - parent_object, - ["_query", "pageToken"], - 
getv(from_object, ["page_token"]), - ) + setv(parent_object, ["_query", "pageToken"], getv(from_object, ["page_token"])) if getv(from_object, ["filter"]) is not None: setv(parent_object, ["_query", "filter"], getv(from_object, ["filter"])) @@ -277,11 +253,7 @@ def _ListAgentEngineSessionEventsResponse_from_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["sdkHttpResponse"]) is not None: - setv( - to_object, - ["sdk_http_response"], - getv(from_object, ["sdkHttpResponse"]), - ) + setv(to_object, ["sdk_http_response"], getv(from_object, ["sdkHttpResponse"])) if getv(from_object, ["nextPageToken"]) is not None: setv(to_object, ["next_page_token"], getv(from_object, ["nextPageToken"])) @@ -300,6 +272,7 @@ def _ListAgentEngineSessionEventsResponse_from_vertex( class SessionEvents(_api_module.BaseModule): + def append( self, *, @@ -309,24 +282,21 @@ def append( timestamp: datetime.datetime, config: Optional[types.AppendAgentEngineSessionEventConfigOrDict] = None, ) -> types.AppendAgentEngineSessionEventResponse: - """Appends Agent Engine session event. + """ + Appends Agent Engine session event. Args: - name (str): Required. The name of the Agent Engine session to append - event for. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. - author (str): Required. The author of the Agent Engine session - event. - invocation_id (str): Required. The invocation ID of the Agent Engine - session event. - timestamp (datetime.datetime): Required. The timestamp of the Agent - Engine session event. - config (AppendAgentEngineSessionEventConfig): Optional. Additional - configurations for appending the Agent Engine session event. + name (str): Required. The name of the Agent Engine session to append event for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. + author (str): Required. The author of the Agent Engine session event. 
+ invocation_id (str): Required. The invocation ID of the Agent Engine session event. + timestamp (datetime.datetime): Required. The timestamp of the Agent Engine session event. + config (AppendAgentEngineSessionEventConfig): + Optional. Additional configurations for appending the Agent Engine session event. Returns: - AppendAgentEngineSessionEventResponse: The requested Agent Engine - session event. + AppendAgentEngineSessionEventResponse: The requested Agent Engine session event. + """ parameter_model = types._AppendAgentEngineSessionEventRequestParameters( @@ -388,18 +358,18 @@ def _list( name: str, config: Optional[types.ListAgentEngineSessionEventsConfigOrDict] = None, ) -> types.ListAgentEngineSessionEventsResponse: - """Lists Agent Engine session events. + """ + Lists Agent Engine session events. Args: - name (str): Required. The name of the Agent Engine session to list - events for. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. - config (ListAgentEngineSessionEventsConfig): Optional. Additional - configurations for listing the Agent Engine session events. + name (str): Required. The name of the Agent Engine session to list events for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. + config (ListAgentEngineSessionEventsConfig): + Optional. Additional configurations for listing the Agent Engine session events. Returns: - ListAgentEngineSessionEventsResponse: The requested Agent Engine - session events. + ListAgentEngineSessionEventsResponse: The requested Agent Engine session events. + """ parameter_model = types._ListAgentEngineSessionEventsRequestParameters( @@ -462,13 +432,13 @@ def list( Args: name (str): Required. The name of the agent engine to list session - events for. - config (ListAgentEngineSessionEventsConfig): Optional. The - configuration for the session events to list. 
Currently, the - `filter` field in `config` only supports filtering by `timestamp`. - The timestamp value must be enclosed in double quotes and include - the time zone information. For example: `config={'filter': - 'timestamp>="2025-08-07T19:44:38.4Z"'}`. + events for. + config (ListAgentEngineSessionEventsConfig): Optional. The configuration + for the session events to list. Currently, the `filter` field in + `config` only supports filtering by `timestamp`. The timestamp + value must be enclosed in double quotes and include the time zone + information. For example: + `config={'filter': 'timestamp>="2025-08-07T19:44:38.4Z"'}`. Returns: Iterator[SessionEvent]: An iterable of session events. @@ -483,6 +453,7 @@ def list( class AsyncSessionEvents(_api_module.BaseModule): + async def append( self, *, @@ -492,24 +463,21 @@ async def append( timestamp: datetime.datetime, config: Optional[types.AppendAgentEngineSessionEventConfigOrDict] = None, ) -> types.AppendAgentEngineSessionEventResponse: - """Appends Agent Engine session event. + """ + Appends Agent Engine session event. Args: - name (str): Required. The name of the Agent Engine session to append - event for. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. - author (str): Required. The author of the Agent Engine session - event. - invocation_id (str): Required. The invocation ID of the Agent Engine - session event. - timestamp (datetime.datetime): Required. The timestamp of the Agent - Engine session event. - config (AppendAgentEngineSessionEventConfig): Optional. Additional - configurations for appending the Agent Engine session event. + name (str): Required. The name of the Agent Engine session to append event for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. + author (str): Required. The author of the Agent Engine session event. + invocation_id (str): Required. 
The invocation ID of the Agent Engine session event. + timestamp (datetime.datetime): Required. The timestamp of the Agent Engine session event. + config (AppendAgentEngineSessionEventConfig): + Optional. Additional configurations for appending the Agent Engine session event. Returns: - AppendAgentEngineSessionEventResponse: The requested Agent Engine - session event. + AppendAgentEngineSessionEventResponse: The requested Agent Engine session event. + """ parameter_model = types._AppendAgentEngineSessionEventRequestParameters( @@ -573,18 +541,18 @@ async def _list( name: str, config: Optional[types.ListAgentEngineSessionEventsConfigOrDict] = None, ) -> types.ListAgentEngineSessionEventsResponse: - """Lists Agent Engine session events. + """ + Lists Agent Engine session events. Args: - name (str): Required. The name of the Agent Engine session to list - events for. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. - config (ListAgentEngineSessionEventsConfig): Optional. Additional - configurations for listing the Agent Engine session events. + name (str): Required. The name of the Agent Engine session to list events for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}/sessions/{session_id}`. + config (ListAgentEngineSessionEventsConfig): + Optional. Additional configurations for listing the Agent Engine session events. Returns: - ListAgentEngineSessionEventsResponse: The requested Agent Engine - session events. + ListAgentEngineSessionEventsResponse: The requested Agent Engine session events. 
+ """ parameter_model = types._ListAgentEngineSessionEventsRequestParameters( diff --git a/vertexai/_genai/sessions.py b/vertexai/_genai/sessions.py index 2453f226ad..e22d669346 100644 --- a/vertexai/_genai/sessions.py +++ b/vertexai/_genai/sessions.py @@ -45,11 +45,7 @@ def _CreateAgentEngineSessionConfig_to_vertex( setv(parent_object, ["displayName"], getv(from_object, ["display_name"])) if getv(from_object, ["session_state"]) is not None: - setv( - parent_object, - ["sessionState"], - getv(from_object, ["session_state"]), - ) + setv(parent_object, ["sessionState"], getv(from_object, ["session_state"])) if getv(from_object, ["ttl"]) is not None: setv(parent_object, ["ttl"], getv(from_object, ["ttl"])) @@ -118,18 +114,10 @@ def _ListAgentEngineSessionsConfig_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["page_size"]) is not None: - setv( - parent_object, - ["_query", "pageSize"], - getv(from_object, ["page_size"]), - ) + setv(parent_object, ["_query", "pageSize"], getv(from_object, ["page_size"])) if getv(from_object, ["page_token"]) is not None: - setv( - parent_object, - ["_query", "pageToken"], - getv(from_object, ["page_token"]), - ) + setv(parent_object, ["_query", "pageToken"], getv(from_object, ["page_token"])) if getv(from_object, ["filter"]) is not None: setv(parent_object, ["_query", "filter"], getv(from_object, ["filter"])) @@ -164,9 +152,7 @@ def _GetAgentEngineSessionOperationParameters_to_vertex( to_object: dict[str, Any] = {} if getv(from_object, ["operation_name"]) is not None: setv( - to_object, - ["_url", "operationName"], - getv(from_object, ["operation_name"]), + to_object, ["_url", "operationName"], getv(from_object, ["operation_name"]) ) if getv(from_object, ["config"]) is not None: @@ -185,11 +171,7 @@ def _UpdateAgentEngineSessionConfig_to_vertex( setv(parent_object, ["displayName"], getv(from_object, ["display_name"])) if getv(from_object, ["session_state"]) is not None: - setv( - parent_object, - ["sessionState"], - 
getv(from_object, ["session_state"]), - ) + setv(parent_object, ["sessionState"], getv(from_object, ["session_state"])) if getv(from_object, ["ttl"]) is not None: setv(parent_object, ["ttl"], getv(from_object, ["ttl"])) @@ -199,9 +181,7 @@ def _UpdateAgentEngineSessionConfig_to_vertex( if getv(from_object, ["update_mask"]) is not None: setv( - parent_object, - ["_query", "updateMask"], - getv(from_object, ["update_mask"]), + parent_object, ["_query", "updateMask"], getv(from_object, ["update_mask"]) ) if getv(from_object, ["user_id"]) is not None: @@ -315,11 +295,7 @@ def _ListReasoningEnginesSessionsResponse_from_vertex( ) -> dict[str, Any]: to_object: dict[str, Any] = {} if getv(from_object, ["sdkHttpResponse"]) is not None: - setv( - to_object, - ["sdk_http_response"], - getv(from_object, ["sdkHttpResponse"]), - ) + setv(to_object, ["sdk_http_response"], getv(from_object, ["sdkHttpResponse"])) if getv(from_object, ["nextPageToken"]) is not None: setv(to_object, ["next_page_token"], getv(from_object, ["nextPageToken"])) @@ -338,6 +314,7 @@ def _ListReasoningEnginesSessionsResponse_from_vertex( class Sessions(_api_module.BaseModule): + def _create( self, *, @@ -345,19 +322,19 @@ def _create( user_id: str, config: Optional[types.CreateAgentEngineSessionConfigOrDict] = None, ) -> types.AgentEngineSessionOperation: - """Creates a new session in the Agent Engine. + """ + Creates a new session in the Agent Engine. Args: - name (str): Required. The name of the Agent Engine session to be - created. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + name (str): Required. The name of the Agent Engine session to be created. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. user_id (str): Required. The user ID of the session. - config (CreateAgentEngineSessionConfig): Optional. Additional - configurations for creating the Agent Engine session. + config (CreateAgentEngineSessionConfig): + Optional. 
Additional configurations for creating the Agent Engine session. Returns: - AgentEngineSessionOperation: The operation for creating the Agent - Engine session. + AgentEngineSessionOperation: The operation for creating the Agent Engine session. + """ parameter_model = types._CreateAgentEngineSessionRequestParameters( @@ -415,18 +392,18 @@ def delete( name: str, config: Optional[types.DeleteAgentEngineSessionConfigOrDict] = None, ) -> types.DeleteAgentEngineSessionOperation: - """Delete an Agent Engine session. + """ + Delete an Agent Engine session. Args: - name (str): Required. The name of the Agent Engine session to be - deleted. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (DeleteAgentEngineSessionConfig): Optional. Additional - configurations for deleting the Agent Engine session. + name (str): Required. The name of the Agent Engine session to be deleted. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (DeleteAgentEngineSessionConfig): + Optional. Additional configurations for deleting the Agent Engine session. Returns: - DeleteAgentEngineSessionOperation: The operation for deleting the - Agent Engine session. + DeleteAgentEngineSessionOperation: The operation for deleting the Agent Engine session. + """ parameter_model = types._DeleteAgentEngineSessionRequestParameters( @@ -485,17 +462,18 @@ def get( name: str, config: Optional[types.GetAgentEngineSessionConfigOrDict] = None, ) -> types.Session: - """Gets an agent engine session. + """ + Gets an agent engine session. Args: - name (str): Required. The name of the Agent Engine session to get. - Format: + name (str): Required. The name of the Agent Engine session to get. Format: `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (GetAgentEngineSessionConfig): Optional. Additional - configurations for getting the Agent Engine session. + config (GetAgentEngineSessionConfig): + Optional. 
Additional configurations for getting the Agent Engine session. Returns: AgentEngineSession: The requested Agent Engine session. + """ parameter_model = types._GetAgentEngineSessionRequestParameters( @@ -552,18 +530,18 @@ def _list( name: str, config: Optional[types.ListAgentEngineSessionsConfigOrDict] = None, ) -> types.ListReasoningEnginesSessionsResponse: - """Lists Agent Engine sessions. + """ + Lists Agent Engine sessions. Args: - name (str): Required. The name of the Agent Engine to list sessions - for. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (ListAgentEngineSessionsConfig): Optional. Additional - configurations for listing the Agent Engine sessions. + name (str): Required. The name of the Agent Engine to list sessions for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (ListAgentEngineSessionsConfig): + Optional. Additional configurations for listing the Agent Engine sessions. Returns: - ListReasoningEnginesSessionsResponse: The requested Agent Engine - sessions. + ListReasoningEnginesSessionsResponse: The requested Agent Engine sessions. + """ parameter_model = types._ListAgentEngineSessionsRequestParameters( @@ -676,18 +654,18 @@ def _update( name: str, config: Optional[types.UpdateAgentEngineSessionConfigOrDict] = None, ) -> types.AgentEngineSessionOperation: - """Updates an Agent Engine session. + """ + Updates an Agent Engine session. Args: - name (str): Required. The name of the Agent Engine session to be - updated. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (UpdateAgentEngineSessionConfig): Optional. Additional - configurations for updating the Agent Engine session. + name (str): Required. The name of the Agent Engine session to be updated. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (UpdateAgentEngineSessionConfig): + Optional. 
Additional configurations for updating the Agent Engine session. Returns: - AgentEngineSessionOperation: The operation for updating the Agent - Engine session. + AgentEngineSessionOperation: The operation for updating the Agent Engine session. + """ parameter_model = types._UpdateAgentEngineSessionRequestParameters( @@ -753,9 +731,9 @@ def events(self): self._events = importlib.import_module(".session_events", __package__) except ImportError as e: raise ImportError( - "The 'agent_engines.sessions.events' module" - " requiresadditional packages. Please install them using" - " pip install google-cloud-aiplatform[agent_engines]" + "The 'agent_engines.sessions.events' module requires" + "additional packages. Please install them using pip install " + "google-cloud-aiplatform[agent_engines]" ) from e return self._events.SessionEvents(self._api_client) @@ -769,11 +747,12 @@ def create( """Creates a new session in the Agent Engine. Args: - name (str): Required. The name of the agent engine to create the - session for. - user_id (str): Required. The user ID of the session. - config (CreateAgentEngineSessionConfig): Optional. The configuration - for the session to create. + name (str): + Required. The name of the agent engine to create the session for. + user_id (str): + Required. The user ID of the session. + config (CreateAgentEngineSessionConfig): + Optional. The configuration for the session to create. Returns: AgentEngineSessionOperation: The operation for creating the session. @@ -791,6 +770,7 @@ def create( operation = _agent_engines_utils._await_operation( operation_name=operation.name, get_operation_fn=self._get_session_operation, + poll_interval_seconds=0.5, ) if operation.response: operation.response = self.get(name=operation.response.name) @@ -813,9 +793,9 @@ def list( Args: name (str): Required. The name of the agent engine to list sessions - for. + for. config (ListAgentEngineSessionConfig): Optional. The configuration - for the sessions to list. 
+ for the sessions to list. Returns: Iterable[Session]: An iterable of sessions. @@ -830,6 +810,7 @@ def list( class AsyncSessions(_api_module.BaseModule): + async def _create( self, *, @@ -837,19 +818,19 @@ async def _create( user_id: str, config: Optional[types.CreateAgentEngineSessionConfigOrDict] = None, ) -> types.AgentEngineSessionOperation: - """Creates a new session in the Agent Engine. + """ + Creates a new session in the Agent Engine. Args: - name (str): Required. The name of the Agent Engine session to be - created. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + name (str): Required. The name of the Agent Engine session to be created. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. user_id (str): Required. The user ID of the session. - config (CreateAgentEngineSessionConfig): Optional. Additional - configurations for creating the Agent Engine session. + config (CreateAgentEngineSessionConfig): + Optional. Additional configurations for creating the Agent Engine session. Returns: - AgentEngineSessionOperation: The operation for creating the Agent - Engine session. + AgentEngineSessionOperation: The operation for creating the Agent Engine session. + """ parameter_model = types._CreateAgentEngineSessionRequestParameters( @@ -909,18 +890,18 @@ async def delete( name: str, config: Optional[types.DeleteAgentEngineSessionConfigOrDict] = None, ) -> types.DeleteAgentEngineSessionOperation: - """Delete an Agent Engine session. + """ + Delete an Agent Engine session. Args: - name (str): Required. The name of the Agent Engine session to be - deleted. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (DeleteAgentEngineSessionConfig): Optional. Additional - configurations for deleting the Agent Engine session. + name (str): Required. The name of the Agent Engine session to be deleted. 
Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (DeleteAgentEngineSessionConfig): + Optional. Additional configurations for deleting the Agent Engine session. Returns: - DeleteAgentEngineSessionOperation: The operation for deleting the - Agent Engine session. + DeleteAgentEngineSessionOperation: The operation for deleting the Agent Engine session. + """ parameter_model = types._DeleteAgentEngineSessionRequestParameters( @@ -981,17 +962,18 @@ async def get( name: str, config: Optional[types.GetAgentEngineSessionConfigOrDict] = None, ) -> types.Session: - """Gets an agent engine session. + """ + Gets an agent engine session. Args: - name (str): Required. The name of the Agent Engine session to get. - Format: + name (str): Required. The name of the Agent Engine session to get. Format: `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (GetAgentEngineSessionConfig): Optional. Additional - configurations for getting the Agent Engine session. + config (GetAgentEngineSessionConfig): + Optional. Additional configurations for getting the Agent Engine session. Returns: AgentEngineSession: The requested Agent Engine session. + """ parameter_model = types._GetAgentEngineSessionRequestParameters( @@ -1050,18 +1032,18 @@ async def _list( name: str, config: Optional[types.ListAgentEngineSessionsConfigOrDict] = None, ) -> types.ListReasoningEnginesSessionsResponse: - """Lists Agent Engine sessions. + """ + Lists Agent Engine sessions. Args: - name (str): Required. The name of the Agent Engine to list sessions - for. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (ListAgentEngineSessionsConfig): Optional. Additional - configurations for listing the Agent Engine sessions. + name (str): Required. The name of the Agent Engine to list sessions for. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. 
+ config (ListAgentEngineSessionsConfig): + Optional. Additional configurations for listing the Agent Engine sessions. Returns: - ListReasoningEnginesSessionsResponse: The requested Agent Engine - sessions. + ListReasoningEnginesSessionsResponse: The requested Agent Engine sessions. + """ parameter_model = types._ListAgentEngineSessionsRequestParameters( @@ -1178,18 +1160,18 @@ async def _update( name: str, config: Optional[types.UpdateAgentEngineSessionConfigOrDict] = None, ) -> types.AgentEngineSessionOperation: - """Updates an Agent Engine session. + """ + Updates an Agent Engine session. Args: - name (str): Required. The name of the Agent Engine session to be - updated. Format: - `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. - config (UpdateAgentEngineSessionConfig): Optional. Additional - configurations for updating the Agent Engine session. + name (str): Required. The name of the Agent Engine session to be updated. Format: + `projects/{project}/locations/{location}/reasoningEngines/{resource_id}`. + config (UpdateAgentEngineSessionConfig): + Optional. Additional configurations for updating the Agent Engine session. Returns: - AgentEngineSessionOperation: The operation for updating the Agent - Engine session. + AgentEngineSessionOperation: The operation for updating the Agent Engine session. 
+ """ parameter_model = types._UpdateAgentEngineSessionRequestParameters( diff --git a/vertexai/_genai/types.py b/vertexai/_genai/types.py index f6fb301a2e..c1c32b63ac 100644 --- a/vertexai/_genai/types.py +++ b/vertexai/_genai/types.py @@ -46,8 +46,6 @@ ) from typing_extensions import TypedDict -# mypy: disable-error-code="attr-defined, comparison-overlap, valid-type" - logger = logging.getLogger("vertexai_genai.types") __all__ = ["PrebuiltMetric", "RubricMetric"] # noqa: F822 @@ -62,6 +60,11 @@ def __getattr__(name: str) -> typing.Any: raise AttributeError(f"module '{__name__}' has no attribute '{name}'") +def _camel_to_snake(camel_case_string: str) -> str: + snake_case_string = re.sub(r"(? "LLMMetric": """Loads a metric configuration from a YAML or JSON file. This method allows for the creation of an LLMMetric instance from a - local file path or a Google Cloud Storage (GCS) URI. It will - automatically + local file path or a Google Cloud Storage (GCS) URI. It will automatically detect the file type (.yaml, .yml, or .json) and parse it accordingly. Args: - config_path: The local path or GCS URI (e.g., - 'gs://bucket/metric.yaml') to the metric configuration file. - client: Optional. The Vertex AI client instance to use for - authentication. If not provided, Application Default Credentials - (ADC) will be used. + config_path: The local path or GCS URI (e.g., 'gs://bucket/metric.yaml') + to the metric configuration file. + client: Optional. The Vertex AI client instance to use for authentication. + If not provided, Application Default Credentials (ADC) will be used. Returns: An instance of LLMMetric configured with the loaded data. Raises: - ValueError: If the file path is invalid or the file content cannot - be parsed. - ImportError: If a required library like 'PyYAML' or - 'google-cloud-storage' is not installed. + ValueError: If the file path is invalid or the file content cannot be parsed. 
+ ImportError: If a required library like 'PyYAML' or 'google-cloud-storage' is not installed. IOError: If the file cannot be read from the specified path. """ file_extension = os.path.splitext(config_path)[1].lower() if file_extension not in [".yaml", ".yml", ".json"]: raise ValueError( - "Unsupported file extension for metric config. Must be .yaml," - " .yml, or .json" + "Unsupported file extension for metric config. Must be .yaml, .yml, or .json" ) content_str: str @@ -1556,9 +1452,7 @@ def load(cls, config_path: str, client: Optional[Any] = None) -> "LLMMetric": content_str = blob.download_as_bytes().decode("utf-8") except ImportError as e: raise ImportError( - "Reading from GCS requires the 'google-cloud-storage'" - " library. Please install it with 'pip install" - " google-cloud-aiplatform[evaluation]'." + "Reading from GCS requires the 'google-cloud-storage' library. Please install it with 'pip install google-cloud-aiplatform[evaluation]'." ) from e except Exception as e: raise IOError(f"Failed to read from GCS path {config_path}: {e}") from e @@ -1578,9 +1472,7 @@ def load(cls, config_path: str, client: Optional[Any] = None) -> "LLMMetric": if file_extension in [".yaml", ".yml"]: if yaml is None: raise ImportError( - "YAML parsing requires the pyyaml library. Please install" - " it with 'pip install" - " google-cloud-aiplatform[evaluation]'." + "YAML parsing requires the pyyaml library. Please install it with 'pip install google-cloud-aiplatform[evaluation]'." ) data = yaml.safe_load(content_str) elif file_extension == ".json": @@ -2107,8 +1999,7 @@ class ToolCallValidResults(_common.BaseModel): """Results for tool call valid metric.""" tool_call_valid_metric_values: Optional[list[ToolCallValidMetricValue]] = Field( - default=None, - description="""Output only. Tool call valid metric values.""", + default=None, description="""Output only. 
Tool call valid metric values.""" ) @@ -2146,8 +2037,7 @@ class ToolNameMatchResults(_common.BaseModel): """Results for tool name match metric.""" tool_name_match_metric_values: Optional[list[ToolNameMatchMetricValue]] = Field( - default=None, - description="""Output only. Tool name match metric values.""", + default=None, description="""Output only. Tool name match metric values.""" ) @@ -2165,8 +2055,7 @@ class ToolParameterKeyMatchMetricValue(_common.BaseModel): """Tool parameter key match metric value for an instance.""" score: Optional[float] = Field( - default=None, - description="""Output only. Tool parameter key match score.""", + default=None, description="""Output only. Tool parameter key match score.""" ) @@ -2267,8 +2156,7 @@ class EvaluateInstancesResponse(_common.BaseModel): default=None, description="""Results for bleu metric.""" ) comet_result: Optional[CometResult] = Field( - default=None, - description="""Translation metrics. Result for Comet metric.""", + default=None, description="""Translation metrics. Result for Comet metric.""" ) exact_match_results: Optional[ExactMatchResults] = Field( default=None, @@ -2281,8 +2169,7 @@ class EvaluateInstancesResponse(_common.BaseModel): default=None, description="""Result for pairwise metric.""" ) pointwise_metric_result: Optional[PointwiseMetricResult] = Field( - default=None, - description="""Generic metrics. Result for pointwise metric.""", + default=None, description="""Generic metrics. 
Result for pointwise metric.""" ) rouge_results: Optional[RougeResults] = Field( default=None, description="""Results for rouge metric.""" @@ -2295,8 +2182,7 @@ class EvaluateInstancesResponse(_common.BaseModel): default=None, description="""Results for tool name match metric.""" ) tool_parameter_key_match_results: Optional[ToolParameterKeyMatchResults] = Field( - default=None, - description="""Results for tool parameter key match metric.""", + default=None, description="""Results for tool parameter key match metric.""" ) tool_parameter_kv_match_results: Optional[ToolParameterKVMatchResults] = Field( default=None, @@ -2375,7 +2261,7 @@ class PredefinedMetricSpecDict(TypedDict, total=False): class RubricGenerationConfig(_common.BaseModel): """Config for generating rubrics.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -2383,7 +2269,7 @@ class RubricGenerationConfig(_common.BaseModel): class RubricGenerationConfigDict(TypedDict, total=False): """Config for generating rubrics.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -2462,7 +2348,7 @@ class GenerateInstanceRubricsResponseDict(TypedDict, total=False): class OptimizeConfig(_common.BaseModel): """Config for Prompt Optimizer.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -2470,7 +2356,7 @@ class OptimizeConfig(_common.BaseModel): class OptimizeConfigDict(TypedDict, total=False): """Config for Prompt Optimizer.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -2517,32 +2403,8 @@ class OptimizeResponseEndpointDict(TypedDict, total=False): ] -class 
GcsDestination(_common.BaseModel): - """The Google Cloud Storage location where the output is to be written to.""" - - output_uri_prefix: Optional[str] = Field( - default=None, - description="""Required. Google Cloud Storage URI to output directory. If the uri doesn't end with '/', a '/' will be automatically appended. The directory is created if it doesn't exist.""", - ) - - -class GcsDestinationDict(TypedDict, total=False): - """The Google Cloud Storage location where the output is to be written to.""" - - output_uri_prefix: Optional[str] - """Required. Google Cloud Storage URI to output directory. If the uri doesn't end with '/', a '/' will be automatically appended. The directory is created if it doesn't exist.""" - - -GcsDestinationOrDict = Union[GcsDestination, GcsDestinationDict] - - class DnsPeeringConfig(_common.BaseModel): - """DNS peering configuration. - - These configurations are used to create DNS peering zones in the Vertex - tenant project VPC, enabling resolution of records within the specified - domain hosted in the target network's Cloud DNS. - """ + """DNS peering configuration. These configurations are used to create DNS peering zones in the Vertex tenant project VPC, enabling resolution of records within the specified domain hosted in the target network's Cloud DNS.""" domain: Optional[str] = Field( default=None, @@ -2559,12 +2421,7 @@ class DnsPeeringConfig(_common.BaseModel): class DnsPeeringConfigDict(TypedDict, total=False): - """DNS peering configuration. - - These configurations are used to create DNS peering zones in the Vertex - tenant project VPC, enabling resolution of records within the specified - domain hosted in the target network's Cloud DNS. - """ + """DNS peering configuration. These configurations are used to create DNS peering zones in the Vertex tenant project VPC, enabling resolution of records within the specified domain hosted in the target network's Cloud DNS.""" domain: Optional[str] """Required. 
The DNS name suffix of the zone being peered to, e.g., "my-internal-domain.corp.". Must end with a dot.""" @@ -2722,8 +2579,7 @@ class DiskSpec(_common.BaseModel): """Represents the spec of disk options.""" boot_disk_size_gb: Optional[int] = Field( - default=None, - description="""Size in GB of the boot disk (default is 100GB).""", + default=None, description="""Size in GB of the boot disk (default is 100GB).""" ) boot_disk_type: Optional[str] = Field( default=None, @@ -2961,7 +2817,7 @@ class WorkerPoolSpecDict(TypedDict, total=False): class CustomJobSpec(_common.BaseModel): """Represents a job that runs custom workloads such as a Docker container or a Python package.""" - base_output_directory: Optional[GcsDestination] = Field( + base_output_directory: Optional[genai_types.GcsDestination] = Field( default=None, description="""The Cloud Storage location to store the output of this CustomJob or HyperparameterTuningJob. For HyperparameterTuningJob, the baseOutputDirectory of each child CustomJob backing a Trial is set to a subdirectory of name id under its parent HyperparameterTuningJob's baseOutputDirectory. The following Vertex AI environment variables will be passed to containers or python modules when this field is set: For CustomJob: * AIP_MODEL_DIR = `/model/` * AIP_CHECKPOINT_DIR = `/checkpoints/` * AIP_TENSORBOARD_LOG_DIR = `/logs/` For CustomJob backing a Trial of HyperparameterTuningJob: * AIP_MODEL_DIR = `//model/` * AIP_CHECKPOINT_DIR = `//checkpoints/` * AIP_TENSORBOARD_LOG_DIR = `//logs/`""", ) @@ -2998,8 +2854,7 @@ class CustomJobSpec(_common.BaseModel): description="""The ID of the location to store protected artifacts. e.g. us-central1. Populate only when the location is different than CustomJob location. List of supported locations: https://cloud.google.com/vertex-ai/docs/general/locations""", ) psc_interface_config: Optional[PscInterfaceConfig] = Field( - default=None, - description="""Optional. 
Configuration for PSC-I for CustomJob.""", + default=None, description="""Optional. Configuration for PSC-I for CustomJob.""" ) reserved_ip_ranges: Optional[list[str]] = Field( default=None, @@ -3025,7 +2880,7 @@ class CustomJobSpec(_common.BaseModel): class CustomJobSpecDict(TypedDict, total=False): """Represents a job that runs custom workloads such as a Docker container or a Python package.""" - base_output_directory: Optional[GcsDestinationDict] + base_output_directory: Optional[genai_types.GcsDestinationDict] """The Cloud Storage location to store the output of this CustomJob or HyperparameterTuningJob. For HyperparameterTuningJob, the baseOutputDirectory of each child CustomJob backing a Trial is set to a subdirectory of name id under its parent HyperparameterTuningJob's baseOutputDirectory. The following Vertex AI environment variables will be passed to containers or python modules when this field is set: For CustomJob: * AIP_MODEL_DIR = `/model/` * AIP_CHECKPOINT_DIR = `/checkpoints/` * AIP_TENSORBOARD_LOG_DIR = `/logs/` For CustomJob backing a Trial of HyperparameterTuningJob: * AIP_MODEL_DIR = `//model/` * AIP_CHECKPOINT_DIR = `//checkpoints/` * AIP_TENSORBOARD_LOG_DIR = `//logs/`""" enable_dashboard_access: Optional[bool] @@ -3074,70 +2929,6 @@ class CustomJobSpecDict(TypedDict, total=False): CustomJobSpecOrDict = Union[CustomJobSpec, CustomJobSpecDict] -class EncryptionSpec(_common.BaseModel): - """The encryption spec.""" - - kms_key_name: Optional[str] = Field( - default=None, - description="""Required. The Cloud KMS resource identifier of the customer managed encryption key used to protect a resource. Has the form: `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`. The key needs to be in the same region as where the compute resource is created.""", - ) - - -class EncryptionSpecDict(TypedDict, total=False): - """The encryption spec.""" - - kms_key_name: Optional[str] - """Required. 
The Cloud KMS resource identifier of the customer managed encryption key used to protect a resource. Has the form: `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`. The key needs to be in the same region as where the compute resource is created.""" - - -EncryptionSpecOrDict = Union[EncryptionSpec, EncryptionSpecDict] - - -class GoogleRpcStatus(_common.BaseModel): - """The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. - - It is used by [gRPC](https://github.com/grpc). Each `Status` message - contains three pieces of data: error code, error message, and error details. - You can find out more about this error model and how to work with it in the - [API Design Guide](https://cloud.google.com/apis/design/errors). - """ - - code: Optional[int] = Field( - default=None, - description="""The status code, which should be an enum value of google.rpc.Code.""", - ) - details: Optional[list[dict[str, Any]]] = Field( - default=None, - description="""A list of messages that carry the error details. There is a common set of message types for APIs to use.""", - ) - message: Optional[str] = Field( - default=None, - description="""A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.""", - ) - - -class GoogleRpcStatusDict(TypedDict, total=False): - """The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. - - It is used by [gRPC](https://github.com/grpc). Each `Status` message - contains three pieces of data: error code, error message, and error details. - You can find out more about this error model and how to work with it in the - [API Design Guide](https://cloud.google.com/apis/design/errors). 
- """ - - code: Optional[int] - """The status code, which should be an enum value of google.rpc.Code.""" - - details: Optional[list[dict[str, Any]]] - """A list of messages that carry the error details. There is a common set of message types for APIs to use.""" - - message: Optional[str] - """A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.""" - - -GoogleRpcStatusOrDict = Union[GoogleRpcStatus, GoogleRpcStatusDict] - - class CustomJob(_common.BaseModel): """Represents a job that runs custom workloads such as a Docker container or a Python package.""" @@ -3148,29 +2939,31 @@ class CustomJob(_common.BaseModel): job_spec: Optional[CustomJobSpec] = Field( default=None, description="""Required. Job spec.""" ) - create_time: Optional[datetime.datetime] = Field( + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( default=None, - description="""Output only. Time when the CustomJob was created.""", + description="""Customer-managed encryption key options for a CustomJob. If this is set, then all resources created by the CustomJob will be encrypted with the provided encryption key.""", + ) + state: Optional[genai_types.JobState] = Field( + default=None, description="""Output only. The detailed state of the job.""" ) - encryption_spec: Optional[EncryptionSpec] = Field( + error: Optional[genai_types.GoogleRpcStatus] = Field( default=None, - description="""Customer-managed encryption key options for a CustomJob. If this is set, then all resources created by the CustomJob will be encrypted with the provided encryption key.""", + description="""Output only. Only populated when job's state is `JOB_STATE_FAILED` or `JOB_STATE_CANCELLED`.""", ) - end_time: Optional[datetime.datetime] = Field( + create_time: Optional[datetime.datetime] = Field( default=None, - description="""Output only. 
Time when the CustomJob entered any of the following states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.""", + description="""Output only. Time when the CustomJob was created.""", ) - error: Optional[GoogleRpcStatus] = Field( + end_time: Optional[datetime.datetime] = Field( default=None, - description="""Output only. Only populated when job's state is `JOB_STATE_FAILED` or `JOB_STATE_CANCELLED`.""", + description="""Output only. Time when the CustomJob entered any of the following states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.""", ) labels: Optional[dict[str, str]] = Field( default=None, description="""The labels with user-defined metadata to organize CustomJobs. Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. See https://goo.gl/xmQnxf for more information and examples of labels.""", ) name: Optional[str] = Field( - default=None, - description="""Output only. Resource name of a CustomJob.""", + default=None, description="""Output only. Resource name of a CustomJob.""" ) satisfies_pzi: Optional[bool] = Field( default=None, description="""Output only. Reserved for future use.""" @@ -3182,10 +2975,6 @@ class CustomJob(_common.BaseModel): default=None, description="""Output only. Time when the CustomJob for the first time entered the `JOB_STATE_RUNNING` state.""", ) - state: Optional[JobState] = Field( - default=None, - description="""Output only. The detailed state of the job.""", - ) update_time: Optional[datetime.datetime] = Field( default=None, description="""Output only. Time when the CustomJob was most recently updated.""", @@ -3205,18 +2994,21 @@ class CustomJobDict(TypedDict, total=False): job_spec: Optional[CustomJobSpecDict] """Required. 
Job spec.""" + encryption_spec: Optional[genai_types.EncryptionSpecDict] + """Customer-managed encryption key options for a CustomJob. If this is set, then all resources created by the CustomJob will be encrypted with the provided encryption key.""" + + state: Optional[genai_types.JobState] + """Output only. The detailed state of the job.""" + + error: Optional[genai_types.GoogleRpcStatusDict] + """Output only. Only populated when job's state is `JOB_STATE_FAILED` or `JOB_STATE_CANCELLED`.""" + create_time: Optional[datetime.datetime] """Output only. Time when the CustomJob was created.""" - encryption_spec: Optional[EncryptionSpecDict] - """Customer-managed encryption key options for a CustomJob. If this is set, then all resources created by the CustomJob will be encrypted with the provided encryption key.""" - end_time: Optional[datetime.datetime] """Output only. Time when the CustomJob entered any of the following states: `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.""" - error: Optional[GoogleRpcStatusDict] - """Output only. Only populated when job's state is `JOB_STATE_FAILED` or `JOB_STATE_CANCELLED`.""" - labels: Optional[dict[str, str]] """The labels with user-defined metadata to organize CustomJobs. Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. See https://goo.gl/xmQnxf for more information and examples of labels.""" @@ -3232,9 +3024,6 @@ class CustomJobDict(TypedDict, total=False): start_time: Optional[datetime.datetime] """Output only. Time when the CustomJob for the first time entered the `JOB_STATE_RUNNING` state.""" - state: Optional[JobState] - """Output only. The detailed state of the job.""" - update_time: Optional[datetime.datetime] """Output only. 
Time when the CustomJob was most recently updated.""" @@ -3245,27 +3034,29 @@ class CustomJobDict(TypedDict, total=False): CustomJobOrDict = Union[CustomJob, CustomJobDict] -class BaseConfig(_common.BaseModel): +class VertexBaseConfig(_common.BaseModel): + """Base config for Vertex AI.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) -class BaseConfigDict(TypedDict, total=False): +class VertexBaseConfigDict(TypedDict, total=False): + """Base config for Vertex AI.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" -BaseConfigOrDict = Union[BaseConfig, BaseConfigDict] +VertexBaseConfigOrDict = Union[VertexBaseConfig, VertexBaseConfigDict] class _CustomJobParameters(_common.BaseModel): """Represents a job that runs custom workloads such as a Docker container or a Python package.""" custom_job: Optional[CustomJob] = Field(default=None, description="""""") - config: Optional[BaseConfig] = Field(default=None, description="""""") + config: Optional[VertexBaseConfig] = Field(default=None, description="""""") class _CustomJobParametersDict(TypedDict, total=False): @@ -3274,7 +3065,7 @@ class _CustomJobParametersDict(TypedDict, total=False): custom_job: Optional[CustomJobDict] """""" - config: Optional[BaseConfigDict] + config: Optional[VertexBaseConfigDict] """""" @@ -3285,7 +3076,7 @@ class _GetCustomJobParameters(_common.BaseModel): """Represents a job that runs custom workloads such as a Docker container or a Python package.""" name: Optional[str] = Field(default=None, description="""""") - config: Optional[BaseConfig] = Field(default=None, description="""""") + config: Optional[VertexBaseConfig] = Field(default=None, description="""""") class _GetCustomJobParametersDict(TypedDict, total=False): @@ -3294,7 +3085,7 @@ class 
_GetCustomJobParametersDict(TypedDict, total=False): name: Optional[str] """""" - config: Optional[BaseConfigDict] + config: Optional[VertexBaseConfigDict] """""" @@ -3621,14 +3412,12 @@ class MemoryBankCustomizationConfigMemoryTopic(_common.BaseModel): custom_memory_topic: Optional[ MemoryBankCustomizationConfigMemoryTopicCustomMemoryTopic ] = Field( - default=None, - description="""A custom memory topic defined by the developer.""", + default=None, description="""A custom memory topic defined by the developer.""" ) managed_memory_topic: Optional[ MemoryBankCustomizationConfigMemoryTopicManagedMemoryTopic ] = Field( - default=None, - description="""A managed memory topic defined by Memory Bank.""", + default=None, description="""A managed memory topic defined by Memory Bank.""" ) @@ -3682,10 +3471,7 @@ class MemoryBankCustomizationConfigGenerateMemoriesExampleConversationSourceEven class MemoryBankCustomizationConfigGenerateMemoriesExampleConversationSource( _common.BaseModel ): - """A conversation source for the example. - - This is similar to `DirectContentsSource`. - """ + """A conversation source for the example. This is similar to `DirectContentsSource`.""" events: Optional[ list[ @@ -3700,10 +3486,7 @@ class MemoryBankCustomizationConfigGenerateMemoriesExampleConversationSource( class MemoryBankCustomizationConfigGenerateMemoriesExampleConversationSourceDict( TypedDict, total=False ): - """A conversation source for the example. - - This is similar to `DirectContentsSource`. - """ + """A conversation source for the example. This is similar to `DirectContentsSource`.""" events: Optional[ list[ @@ -3725,8 +3508,7 @@ class MemoryBankCustomizationConfigGenerateMemoriesExampleGeneratedMemory( """A memory generated by the operation.""" fact: Optional[str] = Field( - default=None, - description="""Required. The fact to generate a memory from.""", + default=None, description="""Required. 
The fact to generate a memory from.""" ) @@ -3966,7 +3748,7 @@ class ReasoningEngineContextSpecDict(TypedDict, total=False): class CreateAgentEngineConfig(_common.BaseModel): """Config for create agent engine.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) display_name: Optional[str] = Field( @@ -3981,8 +3763,7 @@ class CreateAgentEngineConfig(_common.BaseModel): default=None, description="""The description of the Agent Engine.""" ) spec: Optional[ReasoningEngineSpec] = Field( - default=None, - description="""Optional. Configurations of the Agent Engine.""", + default=None, description="""Optional. Configurations of the Agent Engine.""" ) context_spec: Optional[ReasoningEngineContextSpec] = Field( default=None, @@ -4020,7 +3801,7 @@ class CreateAgentEngineConfig(_common.BaseModel): Recommended value: 2 * cpu + 1. Defaults to 9. """, ) - encryption_spec: Optional[EncryptionSpec] = Field( + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( default=None, description="""The encryption spec to be used for the Agent Engine.""", ) @@ -4029,7 +3810,7 @@ class CreateAgentEngineConfig(_common.BaseModel): class CreateAgentEngineConfigDict(TypedDict, total=False): """Config for create agent engine.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" display_name: Optional[str] @@ -4075,7 +3856,7 @@ class CreateAgentEngineConfigDict(TypedDict, total=False): Recommended value: 2 * cpu + 1. Defaults to 9. 
""" - encryption_spec: Optional[EncryptionSpecDict] + encryption_spec: Optional[genai_types.EncryptionSpecDict] """The encryption spec to be used for the Agent Engine.""" @@ -4105,6 +3886,10 @@ class _CreateAgentEngineRequestParametersDict(TypedDict, total=False): class ReasoningEngine(_common.BaseModel): """An agent engine.""" + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( + default=None, + description="""Customer-managed encryption key spec for a ReasoningEngine. If set, this ReasoningEngine and all sub-resources of this ReasoningEngine will be secured by this key.""", + ) context_spec: Optional[ReasoningEngineContextSpec] = Field( default=None, description="""Optional. Configuration for how Agent Engine sub-resources should manage context.""", @@ -4121,10 +3906,6 @@ class ReasoningEngine(_common.BaseModel): default=None, description="""Required. The display name of the ReasoningEngine.""", ) - encryption_spec: Optional[EncryptionSpec] = Field( - default=None, - description="""Customer-managed encryption key spec for a ReasoningEngine. If set, this ReasoningEngine and all sub-resources of this ReasoningEngine will be secured by this key.""", - ) etag: Optional[str] = Field( default=None, description="""Optional. Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.""", @@ -4134,8 +3915,7 @@ class ReasoningEngine(_common.BaseModel): description="""Identifier. The resource name of the ReasoningEngine. Format: `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`""", ) spec: Optional[ReasoningEngineSpec] = Field( - default=None, - description="""Optional. Configurations of the ReasoningEngine""", + default=None, description="""Optional. 
Configurations of the ReasoningEngine""" ) update_time: Optional[datetime.datetime] = Field( default=None, @@ -4146,6 +3926,9 @@ class ReasoningEngine(_common.BaseModel): class ReasoningEngineDict(TypedDict, total=False): """An agent engine.""" + encryption_spec: Optional[genai_types.EncryptionSpecDict] + """Customer-managed encryption key spec for a ReasoningEngine. If set, this ReasoningEngine and all sub-resources of this ReasoningEngine will be secured by this key.""" + context_spec: Optional[ReasoningEngineContextSpecDict] """Optional. Configuration for how Agent Engine sub-resources should manage context.""" @@ -4158,9 +3941,6 @@ class ReasoningEngineDict(TypedDict, total=False): display_name: Optional[str] """Required. The display name of the ReasoningEngine.""" - encryption_spec: Optional[EncryptionSpecDict] - """Customer-managed encryption key spec for a ReasoningEngine. If set, this ReasoningEngine and all sub-resources of this ReasoningEngine will be secured by this key.""" - etag: Optional[str] """Optional. Used to perform consistent read-modify-write updates. 
If not set, a blind "overwrite" update happens.""" @@ -4226,7 +4006,7 @@ class AgentEngineOperationDict(TypedDict, total=False): class DeleteAgentEngineConfig(_common.BaseModel): """Config for deleting agent engine.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -4234,7 +4014,7 @@ class DeleteAgentEngineConfig(_common.BaseModel): class DeleteAgentEngineConfigDict(TypedDict, total=False): """Config for deleting agent engine.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -4319,7 +4099,7 @@ class DeleteAgentEngineOperationDict(TypedDict, total=False): class GetAgentEngineConfig(_common.BaseModel): """Config for create agent engine.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -4327,7 +4107,7 @@ class GetAgentEngineConfig(_common.BaseModel): class GetAgentEngineConfigDict(TypedDict, total=False): """Config for create agent engine.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -4361,7 +4141,7 @@ class _GetAgentEngineRequestParametersDict(TypedDict, total=False): class ListAgentEngineConfig(_common.BaseModel): """Config for listing agent engines.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) page_size: Optional[int] = Field(default=None, description="""""") @@ -4376,7 +4156,7 @@ class ListAgentEngineConfig(_common.BaseModel): class ListAgentEngineConfigDict(TypedDict, total=False): """Config for listing agent engines.""" - http_options: Optional[HttpOptionsDict] + 
http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" page_size: Optional[int] @@ -4411,36 +4191,10 @@ class _ListAgentEngineRequestParametersDict(TypedDict, total=False): ] -class HttpResponse(_common.BaseModel): - """A wrapper class for the http response.""" - - headers: Optional[dict[str, str]] = Field( - default=None, - description="""Used to retain the processed HTTP headers in the response.""", - ) - body: Optional[str] = Field( - default=None, - description="""The raw HTTP response body, in JSON format.""", - ) - - -class HttpResponseDict(TypedDict, total=False): - """A wrapper class for the http response.""" - - headers: Optional[dict[str, str]] - """Used to retain the processed HTTP headers in the response.""" - - body: Optional[str] - """The raw HTTP response body, in JSON format.""" - - -HttpResponseOrDict = Union[HttpResponse, HttpResponseDict] - - class ListReasoningEnginesResponse(_common.BaseModel): """Response for listing agent engines.""" - sdk_http_response: Optional[HttpResponse] = Field( + sdk_http_response: Optional[genai_types.HttpResponse] = Field( default=None, description="""Used to retain the full HTTP response.""" ) next_page_token: Optional[str] = Field(default=None, description="""""") @@ -4454,7 +4208,7 @@ class ListReasoningEnginesResponse(_common.BaseModel): class ListReasoningEnginesResponseDict(TypedDict, total=False): """Response for listing agent engines.""" - sdk_http_response: Optional[HttpResponseDict] + sdk_http_response: Optional[genai_types.HttpResponseDict] """Used to retain the full HTTP response.""" next_page_token: Optional[str] @@ -4472,14 +4226,14 @@ class ListReasoningEnginesResponseDict(TypedDict, total=False): class GetAgentEngineOperationConfig(_common.BaseModel): - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) class 
GetAgentEngineOperationConfigDict(TypedDict, total=False): - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -4492,12 +4246,10 @@ class _GetAgentEngineOperationParameters(_common.BaseModel): """Parameters for getting an operation with an agent engine as a response.""" operation_name: Optional[str] = Field( - default=None, - description="""The server-assigned name for the operation.""", + default=None, description="""The server-assigned name for the operation.""" ) config: Optional[GetAgentEngineOperationConfig] = Field( - default=None, - description="""Used to override the default configuration.""", + default=None, description="""Used to override the default configuration.""" ) @@ -4519,7 +4271,7 @@ class _GetAgentEngineOperationParametersDict(TypedDict, total=False): class QueryAgentEngineConfig(_common.BaseModel): """Config for querying agent engines.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) class_method: Optional[str] = Field( @@ -4534,7 +4286,7 @@ class QueryAgentEngineConfig(_common.BaseModel): class QueryAgentEngineConfigDict(TypedDict, total=False): """Config for querying agent engines.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" class_method: Optional[str] @@ -4598,7 +4350,7 @@ class QueryReasoningEngineResponseDict(TypedDict, total=False): class UpdateAgentEngineConfig(_common.BaseModel): """Config for updating agent engine.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) display_name: Optional[str] = Field( @@ -4613,8 +4365,7 @@ class UpdateAgentEngineConfig(_common.BaseModel): default=None, 
description="""The description of the Agent Engine.""" ) spec: Optional[ReasoningEngineSpec] = Field( - default=None, - description="""Optional. Configurations of the Agent Engine.""", + default=None, description="""Optional. Configurations of the Agent Engine.""" ) context_spec: Optional[ReasoningEngineContextSpec] = Field( default=None, @@ -4652,7 +4403,7 @@ class UpdateAgentEngineConfig(_common.BaseModel): Recommended value: 2 * cpu + 1. Defaults to 9. """, ) - encryption_spec: Optional[EncryptionSpec] = Field( + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( default=None, description="""The encryption spec to be used for the Agent Engine.""", ) @@ -4666,7 +4417,7 @@ class UpdateAgentEngineConfig(_common.BaseModel): class UpdateAgentEngineConfigDict(TypedDict, total=False): """Config for updating agent engine.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" display_name: Optional[str] @@ -4712,7 +4463,7 @@ class UpdateAgentEngineConfigDict(TypedDict, total=False): Recommended value: 2 * cpu + 1. Defaults to 9. 
""" - encryption_spec: Optional[EncryptionSpecDict] + encryption_spec: Optional[genai_types.EncryptionSpecDict] """The encryption spec to be used for the Agent Engine.""" update_mask: Optional[str] @@ -4752,7 +4503,7 @@ class _UpdateAgentEngineRequestParametersDict(TypedDict, total=False): class AgentEngineMemoryConfig(_common.BaseModel): """Config for creating a Memory.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) display_name: Optional[str] = Field( @@ -4780,7 +4531,7 @@ class AgentEngineMemoryConfig(_common.BaseModel): class AgentEngineMemoryConfigDict(TypedDict, total=False): """Config for creating a Memory.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" display_name: Optional[str] @@ -4982,7 +4733,7 @@ class AgentEngineMemoryOperationDict(TypedDict, total=False): class DeleteAgentEngineMemoryConfig(_common.BaseModel): """Config for deleting an Agent Engine Memory.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -4990,7 +4741,7 @@ class DeleteAgentEngineMemoryConfig(_common.BaseModel): class DeleteAgentEngineMemoryConfigDict(TypedDict, total=False): """Config for deleting an Agent Engine Memory.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -5003,8 +4754,7 @@ class _DeleteAgentEngineMemoryRequestParameters(_common.BaseModel): """Parameters for deleting agent engines.""" name: Optional[str] = Field( - default=None, - description="""Name of the agent engine memory to delete.""", + default=None, description="""Name of the agent engine memory to delete.""" ) config: 
Optional[DeleteAgentEngineMemoryConfig] = Field( default=None, description="""""" @@ -5200,7 +4950,7 @@ class GenerateMemoriesRequestDirectMemoriesSourceDict(TypedDict, total=False): class GenerateAgentEngineMemoriesConfig(_common.BaseModel): """Config for generating memories.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) disable_consolidation: Optional[bool] = Field( @@ -5221,7 +4971,7 @@ class GenerateAgentEngineMemoriesConfig(_common.BaseModel): class GenerateAgentEngineMemoriesConfigDict(TypedDict, total=False): """Config for generating memories.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" disable_consolidation: Optional[bool] @@ -5252,17 +5002,17 @@ class _GenerateAgentEngineMemoriesRequestParameters(_common.BaseModel): default=None, description="""The vertex session source of the memories that should be generated.""", ) - direct_contents_source: Optional[ - GenerateMemoriesRequestDirectContentsSource - ] = Field( - default=None, - description="""The direct contents source of the memories that should be generated.""", + direct_contents_source: Optional[GenerateMemoriesRequestDirectContentsSource] = ( + Field( + default=None, + description="""The direct contents source of the memories that should be generated.""", + ) ) - direct_memories_source: Optional[ - GenerateMemoriesRequestDirectMemoriesSource - ] = Field( - default=None, - description="""The direct memories source of the memories that should be generated.""", + direct_memories_source: Optional[GenerateMemoriesRequestDirectMemoriesSource] = ( + Field( + default=None, + description="""The direct memories source of the memories that should be generated.""", + ) ) scope: Optional[dict[str, str]] = Field( default=None, @@ -5333,8 +5083,7 @@ class 
GenerateMemoriesResponseGeneratedMemoryDict(TypedDict, total=False): GenerateMemoriesResponseGeneratedMemoryOrDict = Union[ - GenerateMemoriesResponseGeneratedMemory, - GenerateMemoriesResponseGeneratedMemoryDict, + GenerateMemoriesResponseGeneratedMemory, GenerateMemoriesResponseGeneratedMemoryDict ] @@ -5402,15 +5151,14 @@ class AgentEngineGenerateMemoriesOperationDict(TypedDict, total=False): AgentEngineGenerateMemoriesOperationOrDict = Union[ - AgentEngineGenerateMemoriesOperation, - AgentEngineGenerateMemoriesOperationDict, + AgentEngineGenerateMemoriesOperation, AgentEngineGenerateMemoriesOperationDict ] class GetAgentEngineMemoryConfig(_common.BaseModel): """Config for getting an Agent Engine Memory.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -5418,7 +5166,7 @@ class GetAgentEngineMemoryConfig(_common.BaseModel): class GetAgentEngineMemoryConfigDict(TypedDict, total=False): """Config for getting an Agent Engine Memory.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -5449,15 +5197,14 @@ class _GetAgentEngineMemoryRequestParametersDict(TypedDict, total=False): _GetAgentEngineMemoryRequestParametersOrDict = Union[ - _GetAgentEngineMemoryRequestParameters, - _GetAgentEngineMemoryRequestParametersDict, + _GetAgentEngineMemoryRequestParameters, _GetAgentEngineMemoryRequestParametersDict ] class ListAgentEngineMemoryConfig(_common.BaseModel): """Config for listing agent engine memories.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) page_size: Optional[int] = Field(default=None, description="""""") @@ -5467,12 +5214,23 @@ class ListAgentEngineMemoryConfig(_common.BaseModel): description="""An 
expression for filtering the results of the request. For field names both snake_case and camelCase are supported.""", ) + order_by: Optional[str] = Field( + default=None, + description="""The standard list order by string. If not specified, the default + order is `create_time desc`. If specified, the default sorting order of + provided fields is ascending. More detail in + [AIP-132](https://google.aip.dev/132). + + Supported fields: + * `create_time` + * `update_time`""", + ) class ListAgentEngineMemoryConfigDict(TypedDict, total=False): """Config for listing agent engine memories.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" page_size: Optional[int] @@ -5485,6 +5243,16 @@ class ListAgentEngineMemoryConfigDict(TypedDict, total=False): """An expression for filtering the results of the request. For field names both snake_case and camelCase are supported.""" + order_by: Optional[str] + """The standard list order by string. If not specified, the default + order is `create_time desc`. If specified, the default sorting order of + provided fields is ascending. More detail in + [AIP-132](https://google.aip.dev/132). 
+ + Supported fields: + * `create_time` + * `update_time`""" + ListAgentEngineMemoryConfigOrDict = Union[ ListAgentEngineMemoryConfig, ListAgentEngineMemoryConfigDict @@ -5513,15 +5281,14 @@ class _ListAgentEngineMemoryRequestParametersDict(TypedDict, total=False): _ListAgentEngineMemoryRequestParametersOrDict = Union[ - _ListAgentEngineMemoryRequestParameters, - _ListAgentEngineMemoryRequestParametersDict, + _ListAgentEngineMemoryRequestParameters, _ListAgentEngineMemoryRequestParametersDict ] class ListReasoningEnginesMemoriesResponse(_common.BaseModel): """Response for listing agent engine memories.""" - sdk_http_response: Optional[HttpResponse] = Field( + sdk_http_response: Optional[genai_types.HttpResponse] = Field( default=None, description="""Used to retain the full HTTP response.""" ) next_page_token: Optional[str] = Field(default=None, description="""""") @@ -5533,7 +5300,7 @@ class ListReasoningEnginesMemoriesResponse(_common.BaseModel): class ListReasoningEnginesMemoriesResponseDict(TypedDict, total=False): """Response for listing agent engine memories.""" - sdk_http_response: Optional[HttpResponseDict] + sdk_http_response: Optional[genai_types.HttpResponseDict] """Used to retain the full HTTP response.""" next_page_token: Optional[str] @@ -5544,8 +5311,7 @@ class ListReasoningEnginesMemoriesResponseDict(TypedDict, total=False): ListReasoningEnginesMemoriesResponseOrDict = Union[ - ListReasoningEnginesMemoriesResponse, - ListReasoningEnginesMemoriesResponseDict, + ListReasoningEnginesMemoriesResponse, ListReasoningEnginesMemoriesResponseDict ] @@ -5553,12 +5319,10 @@ class _GetAgentEngineMemoryOperationParameters(_common.BaseModel): """Parameters for getting an operation with a memory as a response.""" operation_name: Optional[str] = Field( - default=None, - description="""The server-assigned name for the operation.""", + default=None, description="""The server-assigned name for the operation.""" ) config: Optional[GetAgentEngineOperationConfig] = Field( 
- default=None, - description="""Used to override the default configuration.""", + default=None, description="""Used to override the default configuration.""" ) @@ -5582,12 +5346,10 @@ class _GetAgentEngineGenerateMemoriesOperationParameters(_common.BaseModel): """Parameters for getting an operation with generated memories as a response.""" operation_name: Optional[str] = Field( - default=None, - description="""The server-assigned name for the operation.""", + default=None, description="""The server-assigned name for the operation.""" ) config: Optional[GetAgentEngineOperationConfig] = Field( - default=None, - description="""Used to override the default configuration.""", + default=None, description="""Used to override the default configuration.""" ) @@ -5668,7 +5430,7 @@ class RetrieveMemoriesRequestSimpleRetrievalParamsDict(TypedDict, total=False): class RetrieveAgentEngineMemoriesConfig(_common.BaseModel): """Config for retrieving memories.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -5676,7 +5438,7 @@ class RetrieveAgentEngineMemoriesConfig(_common.BaseModel): class RetrieveAgentEngineMemoriesConfigDict(TypedDict, total=False): """Config for retrieving memories.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -5706,11 +5468,11 @@ class _RetrieveAgentEngineMemoriesRequestParameters(_common.BaseModel): default=None, description="""Parameters for semantic similarity search based retrieval.""", ) - simple_retrieval_params: Optional[ - RetrieveMemoriesRequestSimpleRetrievalParams - ] = Field( - default=None, - description="""Parameters for simple (non-similarity search) retrieval.""", + simple_retrieval_params: Optional[RetrieveMemoriesRequestSimpleRetrievalParams] = ( + Field( + default=None, + description="""Parameters for 
simple (non-similarity search) retrieval.""", + ) ) config: Optional[RetrieveAgentEngineMemoriesConfig] = Field( default=None, description="""""" @@ -5771,8 +5533,7 @@ class RetrieveMemoriesResponseRetrievedMemoryDict(TypedDict, total=False): RetrieveMemoriesResponseRetrievedMemoryOrDict = Union[ - RetrieveMemoriesResponseRetrievedMemory, - RetrieveMemoriesResponseRetrievedMemoryDict, + RetrieveMemoriesResponseRetrievedMemory, RetrieveMemoriesResponseRetrievedMemoryDict ] @@ -5806,7 +5567,7 @@ class RetrieveMemoriesResponseDict(TypedDict, total=False): class UpdateAgentEngineMemoryConfig(_common.BaseModel): """Config for updating agent engine memory.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) display_name: Optional[str] = Field( @@ -5839,7 +5600,7 @@ class UpdateAgentEngineMemoryConfig(_common.BaseModel): class UpdateAgentEngineMemoryConfigDict(TypedDict, total=False): """Config for updating agent engine memory.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" display_name: Optional[str] @@ -5873,8 +5634,7 @@ class _UpdateAgentEngineMemoryRequestParameters(_common.BaseModel): """Parameters for updating agent engine memories.""" name: Optional[str] = Field( - default=None, - description="""Name of the agent engine memory to update.""", + default=None, description="""Name of the agent engine memory to update.""" ) fact: Optional[str] = Field( default=None, @@ -5923,168 +5683,204 @@ class _UpdateAgentEngineMemoryRequestParametersDict(TypedDict, total=False): ] -class CreateAgentEngineSessionConfig(_common.BaseModel): - """Config for creating a Session.""" +class SandboxEnvironmentSpecCodeExecutionEnvironment(_common.BaseModel): + """The code execution environment with customized settings.""" - http_options: Optional[HttpOptions] = Field( 
- default=None, description="""Used to override HTTP request options.""" - ) - display_name: Optional[str] = Field( - default=None, description="""The display name of the session.""" - ) - session_state: Optional[dict[str, Any]] = Field( + code_language: Optional[Language] = Field( default=None, - description="""Session state which stores key conversation points.""", + description="""The coding language supported in this environment.""", ) - wait_for_completion: Optional[bool] = Field( - default=True, - description="""Waits for the operation to complete before returning.""", + dependencies: Optional[list[str]] = Field( + default=None, + description="""Optional. The additional dependencies to install in the code execution environment. For example, "pandas==2.2.3".""", ) - ttl: Optional[str] = Field( + env: Optional[list[EnvVar]] = Field( default=None, - description="""Optional. Input only. The TTL for this resource. - - The expiration time is computed: now + TTL.""", + description="""Optional. The environment variables to set in the code execution environment.""", ) - expire_time: Optional[datetime.datetime] = Field( + machine_config: Optional[MachineConfig] = Field( default=None, - description="""Optional. Timestamp of when this resource is considered expired. 
This is *always* provided on output, regardless of what `expiration` was sent on input.""", + description="""The machine config of the code execution environment.""", ) -class CreateAgentEngineSessionConfigDict(TypedDict, total=False): - """Config for creating a Session.""" +class SandboxEnvironmentSpecCodeExecutionEnvironmentDict(TypedDict, total=False): + """The code execution environment with customized settings.""" - http_options: Optional[HttpOptionsDict] - """Used to override HTTP request options.""" + code_language: Optional[Language] + """The coding language supported in this environment.""" - display_name: Optional[str] - """The display name of the session.""" + dependencies: Optional[list[str]] + """Optional. The additional dependencies to install in the code execution environment. For example, "pandas==2.2.3".""" - session_state: Optional[dict[str, Any]] - """Session state which stores key conversation points.""" + env: Optional[list[EnvVarDict]] + """Optional. The environment variables to set in the code execution environment.""" - wait_for_completion: Optional[bool] - """Waits for the operation to complete before returning.""" + machine_config: Optional[MachineConfig] + """The machine config of the code execution environment.""" - ttl: Optional[str] - """Optional. Input only. The TTL for this resource. - The expiration time is computed: now + TTL.""" +SandboxEnvironmentSpecCodeExecutionEnvironmentOrDict = Union[ + SandboxEnvironmentSpecCodeExecutionEnvironment, + SandboxEnvironmentSpecCodeExecutionEnvironmentDict, +] - expire_time: Optional[datetime.datetime] - """Optional. Timestamp of when this resource is considered expired. 
This is *always* provided on output, regardless of what `expiration` was sent on input.""" +class SandboxEnvironmentSpec(_common.BaseModel): + """The specification of a sandbox environment.""" -CreateAgentEngineSessionConfigOrDict = Union[ - CreateAgentEngineSessionConfig, CreateAgentEngineSessionConfigDict -] + code_execution_environment: Optional[ + SandboxEnvironmentSpecCodeExecutionEnvironment + ] = Field(default=None, description="""Optional. The code execution environment.""") -class _CreateAgentEngineSessionRequestParameters(_common.BaseModel): - """Parameters for creating Agent Engine Sessions.""" +class SandboxEnvironmentSpecDict(TypedDict, total=False): + """The specification of a sandbox environment.""" - name: Optional[str] = Field( - default=None, - description="""Name of the agent engine to create the session under.""", + code_execution_environment: Optional[ + SandboxEnvironmentSpecCodeExecutionEnvironmentDict + ] + """Optional. The code execution environment.""" + + +SandboxEnvironmentSpecOrDict = Union[SandboxEnvironmentSpec, SandboxEnvironmentSpecDict] + + +class CreateAgentEngineSandboxConfig(_common.BaseModel): + """Config for creating a Sandbox.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" ) - user_id: Optional[str] = Field( - default=None, description="""The user ID of the session.""" + display_name: Optional[str] = Field( + default=None, description="""The display name of the sandbox.""" ) - config: Optional[CreateAgentEngineSessionConfig] = Field( + description: Optional[str] = Field( + default=None, description="""The description of the sandbox.""" + ) + wait_for_completion: Optional[bool] = Field( + default=True, + description="""Waits for the operation to complete before returning.""", + ) + + +class CreateAgentEngineSandboxConfigDict(TypedDict, total=False): + """Config for creating a Sandbox.""" + + http_options: 
Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + display_name: Optional[str] + """The display name of the sandbox.""" + + description: Optional[str] + """The description of the sandbox.""" + + wait_for_completion: Optional[bool] + """Waits for the operation to complete before returning.""" + + +CreateAgentEngineSandboxConfigOrDict = Union[ + CreateAgentEngineSandboxConfig, CreateAgentEngineSandboxConfigDict +] + + +class _CreateAgentEngineSandboxRequestParameters(_common.BaseModel): + """Parameters for creating Agent Engine Sandboxes.""" + + name: Optional[str] = Field( + default=None, + description="""Name of the agent engine to create the sandbox under.""", + ) + spec: Optional[SandboxEnvironmentSpec] = Field( + default=None, description="""The specification of the sandbox.""" + ) + config: Optional[CreateAgentEngineSandboxConfig] = Field( default=None, description="""""" ) -class _CreateAgentEngineSessionRequestParametersDict(TypedDict, total=False): - """Parameters for creating Agent Engine Sessions.""" +class _CreateAgentEngineSandboxRequestParametersDict(TypedDict, total=False): + """Parameters for creating Agent Engine Sandboxes.""" name: Optional[str] - """Name of the agent engine to create the session under.""" + """Name of the agent engine to create the sandbox under.""" - user_id: Optional[str] - """The user ID of the session.""" + spec: Optional[SandboxEnvironmentSpecDict] + """The specification of the sandbox.""" - config: Optional[CreateAgentEngineSessionConfigDict] + config: Optional[CreateAgentEngineSandboxConfigDict] """""" -_CreateAgentEngineSessionRequestParametersOrDict = Union[ - _CreateAgentEngineSessionRequestParameters, - _CreateAgentEngineSessionRequestParametersDict, +_CreateAgentEngineSandboxRequestParametersOrDict = Union[ + _CreateAgentEngineSandboxRequestParameters, + _CreateAgentEngineSandboxRequestParametersDict, ] -class Session(_common.BaseModel): - """A session.""" +class 
SandboxEnvironment(_common.BaseModel): + """A sandbox environment.""" create_time: Optional[datetime.datetime] = Field( default=None, - description="""Output only. Timestamp when the session was created.""", + description="""Output only. The timestamp when this SandboxEnvironment was created.""", ) display_name: Optional[str] = Field( default=None, - description="""Optional. The display name of the session.""", + description="""Required. The display name of the SandboxEnvironment.""", ) - expire_time: Optional[datetime.datetime] = Field( + metadata: Optional[Any] = Field( default=None, - description="""Optional. Timestamp of when this session is considered expired. This is *always* provided on output, regardless of what was sent on input.""", + description="""Output only. Additional information about the SandboxEnvironment.""", ) name: Optional[str] = Field( - default=None, - description="""Identifier. The resource name of the session. Format: 'projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}'.""", + default=None, description="""Identifier. The name of the SandboxEnvironment.""" ) - session_state: Optional[dict[str, Any]] = Field( + spec: Optional[SandboxEnvironmentSpec] = Field( default=None, - description="""Optional. Session specific memory which stores key conversation points.""", + description="""Optional. The configuration of the SandboxEnvironment.""", ) - ttl: Optional[str] = Field( + state: Optional[State] = Field( default=None, - description="""Optional. Input only. The TTL for this session.""", + description="""Output only. The runtime state of the SandboxEnvironment.""", ) update_time: Optional[datetime.datetime] = Field( default=None, - description="""Output only. Timestamp when the session was updated.""", - ) - user_id: Optional[str] = Field( - default=None, - description="""Required. Immutable. String id provided by the user""", + description="""Output only. 
The timestamp when this SandboxEnvironment was most recently updated.""", ) -class SessionDict(TypedDict, total=False): - """A session.""" +class SandboxEnvironmentDict(TypedDict, total=False): + """A sandbox environment.""" create_time: Optional[datetime.datetime] - """Output only. Timestamp when the session was created.""" + """Output only. The timestamp when this SandboxEnvironment was created.""" display_name: Optional[str] - """Optional. The display name of the session.""" + """Required. The display name of the SandboxEnvironment.""" - expire_time: Optional[datetime.datetime] - """Optional. Timestamp of when this session is considered expired. This is *always* provided on output, regardless of what was sent on input.""" + metadata: Optional[Any] + """Output only. Additional information about the SandboxEnvironment.""" name: Optional[str] - """Identifier. The resource name of the session. Format: 'projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}'.""" + """Identifier. The name of the SandboxEnvironment.""" - session_state: Optional[dict[str, Any]] - """Optional. Session specific memory which stores key conversation points.""" + spec: Optional[SandboxEnvironmentSpecDict] + """Optional. The configuration of the SandboxEnvironment.""" - ttl: Optional[str] - """Optional. Input only. The TTL for this session.""" + state: Optional[State] + """Output only. The runtime state of the SandboxEnvironment.""" update_time: Optional[datetime.datetime] - """Output only. Timestamp when the session was updated.""" - - user_id: Optional[str] - """Required. Immutable. String id provided by the user""" + """Output only. 
The timestamp when this SandboxEnvironment was most recently updated.""" -SessionOrDict = Union[Session, SessionDict] +SandboxEnvironmentOrDict = Union[SandboxEnvironment, SandboxEnvironmentDict] -class AgentEngineSessionOperation(_common.BaseModel): - """Operation that has an agent engine session as a response.""" +class AgentEngineSandboxOperation(_common.BaseModel): + """Operation that has an agent engine sandbox as a response.""" name: Optional[str] = Field( default=None, @@ -6102,13 +5898,13 @@ class AgentEngineSessionOperation(_common.BaseModel): default=None, description="""The error result of the operation in case of failure or cancellation.""", ) - response: Optional[Session] = Field( - default=None, description="""The Agent Engine Session.""" + response: Optional[SandboxEnvironment] = Field( + default=None, description="""The Agent Engine Sandbox.""" ) -class AgentEngineSessionOperationDict(TypedDict, total=False): - """Operation that has an agent engine session as a response.""" +class AgentEngineSandboxOperationDict(TypedDict, total=False): + """Operation that has an agent engine sandbox as a response.""" name: Optional[str] """The server-assigned name, which is only unique within the same service that originally returns it. 
If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""" @@ -6122,65 +5918,64 @@ class AgentEngineSessionOperationDict(TypedDict, total=False): error: Optional[dict[str, Any]] """The error result of the operation in case of failure or cancellation.""" - response: Optional[SessionDict] - """The Agent Engine Session.""" + response: Optional[SandboxEnvironmentDict] + """The Agent Engine Sandbox.""" -AgentEngineSessionOperationOrDict = Union[ - AgentEngineSessionOperation, AgentEngineSessionOperationDict +AgentEngineSandboxOperationOrDict = Union[ + AgentEngineSandboxOperation, AgentEngineSandboxOperationDict ] -class DeleteAgentEngineSessionConfig(_common.BaseModel): - """Config for deleting an Agent Engine Session.""" +class DeleteAgentEngineSandboxConfig(_common.BaseModel): + """Config for deleting an Agent Engine Sandbox.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) -class DeleteAgentEngineSessionConfigDict(TypedDict, total=False): - """Config for deleting an Agent Engine Session.""" +class DeleteAgentEngineSandboxConfigDict(TypedDict, total=False): + """Config for deleting an Agent Engine Sandbox.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" -DeleteAgentEngineSessionConfigOrDict = Union[ - DeleteAgentEngineSessionConfig, DeleteAgentEngineSessionConfigDict +DeleteAgentEngineSandboxConfigOrDict = Union[ + DeleteAgentEngineSandboxConfig, DeleteAgentEngineSandboxConfigDict ] -class _DeleteAgentEngineSessionRequestParameters(_common.BaseModel): - """Parameters for deleting agent engine sessions.""" +class _DeleteAgentEngineSandboxRequestParameters(_common.BaseModel): + """Parameters for deleting agent engines.""" name: Optional[str] = Field( - default=None, - description="""Name 
of the agent engine session to delete.""", + default=None, description="""Name of the agent engine sandbox to delete.""" ) - config: Optional[DeleteAgentEngineSessionConfig] = Field( + config: Optional[DeleteAgentEngineSandboxConfig] = Field( default=None, description="""""" ) -class _DeleteAgentEngineSessionRequestParametersDict(TypedDict, total=False): - """Parameters for deleting agent engine sessions.""" +class _DeleteAgentEngineSandboxRequestParametersDict(TypedDict, total=False): + """Parameters for deleting agent engines.""" name: Optional[str] - """Name of the agent engine session to delete.""" + """Name of the agent engine sandbox to delete.""" - config: Optional[DeleteAgentEngineSessionConfigDict] + config: Optional[DeleteAgentEngineSandboxConfigDict] """""" -_DeleteAgentEngineSessionRequestParametersOrDict = Union[ - _DeleteAgentEngineSessionRequestParameters, - _DeleteAgentEngineSessionRequestParametersDict, +_DeleteAgentEngineSandboxRequestParametersOrDict = Union[ + _DeleteAgentEngineSandboxRequestParameters, + _DeleteAgentEngineSandboxRequestParametersDict, ] -class DeleteAgentEngineSessionOperation(_common.BaseModel): - """Operation for deleting agent engine sessions.""" +class DeleteAgentEngineSandboxOperation(_common.BaseModel): + """Operation for deleting agent engines.""" name: Optional[str] = Field( default=None, @@ -6200,8 +5995,8 @@ class DeleteAgentEngineSessionOperation(_common.BaseModel): ) -class DeleteAgentEngineSessionOperationDict(TypedDict, total=False): - """Operation for deleting agent engine sessions.""" +class DeleteAgentEngineSandboxOperationDict(TypedDict, total=False): + """Operation for deleting agent engines.""" name: Optional[str] """The server-assigned name, which is only unique within the same service that originally returns it. 
If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""" @@ -6216,62 +6011,186 @@ class DeleteAgentEngineSessionOperationDict(TypedDict, total=False): """The error result of the operation in case of failure or cancellation.""" -DeleteAgentEngineSessionOperationOrDict = Union[ - DeleteAgentEngineSessionOperation, DeleteAgentEngineSessionOperationDict +DeleteAgentEngineSandboxOperationOrDict = Union[ + DeleteAgentEngineSandboxOperation, DeleteAgentEngineSandboxOperationDict ] -class GetAgentEngineSessionConfig(_common.BaseModel): - """Config for getting an Agent Engine Session.""" +class Metadata(_common.BaseModel): + """Metadata for a chunk.""" + + attributes: Optional[dict[str, bytes]] = Field( + default=None, + description="""Optional. Attributes attached to the data. The keys have semantic conventions and the consumers of the attributes should know how to deserialize the value bytes based on the keys.""", + ) + + +class MetadataDict(TypedDict, total=False): + """Metadata for a chunk.""" + + attributes: Optional[dict[str, bytes]] + """Optional. Attributes attached to the data. The keys have semantic conventions and the consumers of the attributes should know how to deserialize the value bytes based on the keys.""" + + +MetadataOrDict = Union[Metadata, MetadataDict] + + +class Chunk(_common.BaseModel): + """A chunk of data.""" - http_options: Optional[HttpOptions] = Field( + mime_type: Optional[str] = Field( + default=None, + description="""Required. Mime type of the chunk data. See https://www.iana.org/assignments/media-types/media-types.xhtml for the full list.""", + ) + data: Optional[bytes] = Field( + default=None, description="""Required. The data in the chunk.""" + ) + metadata: Optional[Metadata] = Field( + default=None, + description="""Optional. 
Metadata that is associated with the data in the payload.""", + ) + + +class ChunkDict(TypedDict, total=False): + """A chunk of data.""" + + mime_type: Optional[str] + """Required. Mime type of the chunk data. See https://www.iana.org/assignments/media-types/media-types.xhtml for the full list.""" + + data: Optional[bytes] + """Required. The data in the chunk.""" + + metadata: Optional[MetadataDict] + """Optional. Metadata that is associated with the data in the payload.""" + + +ChunkOrDict = Union[Chunk, ChunkDict] + + +class ExecuteCodeAgentEngineSandboxConfig(_common.BaseModel): + """Config for executing code in an Agent Engine sandbox.""" + + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) -class GetAgentEngineSessionConfigDict(TypedDict, total=False): - """Config for getting an Agent Engine Session.""" +class ExecuteCodeAgentEngineSandboxConfigDict(TypedDict, total=False): + """Config for executing code in an Agent Engine sandbox.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" -GetAgentEngineSessionConfigOrDict = Union[ - GetAgentEngineSessionConfig, GetAgentEngineSessionConfigDict +ExecuteCodeAgentEngineSandboxConfigOrDict = Union[ + ExecuteCodeAgentEngineSandboxConfig, ExecuteCodeAgentEngineSandboxConfigDict ] -class _GetAgentEngineSessionRequestParameters(_common.BaseModel): - """Parameters for getting an agent engine session.""" +class _ExecuteCodeAgentEngineSandboxRequestParameters(_common.BaseModel): + """Parameters for executing code in an agent engine sandbox.""" name: Optional[str] = Field( - default=None, description="""Name of the agent engine.""" + default=None, + description="""Name of the agent engine sandbox to execute code in.""", ) - config: Optional[GetAgentEngineSessionConfig] = Field( + inputs: Optional[list[Chunk]] = Field( + default=None, description="""Inputs to 
the code execution.""" + ) + config: Optional[ExecuteCodeAgentEngineSandboxConfig] = Field( default=None, description="""""" ) -class _GetAgentEngineSessionRequestParametersDict(TypedDict, total=False): - """Parameters for getting an agent engine session.""" +class _ExecuteCodeAgentEngineSandboxRequestParametersDict(TypedDict, total=False): + """Parameters for executing code in an agent engine sandbox.""" name: Optional[str] - """Name of the agent engine.""" + """Name of the agent engine sandbox to execute code in.""" - config: Optional[GetAgentEngineSessionConfigDict] + inputs: Optional[list[ChunkDict]] + """Inputs to the code execution.""" + + config: Optional[ExecuteCodeAgentEngineSandboxConfigDict] """""" -_GetAgentEngineSessionRequestParametersOrDict = Union[ - _GetAgentEngineSessionRequestParameters, - _GetAgentEngineSessionRequestParametersDict, +_ExecuteCodeAgentEngineSandboxRequestParametersOrDict = Union[ + _ExecuteCodeAgentEngineSandboxRequestParameters, + _ExecuteCodeAgentEngineSandboxRequestParametersDict, ] -class ListAgentEngineSessionsConfig(_common.BaseModel): - """Config for listing agent engine sessions.""" +class ExecuteSandboxEnvironmentResponse(_common.BaseModel): + """The response for executing a sandbox environment.""" + + outputs: Optional[list[Chunk]] = Field( + default=None, description="""The outputs from the sandbox environment.""" + ) + + +class ExecuteSandboxEnvironmentResponseDict(TypedDict, total=False): + """The response for executing a sandbox environment.""" + + outputs: Optional[list[ChunkDict]] + """The outputs from the sandbox environment.""" + + +ExecuteSandboxEnvironmentResponseOrDict = Union[ + ExecuteSandboxEnvironmentResponse, ExecuteSandboxEnvironmentResponseDict +] + + +class GetAgentEngineSandboxConfig(_common.BaseModel): + """Config for getting an Agent Engine Sandbox.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + + +class 
GetAgentEngineSandboxConfigDict(TypedDict, total=False): + """Config for getting an Agent Engine Sandbox.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + +GetAgentEngineSandboxConfigOrDict = Union[ + GetAgentEngineSandboxConfig, GetAgentEngineSandboxConfigDict +] + + +class _GetAgentEngineSandboxRequestParameters(_common.BaseModel): + """Parameters for getting an agent engine sandbox.""" + + name: Optional[str] = Field( + default=None, description="""Name of the agent engine sandbox.""" + ) + config: Optional[GetAgentEngineSandboxConfig] = Field( + default=None, description="""""" + ) + + +class _GetAgentEngineSandboxRequestParametersDict(TypedDict, total=False): + """Parameters for getting an agent engine sandbox.""" - http_options: Optional[HttpOptions] = Field( + name: Optional[str] + """Name of the agent engine sandbox.""" + + config: Optional[GetAgentEngineSandboxConfigDict] + """""" + + +_GetAgentEngineSandboxRequestParametersOrDict = Union[ + _GetAgentEngineSandboxRequestParameters, _GetAgentEngineSandboxRequestParametersDict +] + + +class ListAgentEngineSandboxesConfig(_common.BaseModel): + """Config for listing agent engine sandboxes.""" + + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) page_size: Optional[int] = Field(default=None, description="""""") @@ -6283,10 +6202,10 @@ class ListAgentEngineSessionsConfig(_common.BaseModel): ) -class ListAgentEngineSessionsConfigDict(TypedDict, total=False): - """Config for listing agent engine sessions.""" +class ListAgentEngineSandboxesConfigDict(TypedDict, total=False): + """Config for listing agent engine sandboxes.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" page_size: Optional[int] @@ -6300,84 +6219,81 @@ class ListAgentEngineSessionsConfigDict(TypedDict, total=False): 
For field names both snake_case and camelCase are supported.""" -ListAgentEngineSessionsConfigOrDict = Union[ - ListAgentEngineSessionsConfig, ListAgentEngineSessionsConfigDict +ListAgentEngineSandboxesConfigOrDict = Union[ + ListAgentEngineSandboxesConfig, ListAgentEngineSandboxesConfigDict ] -class _ListAgentEngineSessionsRequestParameters(_common.BaseModel): - """Parameters for listing agent engines.""" +class _ListAgentEngineSandboxesRequestParameters(_common.BaseModel): + """Parameters for listing agent engine sandboxes.""" name: Optional[str] = Field( default=None, description="""Name of the agent engine.""" ) - config: Optional[ListAgentEngineSessionsConfig] = Field( + config: Optional[ListAgentEngineSandboxesConfig] = Field( default=None, description="""""" ) -class _ListAgentEngineSessionsRequestParametersDict(TypedDict, total=False): - """Parameters for listing agent engines.""" +class _ListAgentEngineSandboxesRequestParametersDict(TypedDict, total=False): + """Parameters for listing agent engine sandboxes.""" name: Optional[str] """Name of the agent engine.""" - config: Optional[ListAgentEngineSessionsConfigDict] + config: Optional[ListAgentEngineSandboxesConfigDict] """""" -_ListAgentEngineSessionsRequestParametersOrDict = Union[ - _ListAgentEngineSessionsRequestParameters, - _ListAgentEngineSessionsRequestParametersDict, +_ListAgentEngineSandboxesRequestParametersOrDict = Union[ + _ListAgentEngineSandboxesRequestParameters, + _ListAgentEngineSandboxesRequestParametersDict, ] -class ListReasoningEnginesSessionsResponse(_common.BaseModel): - """Response for listing agent engine sessions.""" +class ListAgentEngineSandboxesResponse(_common.BaseModel): + """Response for listing agent engine sandboxes.""" - sdk_http_response: Optional[HttpResponse] = Field( + sdk_http_response: Optional[genai_types.HttpResponse] = Field( default=None, description="""Used to retain the full HTTP response.""" ) next_page_token: Optional[str] = Field(default=None, 
description="""""") - sessions: Optional[list[Session]] = Field( - default=None, description="""List of agent engine sessions.""" + sandbox_environments: Optional[list[SandboxEnvironment]] = Field( + default=None, description="""List of agent engine sandboxes.""" ) -class ListReasoningEnginesSessionsResponseDict(TypedDict, total=False): - """Response for listing agent engine sessions.""" +class ListAgentEngineSandboxesResponseDict(TypedDict, total=False): + """Response for listing agent engine sandboxes.""" - sdk_http_response: Optional[HttpResponseDict] + sdk_http_response: Optional[genai_types.HttpResponseDict] """Used to retain the full HTTP response.""" next_page_token: Optional[str] """""" - sessions: Optional[list[SessionDict]] - """List of agent engine sessions.""" + sandbox_environments: Optional[list[SandboxEnvironmentDict]] + """List of agent engine sandboxes.""" -ListReasoningEnginesSessionsResponseOrDict = Union[ - ListReasoningEnginesSessionsResponse, - ListReasoningEnginesSessionsResponseDict, +ListAgentEngineSandboxesResponseOrDict = Union[ + ListAgentEngineSandboxesResponse, ListAgentEngineSandboxesResponseDict ] -class _GetAgentEngineSessionOperationParameters(_common.BaseModel): - """Parameters for getting an operation with a session as a response.""" +class _GetAgentEngineSandboxOperationParameters(_common.BaseModel): + """Parameters for getting an operation with a sandbox as a response.""" operation_name: Optional[str] = Field( - default=None, - description="""The server-assigned name for the operation.""", + default=None, description="""The server-assigned name for the operation.""" ) config: Optional[GetAgentEngineOperationConfig] = Field( - default=None, - description="""Used to override the default configuration.""", + default=None, description="""Used to override the default configuration.""" ) -class _GetAgentEngineSessionOperationParametersDict(TypedDict, total=False): - """Parameters for getting an operation with a session as a 
response.""" +class _GetAgentEngineSandboxOperationParametersDict(TypedDict, total=False): + """Parameters for getting an operation with a sandbox as a response.""" operation_name: Optional[str] """The server-assigned name for the operation.""" @@ -6386,16 +6302,16 @@ class _GetAgentEngineSessionOperationParametersDict(TypedDict, total=False): """Used to override the default configuration.""" -_GetAgentEngineSessionOperationParametersOrDict = Union[ - _GetAgentEngineSessionOperationParameters, - _GetAgentEngineSessionOperationParametersDict, +_GetAgentEngineSandboxOperationParametersOrDict = Union[ + _GetAgentEngineSandboxOperationParameters, + _GetAgentEngineSandboxOperationParametersDict, ] -class UpdateAgentEngineSessionConfig(_common.BaseModel): - """Config for updating agent engine session.""" +class CreateAgentEngineSessionConfig(_common.BaseModel): + """Config for creating a Session.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) display_name: Optional[str] = Field( @@ -6419,21 +6335,12 @@ class UpdateAgentEngineSessionConfig(_common.BaseModel): default=None, description="""Optional. Timestamp of when this resource is considered expired. This is *always* provided on output, regardless of what `expiration` was sent on input.""", ) - update_mask: Optional[str] = Field( - default=None, - description="""The update mask to apply. 
For the `FieldMask` definition, see - https://protobuf.dev/reference/protobuf/google.protobuf/#field-mask.""", - ) - user_id: Optional[str] = Field( - default=None, - description="""User ID of the agent engine session to update.""", - ) -class UpdateAgentEngineSessionConfigDict(TypedDict, total=False): - """Config for updating agent engine session.""" +class CreateAgentEngineSessionConfigDict(TypedDict, total=False): + """Config for creating a Session.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" display_name: Optional[str] @@ -6453,605 +6360,584 @@ class UpdateAgentEngineSessionConfigDict(TypedDict, total=False): expire_time: Optional[datetime.datetime] """Optional. Timestamp of when this resource is considered expired. This is *always* provided on output, regardless of what `expiration` was sent on input.""" - update_mask: Optional[str] - """The update mask to apply. For the `FieldMask` definition, see - https://protobuf.dev/reference/protobuf/google.protobuf/#field-mask.""" - user_id: Optional[str] - """User ID of the agent engine session to update.""" +CreateAgentEngineSessionConfigOrDict = Union[ + CreateAgentEngineSessionConfig, CreateAgentEngineSessionConfigDict +] -UpdateAgentEngineSessionConfigOrDict = Union[ - UpdateAgentEngineSessionConfig, UpdateAgentEngineSessionConfigDict -] - - -class _UpdateAgentEngineSessionRequestParameters(_common.BaseModel): - """Parameters for updating agent engine sessions.""" +class _CreateAgentEngineSessionRequestParameters(_common.BaseModel): + """Parameters for creating Agent Engine Sessions.""" name: Optional[str] = Field( default=None, - description="""Name of the agent engine session to update.""", + description="""Name of the agent engine to create the session under.""", ) - config: Optional[UpdateAgentEngineSessionConfig] = Field( + user_id: Optional[str] = Field( + default=None, description="""The user ID of the 
session.""" + ) + config: Optional[CreateAgentEngineSessionConfig] = Field( default=None, description="""""" ) -class _UpdateAgentEngineSessionRequestParametersDict(TypedDict, total=False): - """Parameters for updating agent engine sessions.""" +class _CreateAgentEngineSessionRequestParametersDict(TypedDict, total=False): + """Parameters for creating Agent Engine Sessions.""" name: Optional[str] - """Name of the agent engine session to update.""" + """Name of the agent engine to create the session under.""" - config: Optional[UpdateAgentEngineSessionConfigDict] + user_id: Optional[str] + """The user ID of the session.""" + + config: Optional[CreateAgentEngineSessionConfigDict] """""" -_UpdateAgentEngineSessionRequestParametersOrDict = Union[ - _UpdateAgentEngineSessionRequestParameters, - _UpdateAgentEngineSessionRequestParametersDict, +_CreateAgentEngineSessionRequestParametersOrDict = Union[ + _CreateAgentEngineSessionRequestParameters, + _CreateAgentEngineSessionRequestParametersDict, ] -class EventActions(_common.BaseModel): - """Actions are parts of events that are executed by the agent.""" +class Session(_common.BaseModel): + """A session.""" - artifact_delta: Optional[dict[str, int]] = Field( + create_time: Optional[datetime.datetime] = Field( default=None, - description="""Optional. Indicates that the event is updating an artifact. key is the filename, value is the version.""", + description="""Output only. Timestamp when the session was created.""", ) - escalate: Optional[bool] = Field( - default=None, - description="""Optional. The agent is escalating to a higher level agent.""", + display_name: Optional[str] = Field( + default=None, description="""Optional. The display name of the session.""" ) - requested_auth_configs: Optional[dict[str, Any]] = Field( + expire_time: Optional[datetime.datetime] = Field( default=None, - description="""Optional. Will only be set by a tool response indicating tool request euc. 
Struct key is the function call id since one function call response (from model) could correspond to multiple function calls. Struct value is the required auth config, which can be another struct.""", + description="""Optional. Timestamp of when this session is considered expired. This is *always* provided on output, regardless of what was sent on input.""", ) - skip_summarization: Optional[bool] = Field( + name: Optional[str] = Field( default=None, - description="""Optional. If true, it won't call model to summarize function response. Only used for function_response event.""", + description="""Identifier. The resource name of the session. Format: 'projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}'.""", ) - state_delta: Optional[dict[str, Any]] = Field( + session_state: Optional[dict[str, Any]] = Field( default=None, - description="""Optional. Indicates that the event is updating the state with the given delta.""", + description="""Optional. Session specific memory which stores key conversation points.""", ) - transfer_agent: Optional[str] = Field( + ttl: Optional[str] = Field( + default=None, description="""Optional. Input only. The TTL for this session.""" + ) + update_time: Optional[datetime.datetime] = Field( default=None, - description="""Optional. If set, the event transfers to the specified agent.""", + description="""Output only. Timestamp when the session was updated.""", ) - transfer_to_agent: Optional[bool] = Field( + user_id: Optional[str] = Field( default=None, - description="""Deprecated. If set, the event transfers to the specified agent.""", + description="""Required. Immutable. String id provided by the user""", ) -class EventActionsDict(TypedDict, total=False): - """Actions are parts of events that are executed by the agent.""" +class SessionDict(TypedDict, total=False): + """A session.""" - artifact_delta: Optional[dict[str, int]] - """Optional. Indicates that the event is updating an artifact. 
key is the filename, value is the version.""" + create_time: Optional[datetime.datetime] + """Output only. Timestamp when the session was created.""" - escalate: Optional[bool] - """Optional. The agent is escalating to a higher level agent.""" + display_name: Optional[str] + """Optional. The display name of the session.""" - requested_auth_configs: Optional[dict[str, Any]] - """Optional. Will only be set by a tool response indicating tool request euc. Struct key is the function call id since one function call response (from model) could correspond to multiple function calls. Struct value is the required auth config, which can be another struct.""" + expire_time: Optional[datetime.datetime] + """Optional. Timestamp of when this session is considered expired. This is *always* provided on output, regardless of what was sent on input.""" - skip_summarization: Optional[bool] - """Optional. If true, it won't call model to summarize function response. Only used for function_response event.""" + name: Optional[str] + """Identifier. The resource name of the session. Format: 'projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}'.""" - state_delta: Optional[dict[str, Any]] - """Optional. Indicates that the event is updating the state with the given delta.""" + session_state: Optional[dict[str, Any]] + """Optional. Session specific memory which stores key conversation points.""" - transfer_agent: Optional[str] - """Optional. If set, the event transfers to the specified agent.""" + ttl: Optional[str] + """Optional. Input only. The TTL for this session.""" - transfer_to_agent: Optional[bool] - """Deprecated. If set, the event transfers to the specified agent.""" + update_time: Optional[datetime.datetime] + """Output only. Timestamp when the session was updated.""" + user_id: Optional[str] + """Required. Immutable. 
String id provided by the user""" -EventActionsOrDict = Union[EventActions, EventActionsDict] +SessionOrDict = Union[Session, SessionDict] -class GroundingChunkMapsPlaceAnswerSourcesAuthorAttribution(_common.BaseModel): - """Author attribution for a photo or review.""" - display_name: Optional[str] = Field( +class AgentEngineSessionOperation(_common.BaseModel): + """Operation that has an agent engine session as a response.""" + + name: Optional[str] = Field( + default=None, + description="""The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""", + ) + metadata: Optional[dict[str, Any]] = Field( default=None, - description="""Name of the author of the Photo or Review.""", + description="""Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.""", ) - photo_uri: Optional[str] = Field( + done: Optional[bool] = Field( default=None, - description="""Profile photo URI of the author of the Photo or Review.""", + description="""If the value is `false`, it means the operation is still in progress. 
If `true`, the operation is completed, and either `error` or `response` is available.""", ) - uri: Optional[str] = Field( + error: Optional[dict[str, Any]] = Field( default=None, - description="""URI of the author of the Photo or Review.""", + description="""The error result of the operation in case of failure or cancellation.""", + ) + response: Optional[Session] = Field( + default=None, description="""The Agent Engine Session.""" ) -class GroundingChunkMapsPlaceAnswerSourcesAuthorAttributionDict(TypedDict, total=False): - """Author attribution for a photo or review.""" - - display_name: Optional[str] - """Name of the author of the Photo or Review.""" - - photo_uri: Optional[str] - """Profile photo URI of the author of the Photo or Review.""" +class AgentEngineSessionOperationDict(TypedDict, total=False): + """Operation that has an agent engine session as a response.""" - uri: Optional[str] - """URI of the author of the Photo or Review.""" + name: Optional[str] + """The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""" + metadata: Optional[dict[str, Any]] + """Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.""" -GroundingChunkMapsPlaceAnswerSourcesAuthorAttributionOrDict = Union[ - GroundingChunkMapsPlaceAnswerSourcesAuthorAttribution, - GroundingChunkMapsPlaceAnswerSourcesAuthorAttributionDict, -] + done: Optional[bool] + """If the value is `false`, it means the operation is still in progress. 
If `true`, the operation is completed, and either `error` or `response` is available.""" + error: Optional[dict[str, Any]] + """The error result of the operation in case of failure or cancellation.""" -class GroundingChunkMapsPlaceAnswerSourcesReviewSnippet(_common.BaseModel): - """Encapsulates a review snippet.""" + response: Optional[SessionDict] + """The Agent Engine Session.""" - author_attribution: Optional[ - GroundingChunkMapsPlaceAnswerSourcesAuthorAttribution - ] = Field(default=None, description="""This review's author.""") - flag_content_uri: Optional[str] = Field( - default=None, - description="""A link where users can flag a problem with the review.""", - ) - google_maps_uri: Optional[str] = Field( - default=None, - description="""A link to show the review on Google Maps.""", - ) - relative_publish_time_description: Optional[str] = Field( - default=None, - description="""A string of formatted recent time, expressing the review time relative to the current time in a form appropriate for the language and country.""", - ) - review: Optional[str] = Field( - default=None, - description="""A reference representing this place review which may be used to look up this place review again.""", - ) +AgentEngineSessionOperationOrDict = Union[ + AgentEngineSessionOperation, AgentEngineSessionOperationDict +] -class GroundingChunkMapsPlaceAnswerSourcesReviewSnippetDict(TypedDict, total=False): - """Encapsulates a review snippet.""" - author_attribution: Optional[ - GroundingChunkMapsPlaceAnswerSourcesAuthorAttributionDict - ] - """This review's author.""" +class DeleteAgentEngineSessionConfig(_common.BaseModel): + """Config for deleting an Agent Engine Session.""" - flag_content_uri: Optional[str] - """A link where users can flag a problem with the review.""" + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) - google_maps_uri: Optional[str] - """A link to show the review on Google 
Maps.""" - relative_publish_time_description: Optional[str] - """A string of formatted recent time, expressing the review time relative to the current time in a form appropriate for the language and country.""" +class DeleteAgentEngineSessionConfigDict(TypedDict, total=False): + """Config for deleting an Agent Engine Session.""" - review: Optional[str] - """A reference representing this place review which may be used to look up this place review again.""" + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" -GroundingChunkMapsPlaceAnswerSourcesReviewSnippetOrDict = Union[ - GroundingChunkMapsPlaceAnswerSourcesReviewSnippet, - GroundingChunkMapsPlaceAnswerSourcesReviewSnippetDict, +DeleteAgentEngineSessionConfigOrDict = Union[ + DeleteAgentEngineSessionConfig, DeleteAgentEngineSessionConfigDict ] -class GroundingChunkMapsPlaceAnswerSources(_common.BaseModel): - """Sources used to generate the place answer.""" +class _DeleteAgentEngineSessionRequestParameters(_common.BaseModel): + """Parameters for deleting agent engine sessions.""" - flag_content_uri: Optional[str] = Field( - default=None, - description="""A link where users can flag a problem with the generated answer.""", + name: Optional[str] = Field( + default=None, description="""Name of the agent engine session to delete.""" ) - review_snippets: Optional[ - list[GroundingChunkMapsPlaceAnswerSourcesReviewSnippet] - ] = Field( - default=None, - description="""Snippets of reviews that are used to generate the answer.""", + config: Optional[DeleteAgentEngineSessionConfig] = Field( + default=None, description="""""" ) -class GroundingChunkMapsPlaceAnswerSourcesDict(TypedDict, total=False): - """Sources used to generate the place answer.""" +class _DeleteAgentEngineSessionRequestParametersDict(TypedDict, total=False): + """Parameters for deleting agent engine sessions.""" - flag_content_uri: Optional[str] - """A link where users can flag a problem with the generated 
answer.""" + name: Optional[str] + """Name of the agent engine session to delete.""" - review_snippets: Optional[ - list[GroundingChunkMapsPlaceAnswerSourcesReviewSnippetDict] - ] - """Snippets of reviews that are used to generate the answer.""" + config: Optional[DeleteAgentEngineSessionConfigDict] + """""" -GroundingChunkMapsPlaceAnswerSourcesOrDict = Union[ - GroundingChunkMapsPlaceAnswerSources, - GroundingChunkMapsPlaceAnswerSourcesDict, +_DeleteAgentEngineSessionRequestParametersOrDict = Union[ + _DeleteAgentEngineSessionRequestParameters, + _DeleteAgentEngineSessionRequestParametersDict, ] -class GroundingChunkMaps(_common.BaseModel): - """Chunk from Google Maps.""" +class DeleteAgentEngineSessionOperation(_common.BaseModel): + """Operation for deleting agent engine sessions.""" - place_answer_sources: Optional[GroundingChunkMapsPlaceAnswerSources] = Field( + name: Optional[str] = Field( default=None, - description="""Sources used to generate the place answer. This includes review snippets and photos that were used to generate the answer, as well as uris to flag content.""", + description="""The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""", ) - place_id: Optional[str] = Field( + metadata: Optional[dict[str, Any]] = Field( default=None, - description="""This Place's resource name, in `places/{place_id}` format. Can be used to look up the Place.""", + description="""Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. 
Any method that returns a long-running operation should document the metadata type, if any.""", ) - text: Optional[str] = Field(default=None, description="""Text of the chunk.""") - title: Optional[str] = Field(default=None, description="""Title of the chunk.""") - uri: Optional[str] = Field( - default=None, description="""URI reference of the chunk.""" + done: Optional[bool] = Field( + default=None, + description="""If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.""", + ) + error: Optional[dict[str, Any]] = Field( + default=None, + description="""The error result of the operation in case of failure or cancellation.""", ) -class GroundingChunkMapsDict(TypedDict, total=False): - """Chunk from Google Maps.""" - - place_answer_sources: Optional[GroundingChunkMapsPlaceAnswerSourcesDict] - """Sources used to generate the place answer. This includes review snippets and photos that were used to generate the answer, as well as uris to flag content.""" +class DeleteAgentEngineSessionOperationDict(TypedDict, total=False): + """Operation for deleting agent engine sessions.""" - place_id: Optional[str] - """This Place's resource name, in `places/{place_id}` format. Can be used to look up the Place.""" + name: Optional[str] + """The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""" - text: Optional[str] - """Text of the chunk.""" + metadata: Optional[dict[str, Any]] + """Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. 
Any method that returns a long-running operation should document the metadata type, if any.""" - title: Optional[str] - """Title of the chunk.""" + done: Optional[bool] + """If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.""" - uri: Optional[str] - """URI reference of the chunk.""" + error: Optional[dict[str, Any]] + """The error result of the operation in case of failure or cancellation.""" -GroundingChunkMapsOrDict = Union[GroundingChunkMaps, GroundingChunkMapsDict] +DeleteAgentEngineSessionOperationOrDict = Union[ + DeleteAgentEngineSessionOperation, DeleteAgentEngineSessionOperationDict +] -class RagChunkPageSpan(_common.BaseModel): - """Represents where the chunk starts and ends in the document.""" +class GetAgentEngineSessionConfig(_common.BaseModel): + """Config for getting an Agent Engine Session.""" - first_page: Optional[int] = Field( - default=None, - description="""Page where chunk starts in the document. Inclusive. 1-indexed.""", - ) - last_page: Optional[int] = Field( - default=None, - description="""Page where chunk ends in the document. Inclusive. 1-indexed.""", + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" ) -class RagChunkPageSpanDict(TypedDict, total=False): - """Represents where the chunk starts and ends in the document.""" - - first_page: Optional[int] - """Page where chunk starts in the document. Inclusive. 1-indexed.""" +class GetAgentEngineSessionConfigDict(TypedDict, total=False): + """Config for getting an Agent Engine Session.""" - last_page: Optional[int] - """Page where chunk ends in the document. Inclusive. 
1-indexed.""" + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" -RagChunkPageSpanOrDict = Union[RagChunkPageSpan, RagChunkPageSpanDict] +GetAgentEngineSessionConfigOrDict = Union[ + GetAgentEngineSessionConfig, GetAgentEngineSessionConfigDict +] -class RagChunk(_common.BaseModel): - """A RagChunk includes the content of a chunk of a RagFile, and associated metadata.""" +class _GetAgentEngineSessionRequestParameters(_common.BaseModel): + """Parameters for getting an agent engine session.""" - page_span: Optional[RagChunkPageSpan] = Field( - default=None, - description="""If populated, represents where the chunk starts and ends in the document.""", + name: Optional[str] = Field( + default=None, description="""Name of the agent engine.""" ) - text: Optional[str] = Field( - default=None, description="""The content of the chunk.""" + config: Optional[GetAgentEngineSessionConfig] = Field( + default=None, description="""""" ) -class RagChunkDict(TypedDict, total=False): - """A RagChunk includes the content of a chunk of a RagFile, and associated metadata.""" +class _GetAgentEngineSessionRequestParametersDict(TypedDict, total=False): + """Parameters for getting an agent engine session.""" - page_span: Optional[RagChunkPageSpanDict] - """If populated, represents where the chunk starts and ends in the document.""" + name: Optional[str] + """Name of the agent engine.""" - text: Optional[str] - """The content of the chunk.""" + config: Optional[GetAgentEngineSessionConfigDict] + """""" -RagChunkOrDict = Union[RagChunk, RagChunkDict] +_GetAgentEngineSessionRequestParametersOrDict = Union[ + _GetAgentEngineSessionRequestParameters, _GetAgentEngineSessionRequestParametersDict +] -class GroundingChunkRetrievedContext(_common.BaseModel): - """Chunk from context retrieved by the retrieval tools.""" +class ListAgentEngineSessionsConfig(_common.BaseModel): + """Config for listing agent engine sessions.""" - document_name: 
Optional[str] = Field( - default=None, - description="""Output only. The full document name for the referenced Vertex AI Search document.""", + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" ) - rag_chunk: Optional[RagChunk] = Field( + page_size: Optional[int] = Field(default=None, description="""""") + page_token: Optional[str] = Field(default=None, description="""""") + filter: Optional[str] = Field( default=None, - description="""Additional context for the RAG retrieval result. This is only populated when using the RAG retrieval tool.""", - ) - text: Optional[str] = Field( - default=None, description="""Text of the attribution.""" - ) - title: Optional[str] = Field( - default=None, description="""Title of the attribution.""" - ) - uri: Optional[str] = Field( - default=None, description="""URI reference of the attribution.""" + description="""An expression for filtering the results of the request. + For field names both snake_case and camelCase are supported.""", ) -class GroundingChunkRetrievedContextDict(TypedDict, total=False): - """Chunk from context retrieved by the retrieval tools.""" +class ListAgentEngineSessionsConfigDict(TypedDict, total=False): + """Config for listing agent engine sessions.""" - document_name: Optional[str] - """Output only. The full document name for the referenced Vertex AI Search document.""" + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" - rag_chunk: Optional[RagChunkDict] - """Additional context for the RAG retrieval result. This is only populated when using the RAG retrieval tool.""" + page_size: Optional[int] + """""" - text: Optional[str] - """Text of the attribution.""" + page_token: Optional[str] + """""" - title: Optional[str] - """Title of the attribution.""" + filter: Optional[str] + """An expression for filtering the results of the request. 
+ For field names both snake_case and camelCase are supported.""" - uri: Optional[str] - """URI reference of the attribution.""" - -GroundingChunkRetrievedContextOrDict = Union[ - GroundingChunkRetrievedContext, GroundingChunkRetrievedContextDict +ListAgentEngineSessionsConfigOrDict = Union[ + ListAgentEngineSessionsConfig, ListAgentEngineSessionsConfigDict ] -class GroundingChunkWeb(_common.BaseModel): - """Chunk from the web.""" +class _ListAgentEngineSessionsRequestParameters(_common.BaseModel): + """Parameters for listing agent engines.""" - domain: Optional[str] = Field( - default=None, description="""Domain of the (original) URI.""" + name: Optional[str] = Field( + default=None, description="""Name of the agent engine.""" ) - title: Optional[str] = Field(default=None, description="""Title of the chunk.""") - uri: Optional[str] = Field( - default=None, description="""URI reference of the chunk.""" + config: Optional[ListAgentEngineSessionsConfig] = Field( + default=None, description="""""" ) -class GroundingChunkWebDict(TypedDict, total=False): - """Chunk from the web.""" - - domain: Optional[str] - """Domain of the (original) URI.""" +class _ListAgentEngineSessionsRequestParametersDict(TypedDict, total=False): + """Parameters for listing agent engines.""" - title: Optional[str] - """Title of the chunk.""" + name: Optional[str] + """Name of the agent engine.""" - uri: Optional[str] - """URI reference of the chunk.""" + config: Optional[ListAgentEngineSessionsConfigDict] + """""" -GroundingChunkWebOrDict = Union[GroundingChunkWeb, GroundingChunkWebDict] +_ListAgentEngineSessionsRequestParametersOrDict = Union[ + _ListAgentEngineSessionsRequestParameters, + _ListAgentEngineSessionsRequestParametersDict, +] -class GroundingChunk(_common.BaseModel): - """Grounding chunk.""" +class ListReasoningEnginesSessionsResponse(_common.BaseModel): + """Response for listing agent engine sessions.""" - maps: Optional[GroundingChunkMaps] = Field( - default=None, 
description="""Grounding chunk from Google Maps.""" - ) - retrieved_context: Optional[GroundingChunkRetrievedContext] = Field( - default=None, - description="""Grounding chunk from context retrieved by the retrieval tools.""", + sdk_http_response: Optional[genai_types.HttpResponse] = Field( + default=None, description="""Used to retain the full HTTP response.""" ) - web: Optional[GroundingChunkWeb] = Field( - default=None, description="""Grounding chunk from the web.""" + next_page_token: Optional[str] = Field(default=None, description="""""") + sessions: Optional[list[Session]] = Field( + default=None, description="""List of agent engine sessions.""" ) -class GroundingChunkDict(TypedDict, total=False): - """Grounding chunk.""" +class ListReasoningEnginesSessionsResponseDict(TypedDict, total=False): + """Response for listing agent engine sessions.""" - maps: Optional[GroundingChunkMapsDict] - """Grounding chunk from Google Maps.""" + sdk_http_response: Optional[genai_types.HttpResponseDict] + """Used to retain the full HTTP response.""" - retrieved_context: Optional[GroundingChunkRetrievedContextDict] - """Grounding chunk from context retrieved by the retrieval tools.""" + next_page_token: Optional[str] + """""" - web: Optional[GroundingChunkWebDict] - """Grounding chunk from the web.""" + sessions: Optional[list[SessionDict]] + """List of agent engine sessions.""" -GroundingChunkOrDict = Union[GroundingChunk, GroundingChunkDict] +ListReasoningEnginesSessionsResponseOrDict = Union[ + ListReasoningEnginesSessionsResponse, ListReasoningEnginesSessionsResponseDict +] -class Segment(_common.BaseModel): - """Segment of the content.""" +class _GetAgentEngineSessionOperationParameters(_common.BaseModel): + """Parameters for getting an operation with a session as a response.""" - end_index: Optional[int] = Field( - default=None, - description="""Output only. End index in the given Part, measured in bytes. 
Offset from the start of the Part, exclusive, starting at zero.""", - ) - part_index: Optional[int] = Field( - default=None, - description="""Output only. The index of a Part object within its parent Content object.""", - ) - start_index: Optional[int] = Field( - default=None, - description="""Output only. Start index in the given Part, measured in bytes. Offset from the start of the Part, inclusive, starting at zero.""", + operation_name: Optional[str] = Field( + default=None, description="""The server-assigned name for the operation.""" ) - text: Optional[str] = Field( - default=None, - description="""Output only. The text corresponding to the segment from the response.""", + config: Optional[GetAgentEngineOperationConfig] = Field( + default=None, description="""Used to override the default configuration.""" ) -class SegmentDict(TypedDict, total=False): - """Segment of the content.""" - - end_index: Optional[int] - """Output only. End index in the given Part, measured in bytes. Offset from the start of the Part, exclusive, starting at zero.""" - - part_index: Optional[int] - """Output only. The index of a Part object within its parent Content object.""" +class _GetAgentEngineSessionOperationParametersDict(TypedDict, total=False): + """Parameters for getting an operation with a session as a response.""" - start_index: Optional[int] - """Output only. Start index in the given Part, measured in bytes. Offset from the start of the Part, inclusive, starting at zero.""" + operation_name: Optional[str] + """The server-assigned name for the operation.""" - text: Optional[str] - """Output only. 
The text corresponding to the segment from the response.""" + config: Optional[GetAgentEngineOperationConfigDict] + """Used to override the default configuration.""" -SegmentOrDict = Union[Segment, SegmentDict] +_GetAgentEngineSessionOperationParametersOrDict = Union[ + _GetAgentEngineSessionOperationParameters, + _GetAgentEngineSessionOperationParametersDict, +] -class GroundingSupport(_common.BaseModel): - """Grounding support.""" +class UpdateAgentEngineSessionConfig(_common.BaseModel): + """Config for updating agent engine session.""" - confidence_scores: Optional[list[float]] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + display_name: Optional[str] = Field( + default=None, description="""The display name of the session.""" + ) + session_state: Optional[dict[str, Any]] = Field( + default=None, + description="""Session state which stores key conversation points.""", + ) + wait_for_completion: Optional[bool] = Field( + default=True, + description="""Waits for the operation to complete before returning.""", + ) + ttl: Optional[str] = Field( default=None, - description="""Confidence score of the support references. Ranges from 0 to 1. 1 is the most confident. For Gemini 2.0 and before, this list must have the same size as the grounding_chunk_indices. For Gemini 2.5 and after, this list will be empty and should be ignored.""", + description="""Optional. Input only. The TTL for this resource. + + The expiration time is computed: now + TTL.""", ) - grounding_chunk_indices: Optional[list[int]] = Field( + expire_time: Optional[datetime.datetime] = Field( default=None, - description="""A list of indices (into 'grounding_chunk') specifying the citations associated with the claim. For instance [1,3,4] means that grounding_chunk[1], grounding_chunk[3], grounding_chunk[4] are the retrieved content attributed to the claim.""", + description="""Optional. 
Timestamp of when this resource is considered expired. This is *always* provided on output, regardless of what `expiration` was sent on input.""", ) - segment: Optional[Segment] = Field( + update_mask: Optional[str] = Field( default=None, - description="""Segment of the content this support belongs to.""", + description="""The update mask to apply. For the `FieldMask` definition, see + https://protobuf.dev/reference/protobuf/google.protobuf/#field-mask.""", + ) + user_id: Optional[str] = Field( + default=None, description="""User ID of the agent engine session to update.""" ) -class GroundingSupportDict(TypedDict, total=False): - """Grounding support.""" - - confidence_scores: Optional[list[float]] - """Confidence score of the support references. Ranges from 0 to 1. 1 is the most confident. For Gemini 2.0 and before, this list must have the same size as the grounding_chunk_indices. For Gemini 2.5 and after, this list will be empty and should be ignored.""" - - grounding_chunk_indices: Optional[list[int]] - """A list of indices (into 'grounding_chunk') specifying the citations associated with the claim. 
For instance [1,3,4] means that grounding_chunk[1], grounding_chunk[3], grounding_chunk[4] are the retrieved content attributed to the claim.""" +class UpdateAgentEngineSessionConfigDict(TypedDict, total=False): + """Config for updating agent engine session.""" - segment: Optional[SegmentDict] - """Segment of the content this support belongs to.""" + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + display_name: Optional[str] + """The display name of the session.""" -GroundingSupportOrDict = Union[GroundingSupport, GroundingSupportDict] + session_state: Optional[dict[str, Any]] + """Session state which stores key conversation points.""" + wait_for_completion: Optional[bool] + """Waits for the operation to complete before returning.""" -class RetrievalMetadata(_common.BaseModel): - """Metadata related to retrieval in the grounding flow.""" + ttl: Optional[str] + """Optional. Input only. The TTL for this resource. - google_search_dynamic_retrieval_score: Optional[float] = Field( - default=None, - description="""Optional. Score indicating how likely information from Google Search could help answer the prompt. The score is in the range `[0, 1]`, where 0 is the least likely and 1 is the most likely. This score is only populated when Google Search grounding and dynamic retrieval is enabled. It will be compared to the threshold to determine whether to trigger Google Search.""", - ) + The expiration time is computed: now + TTL.""" + expire_time: Optional[datetime.datetime] + """Optional. Timestamp of when this resource is considered expired. This is *always* provided on output, regardless of what `expiration` was sent on input.""" -class RetrievalMetadataDict(TypedDict, total=False): - """Metadata related to retrieval in the grounding flow.""" + update_mask: Optional[str] + """The update mask to apply. 
For the `FieldMask` definition, see + https://protobuf.dev/reference/protobuf/google.protobuf/#field-mask.""" - google_search_dynamic_retrieval_score: Optional[float] - """Optional. Score indicating how likely information from Google Search could help answer the prompt. The score is in the range `[0, 1]`, where 0 is the least likely and 1 is the most likely. This score is only populated when Google Search grounding and dynamic retrieval is enabled. It will be compared to the threshold to determine whether to trigger Google Search.""" + user_id: Optional[str] + """User ID of the agent engine session to update.""" -RetrievalMetadataOrDict = Union[RetrievalMetadata, RetrievalMetadataDict] +UpdateAgentEngineSessionConfigOrDict = Union[ + UpdateAgentEngineSessionConfig, UpdateAgentEngineSessionConfigDict +] -class SearchEntryPoint(_common.BaseModel): - """Google search entry point.""" +class _UpdateAgentEngineSessionRequestParameters(_common.BaseModel): + """Parameters for updating agent engine sessions.""" - rendered_content: Optional[str] = Field( - default=None, - description="""Optional. Web content snippet that can be embedded in a web page or an app webview.""", + name: Optional[str] = Field( + default=None, description="""Name of the agent engine session to update.""" ) - sdk_blob: Optional[bytes] = Field( - default=None, - description="""Optional. Base64 encoded JSON representing array of tuple.""", + config: Optional[UpdateAgentEngineSessionConfig] = Field( + default=None, description="""""" ) -class SearchEntryPointDict(TypedDict, total=False): - """Google search entry point.""" +class _UpdateAgentEngineSessionRequestParametersDict(TypedDict, total=False): + """Parameters for updating agent engine sessions.""" - rendered_content: Optional[str] - """Optional. Web content snippet that can be embedded in a web page or an app webview.""" + name: Optional[str] + """Name of the agent engine session to update.""" - sdk_blob: Optional[bytes] - """Optional. 
Base64 encoded JSON representing array of tuple.""" + config: Optional[UpdateAgentEngineSessionConfigDict] + """""" -SearchEntryPointOrDict = Union[SearchEntryPoint, SearchEntryPointDict] +_UpdateAgentEngineSessionRequestParametersOrDict = Union[ + _UpdateAgentEngineSessionRequestParameters, + _UpdateAgentEngineSessionRequestParametersDict, +] -class GroundingMetadata(_common.BaseModel): - """Metadata returned to client when grounding is enabled.""" +class EventActions(_common.BaseModel): + """Actions are parts of events that are executed by the agent.""" - google_maps_widget_context_token: Optional[str] = Field( + artifact_delta: Optional[dict[str, int]] = Field( default=None, - description="""Optional. Output only. Resource name of the Google Maps widget context token to be used with the PlacesContextElement widget to render contextual data. This is populated only for Google Maps grounding.""", + description="""Optional. Indicates that the event is updating an artifact. key is the filename, value is the version.""", ) - grounding_chunks: Optional[list[GroundingChunk]] = Field( + escalate: Optional[bool] = Field( default=None, - description="""List of supporting references retrieved from specified grounding source.""", + description="""Optional. The agent is escalating to a higher level agent.""", ) - grounding_supports: Optional[list[GroundingSupport]] = Field( - default=None, description="""Optional. List of grounding support.""" + requested_auth_configs: Optional[dict[str, Any]] = Field( + default=None, + description="""Optional. Will only be set by a tool response indicating tool request euc. Struct key is the function call id since one function call response (from model) could correspond to multiple function calls. Struct value is the required auth config, which can be another struct.""", ) - retrieval_metadata: Optional[RetrievalMetadata] = Field( + skip_summarization: Optional[bool] = Field( default=None, - description="""Optional. Output only. 
Retrieval metadata.""", + description="""Optional. If true, it won't call model to summarize function response. Only used for function_response event.""", ) - retrieval_queries: Optional[list[str]] = Field( + state_delta: Optional[dict[str, Any]] = Field( default=None, - description="""Optional. Queries executed by the retrieval tools.""", + description="""Optional. Indicates that the event is updating the state with the given delta.""", ) - search_entry_point: Optional[SearchEntryPoint] = Field( + transfer_agent: Optional[str] = Field( default=None, - description="""Optional. Google search entry for the following-up web searches.""", + description="""Optional. If set, the event transfers to the specified agent.""", ) - web_search_queries: Optional[list[str]] = Field( + transfer_to_agent: Optional[bool] = Field( default=None, - description="""Optional. Web search queries for the following-up web search.""", + description="""Deprecated. If set, the event transfers to the specified agent.""", ) -class GroundingMetadataDict(TypedDict, total=False): - """Metadata returned to client when grounding is enabled.""" +class EventActionsDict(TypedDict, total=False): + """Actions are parts of events that are executed by the agent.""" - google_maps_widget_context_token: Optional[str] - """Optional. Output only. Resource name of the Google Maps widget context token to be used with the PlacesContextElement widget to render contextual data. This is populated only for Google Maps grounding.""" + artifact_delta: Optional[dict[str, int]] + """Optional. Indicates that the event is updating an artifact. key is the filename, value is the version.""" - grounding_chunks: Optional[list[GroundingChunkDict]] - """List of supporting references retrieved from specified grounding source.""" + escalate: Optional[bool] + """Optional. The agent is escalating to a higher level agent.""" - grounding_supports: Optional[list[GroundingSupportDict]] - """Optional. 
List of grounding support.""" + requested_auth_configs: Optional[dict[str, Any]] + """Optional. Will only be set by a tool response indicating tool request euc. Struct key is the function call id since one function call response (from model) could correspond to multiple function calls. Struct value is the required auth config, which can be another struct.""" - retrieval_metadata: Optional[RetrievalMetadataDict] - """Optional. Output only. Retrieval metadata.""" + skip_summarization: Optional[bool] + """Optional. If true, it won't call model to summarize function response. Only used for function_response event.""" - retrieval_queries: Optional[list[str]] - """Optional. Queries executed by the retrieval tools.""" + state_delta: Optional[dict[str, Any]] + """Optional. Indicates that the event is updating the state with the given delta.""" - search_entry_point: Optional[SearchEntryPointDict] - """Optional. Google search entry for the following-up web searches.""" + transfer_agent: Optional[str] + """Optional. If set, the event transfers to the specified agent.""" - web_search_queries: Optional[list[str]] - """Optional. Web search queries for the following-up web search.""" + transfer_to_agent: Optional[bool] + """Deprecated. If set, the event transfers to the specified agent.""" -GroundingMetadataOrDict = Union[GroundingMetadata, GroundingMetadataDict] +EventActionsOrDict = Union[EventActions, EventActionsDict] class EventMetadata(_common.BaseModel): """Metadata relating to a LLM response event.""" + grounding_metadata: Optional[genai_types.GroundingMetadata] = Field( + default=None, + description="""Optional. Metadata returned to client when grounding is enabled.""", + ) branch: Optional[str] = Field( default=None, description="""Optional. The branch of the event. The format is like agent_1.agent_2.agent_3, where agent_1 is the parent of agent_2, and agent_2 is the parent of agent_3. 
Branch is used when multiple child agents shouldn't see their siblings' conversation history.""", @@ -7059,10 +6945,6 @@ class EventMetadata(_common.BaseModel): custom_metadata: Optional[dict[str, Any]] = Field( default=None, description="""The custom metadata of the LlmResponse.""" ) - grounding_metadata: Optional[GroundingMetadata] = Field( - default=None, - description="""Optional. Metadata returned to client when grounding is enabled.""", - ) interrupted: Optional[bool] = Field( default=None, description="""Optional. Flag indicating that LLM was interrupted when generating the content. Usually it's due to user interruption during a bidi streaming.""", @@ -7084,15 +6966,15 @@ class EventMetadata(_common.BaseModel): class EventMetadataDict(TypedDict, total=False): """Metadata relating to a LLM response event.""" + grounding_metadata: Optional[genai_types.GroundingMetadataDict] + """Optional. Metadata returned to client when grounding is enabled.""" + branch: Optional[str] """Optional. The branch of the event. The format is like agent_1.agent_2.agent_3, where agent_1 is the parent of agent_2, and agent_2 is the parent of agent_3. Branch is used when multiple child agents shouldn't see their siblings' conversation history.""" custom_metadata: Optional[dict[str, Any]] """The custom metadata of the LlmResponse.""" - grounding_metadata: Optional[GroundingMetadataDict] - """Optional. Metadata returned to client when grounding is enabled.""" - interrupted: Optional[bool] """Optional. Flag indicating that LLM was interrupted when generating the content. 
Usually it's due to user interruption during a bidi streaming.""" @@ -7112,7 +6994,7 @@ class EventMetadataDict(TypedDict, total=False): class AppendAgentEngineSessionEventConfig(_common.BaseModel): """Config for appending agent engine session event.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) content: Optional[genai_types.Content] = Field( @@ -7136,7 +7018,7 @@ class AppendAgentEngineSessionEventConfig(_common.BaseModel): class AppendAgentEngineSessionEventConfigDict(TypedDict, total=False): """Config for appending agent engine session event.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" content: Optional[genai_types.ContentDict] @@ -7167,15 +7049,13 @@ class _AppendAgentEngineSessionEventRequestParameters(_common.BaseModel): default=None, description="""Name of the agent engine session.""" ) author: Optional[str] = Field( - default=None, - description="""Author of the agent engine session event.""", + default=None, description="""Author of the agent engine session event.""" ) invocation_id: Optional[str] = Field( default=None, description="""Invocation ID of the agent engine.""" ) timestamp: Optional[datetime.datetime] = Field( - default=None, - description="""Timestamp indicating when the event was created.""", + default=None, description="""Timestamp indicating when the event was created.""" ) config: Optional[AppendAgentEngineSessionEventConfig] = Field( default=None, description="""""" @@ -7220,15 +7100,14 @@ class AppendAgentEngineSessionEventResponseDict(TypedDict, total=False): AppendAgentEngineSessionEventResponseOrDict = Union[ - AppendAgentEngineSessionEventResponse, - AppendAgentEngineSessionEventResponseDict, + AppendAgentEngineSessionEventResponse, AppendAgentEngineSessionEventResponseDict ] class 
ListAgentEngineSessionEventsConfig(_common.BaseModel): """Config for listing agent engine session events.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) page_size: Optional[int] = Field(default=None, description="""""") @@ -7243,7 +7122,7 @@ class ListAgentEngineSessionEventsConfig(_common.BaseModel): class ListAgentEngineSessionEventsConfigDict(TypedDict, total=False): """Config for listing agent engine session events.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" page_size: Optional[int] @@ -7299,98 +7178,1320 @@ class SessionEvent(_common.BaseModel): actions: Optional[EventActions] = Field( default=None, description="""Optional. Actions executed by the agent.""" ) - author: Optional[str] = Field( + author: Optional[str] = Field( + default=None, + description="""Required. The name of the agent that sent the event, or user.""", + ) + error_code: Optional[str] = Field( + default=None, + description="""Optional. Error code if the response is an error. Code varies by model.""", + ) + error_message: Optional[str] = Field( + default=None, + description="""Optional. Error message if the response is an error.""", + ) + event_metadata: Optional[EventMetadata] = Field( + default=None, description="""Optional. Metadata relating to this event.""" + ) + invocation_id: Optional[str] = Field( + default=None, + description="""Required. The invocation id of the event, multiple events can have the same invocation id.""", + ) + name: Optional[str] = Field( + default=None, + description="""Identifier. The resource name of the event. Format:`projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}/events/{event}`.""", + ) + timestamp: Optional[datetime.datetime] = Field( + default=None, + description="""Required. 
Timestamp when the event was created on client side.""", + ) + + +class SessionEventDict(TypedDict, total=False): + """A session event.""" + + content: Optional[genai_types.ContentDict] + """Optional. Content of the event provided by the author.""" + + actions: Optional[EventActionsDict] + """Optional. Actions executed by the agent.""" + + author: Optional[str] + """Required. The name of the agent that sent the event, or user.""" + + error_code: Optional[str] + """Optional. Error code if the response is an error. Code varies by model.""" + + error_message: Optional[str] + """Optional. Error message if the response is an error.""" + + event_metadata: Optional[EventMetadataDict] + """Optional. Metadata relating to this event.""" + + invocation_id: Optional[str] + """Required. The invocation id of the event, multiple events can have the same invocation id.""" + + name: Optional[str] + """Identifier. The resource name of the event. Format:`projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}/events/{event}`.""" + + timestamp: Optional[datetime.datetime] + """Required. 
Timestamp when the event was created on client side.""" + + +SessionEventOrDict = Union[SessionEvent, SessionEventDict] + + +class ListAgentEngineSessionEventsResponse(_common.BaseModel): + """Response for listing agent engine session events.""" + + sdk_http_response: Optional[genai_types.HttpResponse] = Field( + default=None, description="""Used to retain the full HTTP response.""" + ) + next_page_token: Optional[str] = Field(default=None, description="""""") + session_events: Optional[list[SessionEvent]] = Field( + default=None, description="""List of session events.""" + ) + + +class ListAgentEngineSessionEventsResponseDict(TypedDict, total=False): + """Response for listing agent engine session events.""" + + sdk_http_response: Optional[genai_types.HttpResponseDict] + """Used to retain the full HTTP response.""" + + next_page_token: Optional[str] + """""" + + session_events: Optional[list[SessionEventDict]] + """List of session events.""" + + +ListAgentEngineSessionEventsResponseOrDict = Union[ + ListAgentEngineSessionEventsResponse, ListAgentEngineSessionEventsResponseDict +] + + +class CreateDatasetConfig(_common.BaseModel): + """Config for creating a dataset resource to store prompts.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + + +class CreateDatasetConfigDict(TypedDict, total=False): + """Config for creating a dataset resource to store prompts.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + +CreateDatasetConfigOrDict = Union[CreateDatasetConfig, CreateDatasetConfigDict] + + +class SchemaPredictParamsGroundingConfigSourceEntry(_common.BaseModel): + """Single source entry for the grounding checking.""" + + enterprise_datastore: Optional[str] = Field( + default=None, + description="""The uri of the Vertex AI Search data source. Deprecated. 
Use vertex_ai_search_datastore instead.""", + ) + inline_context: Optional[str] = Field( + default=None, + description="""The grounding text passed inline with the Predict API. It can support up to 1 million bytes.""", + ) + type: Optional[ + Literal["UNSPECIFIED", "WEB", "ENTERPRISE", "VERTEX_AI_SEARCH", "INLINE"] + ] = Field( + default=None, description="""The type of the grounding checking source.""" + ) + vertex_ai_search_datastore: Optional[str] = Field( + default=None, description="""The uri of the Vertex AI Search data source.""" + ) + + +class SchemaPredictParamsGroundingConfigSourceEntryDict(TypedDict, total=False): + """Single source entry for the grounding checking.""" + + enterprise_datastore: Optional[str] + """The uri of the Vertex AI Search data source. Deprecated. Use vertex_ai_search_datastore instead.""" + + inline_context: Optional[str] + """The grounding text passed inline with the Predict API. It can support up to 1 million bytes.""" + + type: Optional[ + Literal["UNSPECIFIED", "WEB", "ENTERPRISE", "VERTEX_AI_SEARCH", "INLINE"] + ] + """The type of the grounding checking source.""" + + vertex_ai_search_datastore: Optional[str] + """The uri of the Vertex AI Search data source.""" + + +SchemaPredictParamsGroundingConfigSourceEntryOrDict = Union[ + SchemaPredictParamsGroundingConfigSourceEntry, + SchemaPredictParamsGroundingConfigSourceEntryDict, +] + + +class SchemaPredictParamsGroundingConfig(_common.BaseModel): + """The configuration for grounding checking.""" + + disable_attribution: Optional[bool] = Field( + default=None, + description="""If set, skip finding claim attributions (i.e not generate grounding citation).""", + ) + sources: Optional[list[SchemaPredictParamsGroundingConfigSourceEntry]] = Field( + default=None, description="""The sources for the grounding checking.""" + ) + + +class SchemaPredictParamsGroundingConfigDict(TypedDict, total=False): + """The configuration for grounding checking.""" + + disable_attribution: Optional[bool] 
+ """If set, skip finding claim attributions (i.e not generate grounding citation).""" + + sources: Optional[list[SchemaPredictParamsGroundingConfigSourceEntryDict]] + """The sources for the grounding checking.""" + + +SchemaPredictParamsGroundingConfigOrDict = Union[ + SchemaPredictParamsGroundingConfig, SchemaPredictParamsGroundingConfigDict +] + + +class SchemaPromptInstancePromptExecution(_common.BaseModel): + """A prompt instance's parameters set that contains a set of variable values.""" + + arguments: Optional[dict[str, "SchemaPromptInstanceVariableValue"]] = Field( + default=None, description="""Maps variable names to their value.""" + ) + + +class SchemaPromptInstancePromptExecutionDict(TypedDict, total=False): + """A prompt instance's parameters set that contains a set of variable values.""" + + arguments: Optional[dict[str, "SchemaPromptInstanceVariableValueDict"]] + """Maps variable names to their value.""" + + +SchemaPromptInstancePromptExecutionOrDict = Union[ + SchemaPromptInstancePromptExecution, SchemaPromptInstancePromptExecutionDict +] + + +class SchemaPromptSpecPromptMessage(_common.BaseModel): + """Represents a prompt message.""" + + generation_config: Optional[genai_types.GenerationConfig] = Field( + default=None, description="""Generation config.""" + ) + tool_config: Optional[genai_types.FunctionCallingConfig] = Field( + default=None, + description="""Tool config. This config is shared for all tools provided in the request.""", + ) + tools: Optional[list[genai_types.Tool]] = Field( + default=None, + description="""A list of `Tools` the model may use to generate the next response. A `Tool` is a piece of code that enables the system to interact with external systems to perform an action, or set of actions, outside of knowledge and scope of the model.""", + ) + safety_settings: Optional[list[genai_types.SafetySetting]] = Field( + default=None, + description="""Per request settings for blocking unsafe content. 
Enforced on GenerateContentResponse.candidates.""", + ) + contents: Optional[list[genai_types.Content]] = Field( + default=None, + description="""The content of the current conversation with the model. For single-turn queries, this is a single instance. For multi-turn queries, this is a repeated field that contains conversation history + latest request.""", + ) + system_instruction: Optional[genai_types.Content] = Field( + default=None, + description="""The user provided system instructions for the model. Note: only text should be used in parts and content in each part will be in a separate paragraph.""", + ) + variables: Optional[list[dict[str, genai_types.Part]]] = Field( + default=None, description="""""" + ) + model: Optional[str] = Field(default=None, description="""The model name.""") + + +class SchemaPromptSpecPromptMessageDict(TypedDict, total=False): + """Represents a prompt message.""" + + generation_config: Optional[genai_types.GenerationConfigDict] + """Generation config.""" + + tool_config: Optional[genai_types.FunctionCallingConfigDict] + """Tool config. This config is shared for all tools provided in the request.""" + + tools: Optional[list[genai_types.ToolDict]] + """A list of `Tools` the model may use to generate the next response. A `Tool` is a piece of code that enables the system to interact with external systems to perform an action, or set of actions, outside of knowledge and scope of the model.""" + + safety_settings: Optional[list[genai_types.SafetySettingDict]] + """Per request settings for blocking unsafe content. Enforced on GenerateContentResponse.candidates.""" + + contents: Optional[list[genai_types.ContentDict]] + """The content of the current conversation with the model. For single-turn queries, this is a single instance. 
For multi-turn queries, this is a repeated field that contains conversation history + latest request.""" + + system_instruction: Optional[genai_types.ContentDict] + """The user provided system instructions for the model. Note: only text should be used in parts and content in each part will be in a separate paragraph.""" + + variables: Optional[list[dict[str, genai_types.PartDict]]] + """""" + + model: Optional[str] + """The model name.""" + + +SchemaPromptSpecPromptMessageOrDict = Union[ + SchemaPromptSpecPromptMessage, SchemaPromptSpecPromptMessageDict +] + + +class SchemaPromptSpecMultimodalPrompt(_common.BaseModel): + """Prompt variation that embeds preambles to prompt string.""" + + prompt_message: Optional[SchemaPromptSpecPromptMessage] = Field( + default=None, description="""The prompt message.""" + ) + + +class SchemaPromptSpecMultimodalPromptDict(TypedDict, total=False): + """Prompt variation that embeds preambles to prompt string.""" + + prompt_message: Optional[SchemaPromptSpecPromptMessageDict] + """The prompt message.""" + + +SchemaPromptSpecMultimodalPromptOrDict = Union[ + SchemaPromptSpecMultimodalPrompt, SchemaPromptSpecMultimodalPromptDict +] + + +class SchemaPromptSpecPartList(_common.BaseModel): + """Represents a prompt spec part list.""" + + parts: Optional[list[genai_types.Part]] = Field( + default=None, description="""A list of elements that can be part of a prompt.""" + ) + + +class SchemaPromptSpecPartListDict(TypedDict, total=False): + """Represents a prompt spec part list.""" + + parts: Optional[list[genai_types.PartDict]] + """A list of elements that can be part of a prompt.""" + + +SchemaPromptSpecPartListOrDict = Union[ + SchemaPromptSpecPartList, SchemaPromptSpecPartListDict +] + + +class SchemaPromptSpecStructuredPrompt(_common.BaseModel): + """Represents a structured prompt.""" + + context: Optional[genai_types.Content] = Field( + default=None, description="""Preamble: The context of the prompt.""" + ) + examples: 
Optional[list[SchemaPromptSpecPartList]] = Field( + default=None, + description="""Preamble: A set of examples for expected model response.""", + ) + infill_prefix: Optional[str] = Field( + default=None, + description="""Preamble: For infill prompt, the prefix before expected model response.""", + ) + infill_suffix: Optional[str] = Field( + default=None, + description="""Preamble: For infill prompt, the suffix after expected model response.""", + ) + input_prefixes: Optional[list[str]] = Field( + default=None, + description="""Preamble: The input prefixes before each example input.""", + ) + output_prefixes: Optional[list[str]] = Field( + default=None, + description="""Preamble: The output prefixes before each example output.""", + ) + prediction_inputs: Optional[list[SchemaPromptSpecPartList]] = Field( + default=None, + description="""Preamble: The input test data for prediction. Each PartList in this field represents one text-only input set for a single model request.""", + ) + prompt_message: Optional[SchemaPromptSpecPromptMessage] = Field( + default=None, description="""The prompt message.""" + ) + + +class SchemaPromptSpecStructuredPromptDict(TypedDict, total=False): + """Represents a structured prompt.""" + + context: Optional[genai_types.ContentDict] + """Preamble: The context of the prompt.""" + + examples: Optional[list[SchemaPromptSpecPartListDict]] + """Preamble: A set of examples for expected model response.""" + + infill_prefix: Optional[str] + """Preamble: For infill prompt, the prefix before expected model response.""" + + infill_suffix: Optional[str] + """Preamble: For infill prompt, the suffix after expected model response.""" + + input_prefixes: Optional[list[str]] + """Preamble: The input prefixes before each example input.""" + + output_prefixes: Optional[list[str]] + """Preamble: The output prefixes before each example output.""" + + prediction_inputs: Optional[list[SchemaPromptSpecPartListDict]] + """Preamble: The input test data for 
prediction. Each PartList in this field represents one text-only input set for a single model request.""" + + prompt_message: Optional[SchemaPromptSpecPromptMessageDict] + """The prompt message.""" + + +SchemaPromptSpecStructuredPromptOrDict = Union[ + SchemaPromptSpecStructuredPrompt, SchemaPromptSpecStructuredPromptDict +] + + +class SchemaPromptSpecReferenceSentencePair(_common.BaseModel): + """A pair of sentences used as reference in source and target languages.""" + + source_sentence: Optional[str] = Field( + default=None, description="""Source sentence in the sentence pair.""" + ) + target_sentence: Optional[str] = Field( + default=None, description="""Target sentence in the sentence pair.""" + ) + + +class SchemaPromptSpecReferenceSentencePairDict(TypedDict, total=False): + """A pair of sentences used as reference in source and target languages.""" + + source_sentence: Optional[str] + """Source sentence in the sentence pair.""" + + target_sentence: Optional[str] + """Target sentence in the sentence pair.""" + + +SchemaPromptSpecReferenceSentencePairOrDict = Union[ + SchemaPromptSpecReferenceSentencePair, SchemaPromptSpecReferenceSentencePairDict +] + + +class SchemaPromptSpecReferenceSentencePairList(_common.BaseModel): + """A list of reference sentence pairs.""" + + reference_sentence_pairs: Optional[list[SchemaPromptSpecReferenceSentencePair]] = ( + Field(default=None, description="""Reference sentence pairs.""") + ) + + +class SchemaPromptSpecReferenceSentencePairListDict(TypedDict, total=False): + """A list of reference sentence pairs.""" + + reference_sentence_pairs: Optional[list[SchemaPromptSpecReferenceSentencePairDict]] + """Reference sentence pairs.""" + + +SchemaPromptSpecReferenceSentencePairListOrDict = Union[ + SchemaPromptSpecReferenceSentencePairList, + SchemaPromptSpecReferenceSentencePairListDict, +] + + +class SchemaPromptSpecTranslationFileInputSource(_common.BaseModel): + + content: Optional[str] = Field(default=None, description="""The 
file's contents.""") + display_name: Optional[str] = Field( + default=None, description="""The file's display name.""" + ) + mime_type: Optional[str] = Field( + default=None, description="""The file's mime type.""" + ) + + +class SchemaPromptSpecTranslationFileInputSourceDict(TypedDict, total=False): + + content: Optional[str] + """The file's contents.""" + + display_name: Optional[str] + """The file's display name.""" + + mime_type: Optional[str] + """The file's mime type.""" + + +SchemaPromptSpecTranslationFileInputSourceOrDict = Union[ + SchemaPromptSpecTranslationFileInputSource, + SchemaPromptSpecTranslationFileInputSourceDict, +] + + +class SchemaPromptSpecTranslationGcsInputSource(_common.BaseModel): + + input_uri: Optional[str] = Field( + default=None, + description="""Source data URI. For example, `gs://my_bucket/my_object`.""", + ) + + +class SchemaPromptSpecTranslationGcsInputSourceDict(TypedDict, total=False): + + input_uri: Optional[str] + """Source data URI. For example, `gs://my_bucket/my_object`.""" + + +SchemaPromptSpecTranslationGcsInputSourceOrDict = Union[ + SchemaPromptSpecTranslationGcsInputSource, + SchemaPromptSpecTranslationGcsInputSourceDict, +] + + +class SchemaPromptSpecTranslationSentenceFileInput(_common.BaseModel): + + file_input_source: Optional[SchemaPromptSpecTranslationFileInputSource] = Field( + default=None, description="""Inlined file source.""" + ) + gcs_input_source: Optional[SchemaPromptSpecTranslationGcsInputSource] = Field( + default=None, description="""Cloud Storage file source.""" + ) + + +class SchemaPromptSpecTranslationSentenceFileInputDict(TypedDict, total=False): + + file_input_source: Optional[SchemaPromptSpecTranslationFileInputSourceDict] + """Inlined file source.""" + + gcs_input_source: Optional[SchemaPromptSpecTranslationGcsInputSourceDict] + """Cloud Storage file source.""" + + +SchemaPromptSpecTranslationSentenceFileInputOrDict = Union[ + SchemaPromptSpecTranslationSentenceFileInput, + 
SchemaPromptSpecTranslationSentenceFileInputDict, +] + + +class SchemaPromptSpecTranslationExample(_common.BaseModel): + """The translation example that contains reference sentences from various sources.""" + + reference_sentence_pair_lists: Optional[ + list[SchemaPromptSpecReferenceSentencePairList] + ] = Field(default=None, description="""The reference sentences from inline text.""") + reference_sentences_file_inputs: Optional[ + list[SchemaPromptSpecTranslationSentenceFileInput] + ] = Field(default=None, description="""The reference sentences from file.""") + + +class SchemaPromptSpecTranslationExampleDict(TypedDict, total=False): + """The translation example that contains reference sentences from various sources.""" + + reference_sentence_pair_lists: Optional[ + list[SchemaPromptSpecReferenceSentencePairListDict] + ] + """The reference sentences from inline text.""" + + reference_sentences_file_inputs: Optional[ + list[SchemaPromptSpecTranslationSentenceFileInputDict] + ] + """The reference sentences from file.""" + + +SchemaPromptSpecTranslationExampleOrDict = Union[ + SchemaPromptSpecTranslationExample, SchemaPromptSpecTranslationExampleDict +] + + +class SchemaPromptSpecTranslationOption(_common.BaseModel): + """Optional settings for translation prompt.""" + + number_of_shots: Optional[int] = Field( + default=None, description="""How many shots to use.""" + ) + + +class SchemaPromptSpecTranslationOptionDict(TypedDict, total=False): + """Optional settings for translation prompt.""" + + number_of_shots: Optional[int] + """How many shots to use.""" + + +SchemaPromptSpecTranslationOptionOrDict = Union[ + SchemaPromptSpecTranslationOption, SchemaPromptSpecTranslationOptionDict +] + + +class SchemaPromptSpecTranslationPrompt(_common.BaseModel): + """Prompt variation for Translation use case.""" + + example: Optional[SchemaPromptSpecTranslationExample] = Field( + default=None, description="""The translation example.""" + ) + option: 
Optional[SchemaPromptSpecTranslationOption] = Field( + default=None, description="""The translation option.""" + ) + prompt_message: Optional[SchemaPromptSpecPromptMessage] = Field( + default=None, description="""The prompt message.""" + ) + source_language_code: Optional[str] = Field( + default=None, description="""The source language code.""" + ) + target_language_code: Optional[str] = Field( + default=None, description="""The target language code.""" + ) + + +class SchemaPromptSpecTranslationPromptDict(TypedDict, total=False): + """Prompt variation for Translation use case.""" + + example: Optional[SchemaPromptSpecTranslationExampleDict] + """The translation example.""" + + option: Optional[SchemaPromptSpecTranslationOptionDict] + """The translation option.""" + + prompt_message: Optional[SchemaPromptSpecPromptMessageDict] + """The prompt message.""" + + source_language_code: Optional[str] + """The source language code.""" + + target_language_code: Optional[str] + """The target language code.""" + + +SchemaPromptSpecTranslationPromptOrDict = Union[ + SchemaPromptSpecTranslationPrompt, SchemaPromptSpecTranslationPromptDict +] + + +class SchemaPromptApiSchema(_common.BaseModel): + """The A2 schema of a prompt.""" + + api_schema_version: Optional[str] = Field( + default=None, + description="""The Schema version that represents changes to the API behavior.""", + ) + executions: Optional[list[SchemaPromptInstancePromptExecution]] = Field( + default=None, + description="""A list of execution instances for constructing a ready-to-use prompt.""", + ) + multimodal_prompt: Optional[SchemaPromptSpecMultimodalPrompt] = Field( + default=None, + description="""Multimodal prompt which embeds preambles to prompt string.""", + ) + structured_prompt: Optional[SchemaPromptSpecStructuredPrompt] = Field( + default=None, + description="""The prompt variation that stores preambles in separate fields.""", + ) + translation_prompt: Optional[SchemaPromptSpecTranslationPrompt] = Field( + 
default=None, description="""The prompt variation for Translation use case.""" + ) + + +class SchemaPromptApiSchemaDict(TypedDict, total=False): + """The A2 schema of a prompt.""" + + api_schema_version: Optional[str] + """The Schema version that represents changes to the API behavior.""" + + executions: Optional[list[SchemaPromptInstancePromptExecutionDict]] + """A list of execution instances for constructing a ready-to-use prompt.""" + + multimodal_prompt: Optional[SchemaPromptSpecMultimodalPromptDict] + """Multimodal prompt which embeds preambles to prompt string.""" + + structured_prompt: Optional[SchemaPromptSpecStructuredPromptDict] + """The prompt variation that stores preambles in separate fields.""" + + translation_prompt: Optional[SchemaPromptSpecTranslationPromptDict] + """The prompt variation for Translation use case.""" + + +SchemaPromptApiSchemaOrDict = Union[SchemaPromptApiSchema, SchemaPromptApiSchemaDict] + + +class SchemaTextPromptDatasetMetadata(_common.BaseModel): + """Represents the text prompt dataset metadata.""" + + candidate_count: Optional[int] = Field( + default=None, description="""Number of candidates.""" + ) + gcs_uri: Optional[str] = Field( + default=None, + description="""The Google Cloud Storage URI that stores the prompt data.""", + ) + grounding_config: Optional[SchemaPredictParamsGroundingConfig] = Field( + default=None, description="""Grounding checking configuration.""" + ) + has_prompt_variable: Optional[bool] = Field( + default=None, description="""Whether the prompt dataset has prompt variable.""" + ) + logprobs: Optional[bool] = Field( + default=None, + description="""Whether or not the user has enabled logit probabilities in the model parameters.""", + ) + max_output_tokens: Optional[int] = Field( + default=None, + description="""Value of the maximum number of tokens generated set when the dataset was saved.""", + ) + note: Optional[str] = Field( + default=None, + description="""User-created prompt note. 
Note size limit is 2KB.""", + ) + prompt_api_schema: Optional[SchemaPromptApiSchema] = Field( + default=None, + description="""The API schema of the prompt to support both UI and SDK usages.""", + ) + prompt_type: Optional[str] = Field( + default=None, description="""Type of the prompt dataset.""" + ) + seed_enabled: Optional[bool] = Field( + default=None, + description="""Seeding enables model to return a deterministic response on a best effort basis. Determinism isn't guaranteed. This field determines whether or not seeding is enabled.""", + ) + seed_value: Optional[int] = Field( + default=None, description="""The actual value of the seed.""" + ) + stop_sequences: Optional[list[str]] = Field( + default=None, description="""Customized stop sequences.""" + ) + system_instruction: Optional[str] = Field( + default=None, + description="""The content of the prompt dataset system instruction.""", + ) + system_instruction_gcs_uri: Optional[str] = Field( + default=None, + description="""The Google Cloud Storage URI that stores the system instruction, starting with gs://.""", + ) + temperature: Optional[float] = Field( + default=None, + description="""Temperature value used for sampling set when the dataset was saved. This value is used to tune the degree of randomness.""", + ) + text: Optional[str] = Field( + default=None, description="""The content of the prompt dataset.""" + ) + top_k: Optional[int] = Field( + default=None, + description="""Top K value set when the dataset was saved. This value determines how many candidates with highest probability from the vocab would be selected for each decoding step.""", + ) + top_p: Optional[float] = Field( + default=None, + description="""Top P value set when the dataset was saved. 
Given topK tokens for decoding, top candidates will be selected until the sum of their probabilities is topP.""", + ) + + +class SchemaTextPromptDatasetMetadataDict(TypedDict, total=False): + """Represents the text prompt dataset metadata.""" + + candidate_count: Optional[int] + """Number of candidates.""" + + gcs_uri: Optional[str] + """The Google Cloud Storage URI that stores the prompt data.""" + + grounding_config: Optional[SchemaPredictParamsGroundingConfigDict] + """Grounding checking configuration.""" + + has_prompt_variable: Optional[bool] + """Whether the prompt dataset has prompt variable.""" + + logprobs: Optional[bool] + """Whether or not the user has enabled logit probabilities in the model parameters.""" + + max_output_tokens: Optional[int] + """Value of the maximum number of tokens generated set when the dataset was saved.""" + + note: Optional[str] + """User-created prompt note. Note size limit is 2KB.""" + + prompt_api_schema: Optional[SchemaPromptApiSchemaDict] + """The API schema of the prompt to support both UI and SDK usages.""" + + prompt_type: Optional[str] + """Type of the prompt dataset.""" + + seed_enabled: Optional[bool] + """Seeding enables model to return a deterministic response on a best effort basis. Determinism isn't guaranteed. This field determines whether or not seeding is enabled.""" + + seed_value: Optional[int] + """The actual value of the seed.""" + + stop_sequences: Optional[list[str]] + """Customized stop sequences.""" + + system_instruction: Optional[str] + """The content of the prompt dataset system instruction.""" + + system_instruction_gcs_uri: Optional[str] + """The Google Cloud Storage URI that stores the system instruction, starting with gs://.""" + + temperature: Optional[float] + """Temperature value used for sampling set when the dataset was saved. 
This value is used to tune the degree of randomness.""" + + text: Optional[str] + """The content of the prompt dataset.""" + + top_k: Optional[int] + """Top K value set when the dataset was saved. This value determines how many candidates with highest probability from the vocab would be selected for each decoding step.""" + + top_p: Optional[float] + """Top P value set when the dataset was saved. Given topK tokens for decoding, top candidates will be selected until the sum of their probabilities is topP.""" + + +SchemaTextPromptDatasetMetadataOrDict = Union[ + SchemaTextPromptDatasetMetadata, SchemaTextPromptDatasetMetadataDict +] + + +class _CreateDatasetParameters(_common.BaseModel): + """Parameters for creating a dataset resource to store prompts.""" + + config: Optional[CreateDatasetConfig] = Field(default=None, description="""""") + name: Optional[str] = Field(default=None, description="""""") + display_name: Optional[str] = Field(default=None, description="""""") + metadata_schema_uri: Optional[str] = Field(default=None, description="""""") + metadata: Optional[SchemaTextPromptDatasetMetadata] = Field( + default=None, description="""""" + ) + description: Optional[str] = Field(default=None, description="""""") + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( + default=None, description="""""" + ) + model_reference: Optional[str] = Field(default=None, description="""""") + + +class _CreateDatasetParametersDict(TypedDict, total=False): + """Parameters for creating a dataset resource to store prompts.""" + + config: Optional[CreateDatasetConfigDict] + """""" + + name: Optional[str] + """""" + + display_name: Optional[str] + """""" + + metadata_schema_uri: Optional[str] + """""" + + metadata: Optional[SchemaTextPromptDatasetMetadataDict] + """""" + + description: Optional[str] + """""" + + encryption_spec: Optional[genai_types.EncryptionSpecDict] + """""" + + model_reference: Optional[str] + """""" + + +_CreateDatasetParametersOrDict = Union[ + 
_CreateDatasetParameters, _CreateDatasetParametersDict +] + + +class DatasetOperation(_common.BaseModel): + """Represents the create dataset operation.""" + + name: Optional[str] = Field( + default=None, + description="""The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""", + ) + metadata: Optional[dict[str, Any]] = Field( + default=None, + description="""Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.""", + ) + done: Optional[bool] = Field( + default=None, + description="""If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.""", + ) + error: Optional[dict[str, Any]] = Field( + default=None, + description="""The error result of the operation in case of failure or cancellation.""", + ) + response: Optional[dict[str, Any]] = Field( + default=None, description="""The result of the operation.""" + ) + + +class DatasetOperationDict(TypedDict, total=False): + """Represents the create dataset operation.""" + + name: Optional[str] + """The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""" + + metadata: Optional[dict[str, Any]] + """Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. 
Any method that returns a long-running operation should document the metadata type, if any.""" + + done: Optional[bool] + """If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.""" + + error: Optional[dict[str, Any]] + """The error result of the operation in case of failure or cancellation.""" + + response: Optional[dict[str, Any]] + """The result of the operation.""" + + +DatasetOperationOrDict = Union[DatasetOperation, DatasetOperationDict] + + +class CreateDatasetVersionConfig(_common.BaseModel): + """Config for creating a dataset version resource to store prompts.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + + +class CreateDatasetVersionConfigDict(TypedDict, total=False): + """Config for creating a dataset version resource to store prompts.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + +CreateDatasetVersionConfigOrDict = Union[ + CreateDatasetVersionConfig, CreateDatasetVersionConfigDict +] + + +class DatasetVersion(_common.BaseModel): + """Represents a dataset version resource to store prompts.""" + + metadata: Optional[SchemaTextPromptDatasetMetadata] = Field( + default=None, + description="""Required. Output only. Additional information about the DatasetVersion.""", + ) + big_query_dataset_name: Optional[str] = Field( + default=None, + description="""Output only. Name of the associated BigQuery dataset.""", + ) + create_time: Optional[datetime.datetime] = Field( + default=None, + description="""Output only. Timestamp when this DatasetVersion was created.""", + ) + display_name: Optional[str] = Field( + default=None, + description="""The user-defined name of the DatasetVersion. 
The name can be up to 128 characters long and can consist of any UTF-8 characters.""", + ) + etag: Optional[str] = Field( + default=None, + description="""Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.""", + ) + model_reference: Optional[str] = Field( + default=None, + description="""Output only. Reference to the public base model last used by the dataset version. Only set for prompt dataset versions.""", + ) + name: Optional[str] = Field( + default=None, + description="""Output only. Identifier. The resource name of the DatasetVersion. Format: `projects/{project}/locations/{location}/datasets/{dataset}/datasetVersions/{dataset_version}`""", + ) + satisfies_pzi: Optional[bool] = Field( + default=None, description="""Output only. Reserved for future use.""" + ) + satisfies_pzs: Optional[bool] = Field( + default=None, description="""Output only. Reserved for future use.""" + ) + update_time: Optional[datetime.datetime] = Field( + default=None, + description="""Output only. Timestamp when this DatasetVersion was last updated.""", + ) + + +class DatasetVersionDict(TypedDict, total=False): + """Represents a dataset version resource to store prompts.""" + + metadata: Optional[SchemaTextPromptDatasetMetadataDict] + """Required. Output only. Additional information about the DatasetVersion.""" + + big_query_dataset_name: Optional[str] + """Output only. Name of the associated BigQuery dataset.""" + + create_time: Optional[datetime.datetime] + """Output only. Timestamp when this DatasetVersion was created.""" + + display_name: Optional[str] + """The user-defined name of the DatasetVersion. The name can be up to 128 characters long and can consist of any UTF-8 characters.""" + + etag: Optional[str] + """Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.""" + + model_reference: Optional[str] + """Output only. 
Reference to the public base model last used by the dataset version. Only set for prompt dataset versions.""" + + name: Optional[str] + """Output only. Identifier. The resource name of the DatasetVersion. Format: `projects/{project}/locations/{location}/datasets/{dataset}/datasetVersions/{dataset_version}`""" + + satisfies_pzi: Optional[bool] + """Output only. Reserved for future use.""" + + satisfies_pzs: Optional[bool] + """Output only. Reserved for future use.""" + + update_time: Optional[datetime.datetime] + """Output only. Timestamp when this DatasetVersion was last updated.""" + + +DatasetVersionOrDict = Union[DatasetVersion, DatasetVersionDict] + + +class _CreateDatasetVersionParameters(_common.BaseModel): + """Represents the create dataset version parameters.""" + + config: Optional[CreateDatasetVersionConfig] = Field( + default=None, description="""""" + ) + dataset_name: Optional[str] = Field(default=None, description="""""") + dataset_version: Optional[DatasetVersion] = Field(default=None, description="""""") + parent: Optional[str] = Field(default=None, description="""""") + display_name: Optional[str] = Field(default=None, description="""""") + + +class _CreateDatasetVersionParametersDict(TypedDict, total=False): + """Represents the create dataset version parameters.""" + + config: Optional[CreateDatasetVersionConfigDict] + """""" + + dataset_name: Optional[str] + """""" + + dataset_version: Optional[DatasetVersionDict] + """""" + + parent: Optional[str] + """""" + + display_name: Optional[str] + """""" + + +_CreateDatasetVersionParametersOrDict = Union[ + _CreateDatasetVersionParameters, _CreateDatasetVersionParametersDict +] + + +class _GetDatasetParameters(_common.BaseModel): + """Parameters for getting a dataset resource to store prompts.""" + + config: Optional[VertexBaseConfig] = Field(default=None, description="""""") + name: Optional[str] = Field(default=None, description="""""") + + +class _GetDatasetParametersDict(TypedDict, total=False): + 
"""Parameters for getting a dataset resource to store prompts.""" + + config: Optional[VertexBaseConfigDict] + """""" + + name: Optional[str] + """""" + + +_GetDatasetParametersOrDict = Union[_GetDatasetParameters, _GetDatasetParametersDict] + + +class SavedQuery(_common.BaseModel): + """A SavedQuery is a view of the dataset. It references a subset of annotations by problem type and filters.""" + + annotation_filter: Optional[str] = Field( + default=None, + description="""Output only. Filters on the Annotations in the dataset.""", + ) + annotation_spec_count: Optional[int] = Field( + default=None, + description="""Output only. Number of AnnotationSpecs in the context of the SavedQuery.""", + ) + create_time: Optional[datetime.datetime] = Field( + default=None, + description="""Output only. Timestamp when this SavedQuery was created.""", + ) + display_name: Optional[str] = Field( + default=None, + description="""Required. The user-defined name of the SavedQuery. The name can be up to 128 characters long and can consist of any UTF-8 characters.""", + ) + etag: Optional[str] = Field( + default=None, + description="""Used to perform a consistent read-modify-write update. If not set, a blind "overwrite" update happens.""", + ) + metadata: Optional[Any] = Field( + default=None, + description="""Some additional information about the SavedQuery.""", + ) + name: Optional[str] = Field( + default=None, description="""Output only. Resource name of the SavedQuery.""" + ) + problem_type: Optional[str] = Field( + default=None, + description="""Required. Problem type of the SavedQuery. Allowed values: * IMAGE_CLASSIFICATION_SINGLE_LABEL * IMAGE_CLASSIFICATION_MULTI_LABEL * IMAGE_BOUNDING_POLY * IMAGE_BOUNDING_BOX * TEXT_CLASSIFICATION_SINGLE_LABEL * TEXT_CLASSIFICATION_MULTI_LABEL * TEXT_EXTRACTION * TEXT_SENTIMENT * VIDEO_CLASSIFICATION * VIDEO_OBJECT_TRACKING""", + ) + support_automl_training: Optional[bool] = Field( + default=None, + description="""Output only. 
If the Annotations belonging to the SavedQuery can be used for AutoML training.""", + ) + update_time: Optional[datetime.datetime] = Field( + default=None, + description="""Output only. Timestamp when SavedQuery was last updated.""", + ) + + +class SavedQueryDict(TypedDict, total=False): + """A SavedQuery is a view of the dataset. It references a subset of annotations by problem type and filters.""" + + annotation_filter: Optional[str] + """Output only. Filters on the Annotations in the dataset.""" + + annotation_spec_count: Optional[int] + """Output only. Number of AnnotationSpecs in the context of the SavedQuery.""" + + create_time: Optional[datetime.datetime] + """Output only. Timestamp when this SavedQuery was created.""" + + display_name: Optional[str] + """Required. The user-defined name of the SavedQuery. The name can be up to 128 characters long and can consist of any UTF-8 characters.""" + + etag: Optional[str] + """Used to perform a consistent read-modify-write update. If not set, a blind "overwrite" update happens.""" + + metadata: Optional[Any] + """Some additional information about the SavedQuery.""" + + name: Optional[str] + """Output only. Resource name of the SavedQuery.""" + + problem_type: Optional[str] + """Required. Problem type of the SavedQuery. Allowed values: * IMAGE_CLASSIFICATION_SINGLE_LABEL * IMAGE_CLASSIFICATION_MULTI_LABEL * IMAGE_BOUNDING_POLY * IMAGE_BOUNDING_BOX * TEXT_CLASSIFICATION_SINGLE_LABEL * TEXT_CLASSIFICATION_MULTI_LABEL * TEXT_EXTRACTION * TEXT_SENTIMENT * VIDEO_CLASSIFICATION * VIDEO_OBJECT_TRACKING""" + + support_automl_training: Optional[bool] + """Output only. If the Annotations belonging to the SavedQuery can be used for AutoML training.""" + + update_time: Optional[datetime.datetime] + """Output only. 
Timestamp when SavedQuery was last updated.""" + + +SavedQueryOrDict = Union[SavedQuery, SavedQueryDict] + + +class Dataset(_common.BaseModel): + """Represents a dataset resource to store prompts.""" + + metadata: Optional[SchemaTextPromptDatasetMetadata] = Field( + default=None, + description="""Required. Additional information about the Dataset.""", + ) + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( + default=None, + description="""Customer-managed encryption key spec for a Dataset. If set, this Dataset and all sub-resources of this Dataset will be secured by this key.""", + ) + create_time: Optional[datetime.datetime] = Field( + default=None, + description="""Output only. Timestamp when this Dataset was created.""", + ) + data_item_count: Optional[int] = Field( + default=None, + description="""Output only. The number of DataItems in this Dataset. Only apply for non-structured Dataset.""", + ) + description: Optional[str] = Field( + default=None, description="""The description of the Dataset.""" + ) + display_name: Optional[str] = Field( default=None, - description="""Required. The name of the agent that sent the event, or user.""", + description="""Required. The user-defined name of the Dataset. The name can be up to 128 characters long and can consist of any UTF-8 characters.""", ) - error_code: Optional[str] = Field( + etag: Optional[str] = Field( default=None, - description="""Optional. Error code if the response is an error. Code varies by model.""", + description="""Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens.""", ) - error_message: Optional[str] = Field( + labels: Optional[dict[str, str]] = Field( default=None, - description="""Optional. Error message if the response is an error.""", + description="""The labels with user-defined metadata to organize your Datasets. 
Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. No more than 64 user labels can be associated with one Dataset (System labels are excluded). See https://goo.gl/xmQnxf for more information and examples of labels. System reserved label keys are prefixed with "aiplatform.googleapis.com/" and are immutable. Following system labels exist for each Dataset: * "aiplatform.googleapis.com/dataset_metadata_schema": output only, its value is the metadata_schema's title.""", ) - event_metadata: Optional[EventMetadata] = Field( + metadata_artifact: Optional[str] = Field( default=None, - description="""Optional. Metadata relating to this event.""", + description="""Output only. The resource name of the Artifact that was created in MetadataStore when creating the Dataset. The Artifact resource name pattern is `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`.""", ) - invocation_id: Optional[str] = Field( + metadata_schema_uri: Optional[str] = Field( default=None, - description="""Required. The invocation id of the event, multiple events can have the same invocation id.""", + description="""Required. Points to a YAML file stored on Google Cloud Storage describing additional information about the Dataset. The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/metadata/.""", + ) + model_reference: Optional[str] = Field( + default=None, + description="""Optional. Reference to the public base model last used by the dataset. Only set for prompt datasets.""", ) name: Optional[str] = Field( default=None, - description="""Identifier. The resource name of the event. 
Format:`projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}/events/{event}`.""", + description="""Output only. Identifier. The resource name of the Dataset. Format: `projects/{project}/locations/{location}/datasets/{dataset}`""", ) - timestamp: Optional[datetime.datetime] = Field( + satisfies_pzi: Optional[bool] = Field( + default=None, description="""Output only. Reserved for future use.""" + ) + satisfies_pzs: Optional[bool] = Field( + default=None, description="""Output only. Reserved for future use.""" + ) + saved_queries: Optional[list[SavedQuery]] = Field( default=None, - description="""Required. Timestamp when the event was created on client side.""", + description="""All SavedQueries belong to the Dataset will be returned in List/Get Dataset response. The annotation_specs field will not be populated except for UI cases which will only use annotation_spec_count. In CreateDataset request, a SavedQuery is created together if this field is set, up to one SavedQuery can be set in CreateDatasetRequest. The SavedQuery should not contain any AnnotationSpec.""", + ) + update_time: Optional[datetime.datetime] = Field( + default=None, + description="""Output only. Timestamp when this Dataset was last updated.""", ) + model_config = ConfigDict(alias_generator=_camel_to_snake, populate_by_name=True) -class SessionEventDict(TypedDict, total=False): - """A session event.""" + @classmethod + def _from_response( + cls: typing.Type["Dataset"], + *, + response: dict[str, object], + kwargs: dict[str, object], + ) -> "Dataset": + """Converts a dictionary response into a Dataset object.""" + + # Some nested Dataset fields don't have converters, so we need to ensure camelCase fields from the API are converted to snake_case before calling _from_response. 
+ # Instantiating a Dataset before calling _from_response ensures the model_config converting camel to snake is used + validated_dataset = Dataset.model_validate(response) + result = super()._from_response( + response=validated_dataset.model_dump(), kwargs=kwargs + ) + return result - content: Optional[genai_types.ContentDict] - """Optional. Content of the event provided by the author.""" - actions: Optional[EventActionsDict] - """Optional. Actions executed by the agent.""" +class DatasetDict(TypedDict, total=False): + """Represents a dataset resource to store prompts.""" - author: Optional[str] - """Required. The name of the agent that sent the event, or user.""" + metadata: Optional[SchemaTextPromptDatasetMetadataDict] + """Required. Additional information about the Dataset.""" - error_code: Optional[str] - """Optional. Error code if the response is an error. Code varies by model.""" + encryption_spec: Optional[genai_types.EncryptionSpecDict] + """Customer-managed encryption key spec for a Dataset. If set, this Dataset and all sub-resources of this Dataset will be secured by this key.""" - error_message: Optional[str] - """Optional. Error message if the response is an error.""" + create_time: Optional[datetime.datetime] + """Output only. Timestamp when this Dataset was created.""" - event_metadata: Optional[EventMetadataDict] - """Optional. Metadata relating to this event.""" + data_item_count: Optional[int] + """Output only. The number of DataItems in this Dataset. Only apply for non-structured Dataset.""" - invocation_id: Optional[str] - """Required. The invocation id of the event, multiple events can have the same invocation id.""" + description: Optional[str] + """The description of the Dataset.""" + + display_name: Optional[str] + """Required. The user-defined name of the Dataset. The name can be up to 128 characters long and can consist of any UTF-8 characters.""" + + etag: Optional[str] + """Used to perform consistent read-modify-write updates. 
If not set, a blind "overwrite" update happens.""" + + labels: Optional[dict[str, str]] + """The labels with user-defined metadata to organize your Datasets. Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. No more than 64 user labels can be associated with one Dataset (System labels are excluded). See https://goo.gl/xmQnxf for more information and examples of labels. System reserved label keys are prefixed with "aiplatform.googleapis.com/" and are immutable. Following system labels exist for each Dataset: * "aiplatform.googleapis.com/dataset_metadata_schema": output only, its value is the metadata_schema's title.""" + + metadata_artifact: Optional[str] + """Output only. The resource name of the Artifact that was created in MetadataStore when creating the Dataset. The Artifact resource name pattern is `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`.""" + + metadata_schema_uri: Optional[str] + """Required. Points to a YAML file stored on Google Cloud Storage describing additional information about the Dataset. The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/metadata/.""" + + model_reference: Optional[str] + """Optional. Reference to the public base model last used by the dataset. Only set for prompt datasets.""" name: Optional[str] - """Identifier. The resource name of the event. Format:`projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}/sessions/{session}/events/{event}`.""" + """Output only. Identifier. The resource name of the Dataset. Format: `projects/{project}/locations/{location}/datasets/{dataset}`""" - timestamp: Optional[datetime.datetime] - """Required. 
Timestamp when the event was created on client side.""" + satisfies_pzi: Optional[bool] + """Output only. Reserved for future use.""" + satisfies_pzs: Optional[bool] + """Output only. Reserved for future use.""" -SessionEventOrDict = Union[SessionEvent, SessionEventDict] + saved_queries: Optional[list[SavedQueryDict]] + """All SavedQueries belong to the Dataset will be returned in List/Get Dataset response. The annotation_specs field will not be populated except for UI cases which will only use annotation_spec_count. In CreateDataset request, a SavedQuery is created together if this field is set, up to one SavedQuery can be set in CreateDatasetRequest. The SavedQuery should not contain any AnnotationSpec.""" + + update_time: Optional[datetime.datetime] + """Output only. Timestamp when this Dataset was last updated.""" -class ListAgentEngineSessionEventsResponse(_common.BaseModel): - """Response for listing agent engine session events.""" +DatasetOrDict = Union[Dataset, DatasetDict] - sdk_http_response: Optional[HttpResponse] = Field( - default=None, description="""Used to retain the full HTTP response.""" + +class _GetDatasetVersionParameters(_common.BaseModel): + """Parameters for getting a dataset resource to store prompts.""" + + config: Optional[VertexBaseConfig] = Field(default=None, description="""""") + dataset_id: Optional[str] = Field(default=None, description="""""") + dataset_version_id: Optional[str] = Field(default=None, description="""""") + + +class _GetDatasetVersionParametersDict(TypedDict, total=False): + """Parameters for getting a dataset resource to store prompts.""" + + config: Optional[VertexBaseConfigDict] + """""" + + dataset_id: Optional[str] + """""" + + dataset_version_id: Optional[str] + """""" + + +_GetDatasetVersionParametersOrDict = Union[ + _GetDatasetVersionParameters, _GetDatasetVersionParametersDict +] + + +class GetDatasetOperationConfig(_common.BaseModel): + """Config for getting a dataset version operation.""" + + 
http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" ) - next_page_token: Optional[str] = Field(default=None, description="""""") - session_events: Optional[list[SessionEvent]] = Field( - default=None, description="""List of session events.""" + + +class GetDatasetOperationConfigDict(TypedDict, total=False): + """Config for getting a dataset version operation.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + +GetDatasetOperationConfigOrDict = Union[ + GetDatasetOperationConfig, GetDatasetOperationConfigDict +] + + +class _GetDatasetOperationParameters(_common.BaseModel): + """Parameters for getting a dataset resource to store prompts.""" + + config: Optional[GetDatasetOperationConfig] = Field( + default=None, description="""""" ) + dataset_id: Optional[str] = Field(default=None, description="""""") + operation_id: Optional[str] = Field(default=None, description="""""") -class ListAgentEngineSessionEventsResponseDict(TypedDict, total=False): - """Response for listing agent engine session events.""" +class _GetDatasetOperationParametersDict(TypedDict, total=False): + """Parameters for getting a dataset resource to store prompts.""" - sdk_http_response: Optional[HttpResponseDict] - """Used to retain the full HTTP response.""" + config: Optional[GetDatasetOperationConfigDict] + """""" - next_page_token: Optional[str] + dataset_id: Optional[str] """""" - session_events: Optional[list[SessionEventDict]] - """List of session events.""" + operation_id: Optional[str] + """""" -ListAgentEngineSessionEventsResponseOrDict = Union[ - ListAgentEngineSessionEventsResponse, - ListAgentEngineSessionEventsResponseDict, +_GetDatasetOperationParametersOrDict = Union[ + _GetDatasetOperationParameters, _GetDatasetOperationParametersDict ] @@ -7767,13 +8868,12 @@ def _get_default_evaluation_steps() -> dict[str, str]: """Returns the default evaluation 
steps for metric evaluation.""" return { "Step 1": ( - "Assess the response in aspects of all criteria provided." - " Provide assessment according to each criterion." + "Assess the response in aspects of all criteria provided. Provide" + " assessment according to each criterion." ), "Step 2": ( "Score based on the Rating Scores. Give a brief rationale to" - " explain your evaluation considering each individual" - " criterion." + " explain your evaluation considering each individual criterion." ), } @@ -7815,8 +8915,8 @@ def _prepare_fields_and_construct_text(cls, data: Any) -> Any: initialization. Args: - data: Input data for the model, either a dictionary or an existing - model instance. + data: Input data for the model, either a dictionary or an existing model + instance. Returns: Processed data dictionary with the 'text' field constructed. @@ -7826,14 +8926,14 @@ def _prepare_fields_and_construct_text(cls, data: Any) -> Any: if "text" in data: raise ValueError( - "The 'text' field is automatically constructed and should not" - " be provided manually." + "The 'text' field is automatically constructed and should not be" + " provided manually." ) if data.get("criteria") is None or data.get("rating_scores") is None: raise ValueError( - "Both 'criteria' and 'rating_scores' are required to construct" - " theLLM-based metric prompt template text." + "Both 'criteria' and 'rating_scores' are required to construct the" + "LLM-based metric prompt template text." 
) instruction = data.get("instruction", cls._get_default_instruction()) @@ -7902,16 +9002,13 @@ class EvalRunInferenceConfig(_common.BaseModel): """Optional parameters for inference.""" dest: Optional[str] = Field( - default=None, - description="""The destination path for the inference results.""", + default=None, description="""The destination path for the inference results.""" ) prompt_template: Optional[Union[str, PromptTemplate]] = Field( - default=None, - description="""The prompt template to use for inference.""", + default=None, description="""The prompt template to use for inference.""" ) generate_content_config: Optional[genai_types.GenerateContentConfig] = Field( - default=None, - description="""The config for the generate content call.""", + default=None, description="""The config for the generate content call.""" ) @@ -7938,16 +9035,14 @@ class Message(_common.BaseModel): default=None, description="""Unique identifier for the message turn.""" ) content: Optional[genai_types.Content] = Field( - default=None, - description="""Content of the message, including function call.""", + default=None, description="""Content of the message, including function call.""" ) creation_timestamp: Optional[datetime.datetime] = Field( default=None, description="""Timestamp indicating when the message was created.""", ) author: Optional[str] = Field( - default=None, - description="""Name of the entity that produced the message.""", + default=None, description="""Name of the entity that produced the message.""" ) @@ -7974,8 +9069,7 @@ class AgentData(_common.BaseModel): """Container for all agent-specific data.""" tool_use_trajectory: Optional[list[Message]] = Field( - default=None, - description="""Tool use trajectory in chronological order.""", + default=None, description="""Tool use trajectory in chronological order.""" ) intermediate_responses: Optional[list[Message]] = Field( default=None, @@ -8026,8 +9120,7 @@ class EvalCase(_common.BaseModel): """A comprehensive 
representation of a GenAI interaction for evaluation.""" prompt: Optional[genai_types.Content] = Field( - default=None, - description="""The most recent user message (current input).""", + default=None, description="""The most recent user message (current input).""" ) responses: Optional[list[ResponseCandidate]] = Field( default=None, @@ -8049,8 +9142,7 @@ class EvalCase(_common.BaseModel): description="""Named groups of rubrics associated with this prompt. The key is a user-defined name for the rubric group.""", ) eval_case_id: Optional[str] = Field( - default=None, - description="""Unique identifier for the evaluation case.""", + default=None, description="""Unique identifier for the evaluation case.""" ) # Allow extra fields to support custom metric prompts and stay backward compatible. model_config = ConfigDict(frozen=True, extra="allow") @@ -8143,12 +9235,10 @@ class EvaluationDataset(_common.BaseModel): description="""The name of the candidate model or agent for this evaluation dataset.""", ) gcs_source: Optional[GcsSource] = Field( - default=None, - description="""The GCS source for the evaluation dataset.""", + default=None, description="""The GCS source for the evaluation dataset.""" ) bigquery_source: Optional[BigQuerySource] = Field( - default=None, - description="""The BigQuery source for the evaluation dataset.""", + default=None, description="""The BigQuery source for the evaluation dataset.""" ) @model_validator(mode="before") @@ -8157,12 +9247,64 @@ def _check_pandas_installed(cls, data: Any) -> Any: if isinstance(data, dict) and data.get("eval_dataset_df") is not None: if pd is None: logger.warning( - "Pandas is not installed, some evals features are not" - " available. Please install it with `pip install" + "Pandas is not installed, some evals features are not available." + " Please install it with `pip install" " google-cloud-aiplatform[evaluation]`." 
) return data + @classmethod + def load_from_observability_eval_cases( + cls, cases: list["ObservabilityEvalCase"] + ) -> "EvaluationDataset": + """Fetches GenAI Observability data from GCS and parses into a DataFrame.""" + try: + import pandas as pd + from . import _evals_utils + + formats = [] + requests = [] + responses = [] + system_instructions = [] + + for case in cases: + gcs_utils = _evals_utils.GcsUtils( + case.api_client._api_client if case.api_client else None + ) + + # Associate "observability" data format for given sources + formats.append("observability") + + # Input source + request_data = gcs_utils.read_file_contents(case.input_src) + requests.append(request_data) + + # Output source + response_data = gcs_utils.read_file_contents(case.output_src) + responses.append(response_data) + + # System instruction source + system_instruction_data = "" + if case.system_instruction_src is not None: + system_instruction_data = gcs_utils.read_file_contents( + case.system_instruction_src + ) + system_instructions.append(system_instruction_data) + + eval_dataset_df = pd.DataFrame( + { + "format": formats, + "request": requests, + "response": responses, + "system_instruction": system_instructions, + } + ) + + except ImportError as e: + raise ImportError("Pandas DataFrame library is required.") from e + + return EvaluationDataset(eval_dataset_df=eval_dataset_df) + def show(self) -> None: """Shows the evaluation dataset.""" from . 
import _evals_visualization @@ -8330,8 +9472,7 @@ class AggregatedMetricResult(_common.BaseModel): default=None, description="""Number of valid cases in the dataset.""" ) num_cases_error: Optional[int] = Field( - default=None, - description="""Number of cases with errors in the dataset.""", + default=None, description="""Number of cases with errors in the dataset.""" ) mean_score: Optional[float] = Field( default=None, description="""Mean score of the metric.""" @@ -8385,8 +9526,7 @@ class EvaluationRunMetadata(_common.BaseModel): description="""Unique identifier for the evaluation dataset used for the evaluation run.""", ) creation_timestamp: Optional[datetime.datetime] = Field( - default=None, - description="""Creation timestamp of the evaluation run.""", + default=None, description="""Creation timestamp of the evaluation run.""" ) @@ -8437,13 +9577,18 @@ def show(self, candidate_names: Optional[List[str]] = None) -> None: Args: candidate_names: list of names for the evaluated candidates, used in - comparison reports. + comparison reports. """ from . 
import _evals_visualization _evals_visualization.display_evaluation_result(self, candidate_names) +PromptData = SchemaPromptSpecPromptMessage +PromptDataDict = SchemaPromptSpecPromptMessageDict +PromptDataOrDict = Union[PromptData, PromptDataDict] + + class EvaluationResultDict(TypedDict, total=False): """Result of an evaluation run for an evaluation dataset.""" @@ -8487,7 +9632,7 @@ class ContentMapContentsDict(TypedDict, total=False): class EvaluateMethodConfig(_common.BaseModel): """Optional parameters for the evaluate method.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) dataset_schema: Optional[Literal["GEMINI", "FLATTEN", "OPENAI"]] = Field( @@ -8497,15 +9642,14 @@ class EvaluateMethodConfig(_common.BaseModel): example in the dataset.""", ) dest: Optional[str] = Field( - default=None, - description="""The destination path for the evaluation results.""", + default=None, description="""The destination path for the evaluation results.""" ) class EvaluateMethodConfigDict(TypedDict, total=False): """Optional parameters for the evaluate method.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" dataset_schema: Optional[Literal["GEMINI", "FLATTEN", "OPENAI"]] @@ -8523,7 +9667,7 @@ class EvaluateMethodConfigDict(TypedDict, total=False): class EvaluateDatasetConfig(_common.BaseModel): """Config for evaluate instances.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) @@ -8531,7 +9675,7 @@ class EvaluateDatasetConfig(_common.BaseModel): class EvaluateDatasetConfigDict(TypedDict, total=False): """Config for evaluate instances.""" - http_options: Optional[HttpOptionsDict] + http_options: 
Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" @@ -8620,6 +9764,45 @@ class EvaluateDatasetOperationDict(TypedDict, total=False): ] +class ObservabilityEvalCase(_common.BaseModel): + """A single evaluation case instance for data stored in GCP Observability.""" + + input_src: Optional[str] = Field( + default=None, + description="""String containing the GCS reference to the GenAI input content.""", + ) + output_src: Optional[str] = Field( + default=None, + description="""String containing the GCS reference to the GenAI response content.""", + ) + system_instruction_src: Optional[str] = Field( + default=None, + description="""An optional string containing the GCS reference to the GenAI system instruction.""", + ) + api_client: Optional[Any] = Field( + default=None, description="""The underlying API client.""" + ) + + +class ObservabilityEvalCaseDict(TypedDict, total=False): + """A single evaluation case instance for data stored in GCP Observability.""" + + input_src: Optional[str] + """String containing the GCS reference to the GenAI input content.""" + + output_src: Optional[str] + """String containing the GCS reference to the GenAI response content.""" + + system_instruction_src: Optional[str] + """An optional string containing the GCS reference to the GenAI system instruction.""" + + api_client: Optional[Any] + """The underlying API client.""" + + +ObservabilityEvalCaseOrDict = Union[ObservabilityEvalCase, ObservabilityEvalCaseDict] + + class RubricGroup(_common.BaseModel): """A group of rubrics, used for grouping rubrics based on a metric or a version.""" @@ -8698,12 +9881,12 @@ def delete( """Deletes the agent engine. Args: - force (bool): Optional. If set to True, child resources will also be - deleted. Otherwise, the request will fail with FAILED_PRECONDITION - error when the Agent Engine has undeleted child resources. Defaults - to False. - config (DeleteAgentEngineConfig): Optional. 
Additional configurations - for deleting the Agent Engine. + force (bool): + Optional. If set to True, child resources will also be deleted. + Otherwise, the request will fail with FAILED_PRECONDITION error when + the Agent Engine has undeleted child resources. Defaults to False. + config (DeleteAgentEngineConfig): + Optional. Additional configurations for deleting the Agent Engine. """ if not isinstance(self.api_resource, ReasoningEngine): raise ValueError("api_resource is not initialized.") @@ -8729,7 +9912,7 @@ class AgentEngineDict(TypedDict, total=False): class AgentEngineConfig(_common.BaseModel): """Config for agent engine methods.""" - http_options: Optional[HttpOptions] = Field( + http_options: Optional[genai_types.HttpOptions] = Field( default=None, description="""Used to override HTTP request options.""" ) staging_bucket: Optional[str] = Field( @@ -8773,6 +9956,12 @@ class AgentEngineConfig(_common.BaseModel): If it is a dictionary, the keys are the environment variable names, and the values are the corresponding values.""", ) + service_account: Optional[str] = Field( + default=None, + description="""The service account to be used for the Agent Engine. + + If not specified, the default Reasoning Engine P6SA service agent will be used.""", + ) context_spec: Optional[ReasoningEngineContextSpec] = Field( default=None, description="""The context spec to be used for the Agent Engine.""", @@ -8808,7 +9997,7 @@ class AgentEngineConfig(_common.BaseModel): Recommended value: 2 * cpu + 1. Defaults to 9. 
""", ) - encryption_spec: Optional[EncryptionSpec] = Field( + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( default=None, description="""The encryption spec to be used for the Agent Engine.""", ) @@ -8817,7 +10006,7 @@ class AgentEngineConfig(_common.BaseModel): class AgentEngineConfigDict(TypedDict, total=False): """Config for agent engine methods.""" - http_options: Optional[HttpOptionsDict] + http_options: Optional[genai_types.HttpOptionsDict] """Used to override HTTP request options.""" staging_bucket: Optional[str] @@ -8855,6 +10044,11 @@ class AgentEngineConfigDict(TypedDict, total=False): If it is a dictionary, the keys are the environment variable names, and the values are the corresponding values.""" + service_account: Optional[str] + """The service account to be used for the Agent Engine. + + If not specified, the default Reasoning Engine P6SA service agent will be used.""" + context_spec: Optional[ReasoningEngineContextSpecDict] """The context spec to be used for the Agent Engine.""" @@ -8884,8 +10078,249 @@ class AgentEngineConfigDict(TypedDict, total=False): Recommended value: 2 * cpu + 1. Defaults to 9. 
""" - encryption_spec: Optional[EncryptionSpecDict] + encryption_spec: Optional[genai_types.EncryptionSpecDict] """The encryption spec to be used for the Agent Engine.""" AgentEngineConfigOrDict = Union[AgentEngineConfig, AgentEngineConfigDict] + + +class Prompt(_common.BaseModel): + """Represents a prompt.""" + + prompt_data: Optional["PromptData"] = Field(default=None, description="""""") + _dataset: Optional["Dataset"] = PrivateAttr(default=None) + _dataset_version: Optional["DatasetVersion"] = PrivateAttr(default=None) + + @property + def dataset(self) -> "Dataset": + return self._dataset + + @property + def dataset_version(self) -> "DatasetVersion": + return self._dataset_version + + @property + def prompt_id(self) -> Optional[str]: + """Returns the ID associated with the prompt resource.""" + if self._dataset and self._dataset.name: + return self._dataset.name.split("/")[-1] + + @property + def version_id(self) -> Optional[str]: + """Returns the ID associated with the prompt version resource.""" + if self._dataset_version and self._dataset_version.name: + return self._dataset_version.name.split("/")[-1] + + def assemble_contents(self) -> list[genai_types.Content]: + """Transforms a Prompt object into a list with a single genai_types.Content object. + + This method replaces the variables in the prompt template with the values provided in prompt.prompt_data.variables. + If no variables are provided, prompt.prompt_data.contents is returned as is. Only single-turn prompts are supported. + + This can be used to call generate_content() in the Gen AI SDK. 
+ + Example usage: + + my_prompt = types.Prompt( + prompt_data=types.PromptData( + model="gemini-2.0-flash-001", + contents=[ + genai_types.Content( + parts=[ + genai_types.Part(text="Hello {name}!"), + ], + ), + ], + variables=[ + { + "name": genai_types.Part(text="Alice"), + }, + ], + ), + ) + + from google import genai + + genai_client = genai.Client(vertexai=True, project="my-project", location="us-central1") + genai_client.models.generate_content( + model=my_prompt.prompt_data.model, + contents=my_prompt.assemble_contents(), + ) + + Returns: + A list with a single Content object that can be used to call + generate_content(). + """ + if not self.prompt_data or not self.prompt_data.contents: + return [] + + if not self.prompt_data.variables: + return self.prompt_data.contents + + if len(self.prompt_data.contents) > 1: + raise ValueError( + "Multiple contents are not supported. Use assemble_contents() for a prompt with a single Content item." + ) + + parts_to_process = self.prompt_data.contents[0].parts + if not isinstance(parts_to_process, list): + parts_to_process = [parts_to_process] + + has_placeholders = False + variable_regex = r"\{.*?\}" + for item in parts_to_process: + part = ( + item + if isinstance(item, genai_types.Part) + else genai_types.Part(text=str(item)) + ) + if part.text and re.search(variable_regex, part.text): + has_placeholders = True + break + + if not has_placeholders: + return [genai_types.Content(parts=parts_to_process)] + + all_rendered_parts: list[genai_types.Part] = [] + + for var_dict in self.prompt_data.variables: + for template_item in parts_to_process: + template_part = ( + template_item + if isinstance(template_item, genai_types.Part) + else genai_types.Part(text=str(template_item)) + ) + if template_part.text: + rendered_text = template_part.text + + for key, value in var_dict.items(): + placeholder = f"{{{key}}}" + replacement_text = None + + if isinstance(value, str): + replacement_text = value + elif isinstance(value, 
genai_types.Part): + if value.text: + replacement_text = value.text + else: + all_rendered_parts.append(value) + if ( + replacement_text is not None + and placeholder in rendered_text + ): + rendered_text = rendered_text.replace( + placeholder, replacement_text + ) + all_rendered_parts.append(genai_types.Part(text=rendered_text)) + else: + all_rendered_parts.append(template_part) + return [genai_types.Content(parts=all_rendered_parts, role="user")] + + +class PromptDict(TypedDict, total=False): + """Represents a prompt.""" + + prompt_data: Optional["PromptDataDict"] + """""" + + +PromptOrDict = Union[Prompt, PromptDict] + + +class SchemaPromptInstanceVariableValue(_common.BaseModel): + """Represents a prompt instance variable.""" + + part_list: Optional[SchemaPromptSpecPartList] = Field( + default=None, description="""The parts of the variable value.""" + ) + + +class SchemaPromptInstanceVariableValueDict(TypedDict, total=False): + """Represents a prompt instance variable.""" + + part_list: Optional[SchemaPromptSpecPartListDict] + """The parts of the variable value.""" + + +SchemaPromptInstanceVariableValueOrDict = Union[ + SchemaPromptInstanceVariableValue, SchemaPromptInstanceVariableValueDict +] + + +class CreatePromptConfig(_common.BaseModel): + """Config for creating a prompt version.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + prompt_id: Optional[str] = Field( + default=None, + description="""The dataset id of an existing prompt dataset to create the prompt version in. If not set, a new prompt dataset will be created.""", + ) + prompt_display_name: Optional[str] = Field( + default=None, + description="""The display name for the prompt. If not set, a default name with a timestamp will be used.""", + ) + version_display_name: Optional[str] = Field( + default=None, + description="""The display name for the prompt version. 
If not set, a default name with a timestamp will be used.""", + ) + timeout: Optional[int] = Field( + default=90, + description="""The timeout for the create_version request in seconds. If not set, the default timeout is 90 seconds.""", + ) + encryption_spec: Optional[genai_types.EncryptionSpec] = Field( + default=None, + description="""Customer-managed encryption key spec for a prompt dataset. If set, this prompt dataset and all sub-resources of this prompt dataset will be secured by this key.""", + ) + + +class CreatePromptConfigDict(TypedDict, total=False): + """Config for creating a prompt version.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + prompt_id: Optional[str] + """The dataset id of an existing prompt dataset to create the prompt version in. If not set, a new prompt dataset will be created.""" + + prompt_display_name: Optional[str] + """The display name for the prompt. If not set, a default name with a timestamp will be used.""" + + version_display_name: Optional[str] + """The display name for the prompt version. If not set, a default name with a timestamp will be used.""" + + timeout: Optional[int] + """The timeout for the create_version request in seconds. If not set, the default timeout is 90 seconds.""" + + encryption_spec: Optional[genai_types.EncryptionSpecDict] + """Customer-managed encryption key spec for a prompt dataset. If set, this prompt dataset and all sub-resources of this prompt dataset will be secured by this key.""" + + +CreatePromptConfigOrDict = Union[CreatePromptConfig, CreatePromptConfigDict] + + +class GetPromptConfig(_common.BaseModel): + """Config for getting a prompt.""" + + http_options: Optional[genai_types.HttpOptions] = Field( + default=None, description="""Used to override HTTP request options.""" + ) + version_id: Optional[str] = Field( + default=None, + description="""The version id of the prompt in the prompt dataset to get. 
For example, if the full prompt resource name is projects/123/locations/us-central1/datasets/456/datasetVersions/789, then the version id is '789'.""", + ) + + +class GetPromptConfigDict(TypedDict, total=False): + """Config for getting a prompt.""" + + http_options: Optional[genai_types.HttpOptionsDict] + """Used to override HTTP request options.""" + + version_id: Optional[str] + """The version id of the prompt in the prompt dataset to get. For example, if the full prompt resource name is projects/123/locations/us-central1/datasets/456/datasetVersions/789, then the version id is '789'.""" + + +GetPromptConfigOrDict = Union[GetPromptConfig, GetPromptConfigDict] diff --git a/vertexai/agent_engines/_agent_engines.py b/vertexai/agent_engines/_agent_engines.py index 1974a0185c..414de9efbe 100644 --- a/vertexai/agent_engines/_agent_engines.py +++ b/vertexai/agent_engines/_agent_engines.py @@ -16,6 +16,7 @@ import abc import inspect import io +import json import logging import os import sys @@ -37,8 +38,6 @@ Union, ) -import proto - from google.api_core import exceptions from google.cloud import storage from google.cloud.aiplatform import base @@ -47,6 +46,9 @@ from google.cloud.aiplatform_v1 import types as aip_types from google.cloud.aiplatform_v1.types import reasoning_engine_service from vertexai.agent_engines import _utils +import httpx +import proto + from google.protobuf import field_mask_pb2 @@ -60,6 +62,9 @@ _ASYNC_API_MODE = "async" _STREAM_API_MODE = "stream" _ASYNC_STREAM_API_MODE = "async_stream" +_BIDI_STREAM_API_MODE = "bidi_stream" +_A2A_EXTENSION_MODE = "a2a_extension" +_A2A_AGENT_CARD = "a2a_agent_card" _MODE_KEY_IN_SCHEMA = "api_mode" _METHOD_NAME_KEY_IN_SCHEMA = "name" _DEFAULT_METHOD_NAME = "query" @@ -111,6 +116,32 @@ except (ImportError, AttributeError): ADKAgent = None +try: + from a2a.types import ( + AgentCard, + TransportProtocol, + Message, + TaskIdParams, + TaskQueryParams, + ) + from a2a.client import ClientConfig, ClientFactory + + 
AgentCard = AgentCard + TransportProtocol = TransportProtocol + Message = Message + ClientConfig = ClientConfig + ClientFactory = ClientFactory + TaskIdParams = TaskIdParams + TaskQueryParams = TaskQueryParams +except (ImportError, AttributeError): + AgentCard = None + TransportProtocol = None + Message = None + ClientConfig = None + ClientFactory = None + TaskIdParams = None + TaskQueryParams = None + @typing.runtime_checkable class Queryable(Protocol): @@ -148,6 +179,15 @@ def stream_query(self, **kwargs) -> Iterable[Any]: """Stream responses to serve the user query.""" +@typing.runtime_checkable +class BidiStreamQueryable(Protocol): + """Protocol for Agent Engines that can stream requests and responses.""" + + @abc.abstractmethod + async def bidi_stream_query(self, **kwargs) -> AsyncIterable[Any]: + """Asynchronously stream requests and responses to serve the user query.""" + + @typing.runtime_checkable class Cloneable(Protocol): """Protocol for Agent Engines that can be cloned.""" @@ -170,6 +210,7 @@ def register_operations(self, **kwargs) -> Dict[str, Sequence[str]]: ADKAgent, AsyncQueryable, AsyncStreamQueryable, + BidiStreamQueryable, OperationRegistrable, Queryable, StreamQueryable, @@ -894,10 +935,12 @@ def _validate_agent_engine_or_raise( * a callable method named `query` * a callable method named `stream_query` * a callable method named `async_stream_query` + * a callable method named `bidi_stream_query` * a callable method named `register_operations` Args: agent_engine: The agent engine to be validated. + logger: The logger to use for logging. Returns: The validated agent engine. 
@@ -928,6 +971,9 @@ def _validate_agent_engine_or_raise( is_async_stream_queryable = isinstance( agent_engine, AsyncStreamQueryable ) and callable(agent_engine.async_stream_query) + is_bidi_stream_queryable = isinstance( + agent_engine, BidiStreamQueryable + ) and callable(agent_engine.bidi_stream_query) is_operation_registrable = isinstance( agent_engine, OperationRegistrable ) and callable(agent_engine.register_operations) @@ -938,11 +984,12 @@ def _validate_agent_engine_or_raise( or is_stream_queryable or is_operation_registrable or is_async_stream_queryable + or is_bidi_stream_queryable ): raise TypeError( "agent_engine has none of the following callable methods: " - "`query`, `async_query`, `stream_query`, `async_stream_query` or " - "`register_operations`." + "`query`, `async_query`, `stream_query`, `async_stream_query`, " + "`bidi_stream_query` or `register_operations`." ) if is_queryable: @@ -982,6 +1029,16 @@ def _validate_agent_engine_or_raise( "agent_engine.async_stream_query method." ) from err + if is_bidi_stream_queryable: + try: + inspect.signature(getattr(agent_engine, "bidi_stream_query")) + except ValueError as err: + raise ValueError( + "Invalid bidi_stream_query signature. This might be due to a " + " missing `self` argument in the " + "agent_engine.bidi_stream_query method." + ) from err + if is_operation_registrable: try: inspect.signature(getattr(agent_engine, "register_operations")) @@ -1521,6 +1578,147 @@ async def _method(self, **kwargs) -> AsyncIterable[Any]: return _method +def _wrap_bidi_stream_query_operation( + *, method_name: str +) -> Callable[..., AsyncIterable[Any]]: + """Wraps an Agent Engine method, creating an async callable for `bidi_stream_query` API. + + This function creates a callable object that executes the specified + Agent Engine method using the `bidi_stream_query` API. It handles the + creation of the API request and the processing of the API response. 
+ + Args: + method_name: The name of the Agent Engine method to call. + + Returns: + A callable object that executes the method on the Agent Engine via + the `bidi_stream_query` API. + """ + + async def _method(self, **kwargs) -> AsyncIterable[Any]: + # Agent Engine bidi streaming query execution should use GenAI SDK Agent + # Engine live API client directly. + raise NotImplementedError( + f"{method_name} is not implemented, please use GenAI SDK Agent " + "Enginve live API client instead." + ) + + +def _wrap_a2a_operation(method_name: str, agent_card: str) -> Callable[..., list]: + """Wraps an Agent Engine method, creating a callable for A2A API. + + Args: + method_name: The name of the Agent Engine method to call. + agent_card: The agent card to use for the A2A API call. + Example: + {'additionalInterfaces': None, + 'capabilities': {'extensions': None, + 'pushNotifications': None, + 'stateTransitionHistory': None, + 'streaming': False}, + 'defaultInputModes': ['text'], + 'defaultOutputModes': ['text'], + 'description': ( + 'A helpful assistant agent that can answer questions.' + ), + 'documentationUrl': None, + 'iconUrl': None, + 'name': 'Q&A Agent', + 'preferredTransport': 'JSONRPC', + 'protocolVersion': '0.3.0', + 'provider': None, + 'security': None, + 'securitySchemes': None, + 'signatures': None, + 'skills': [{ + 'description': ( + 'A helpful assistant agent that can answer questions.' + ), + 'examples': ['Who is leading 2025 F1 Standings?', + 'Where can i find an active volcano?'], + 'id': 'question_answer', + 'inputModes': None, + 'name': 'Q&A Agent', + 'outputModes': None, + 'security': None, + 'tags': ['Question-Answer']}], + 'supportsAuthenticatedExtendedCard': True, + 'url': 'http://localhost:8080/', + 'version': '1.0.0'} + Returns: + A callable object that executes the method on the Agent Engine via + the A2A API. 
+ """ + + async def _method(self, **kwargs) -> Any: + """Wraps an Agent Engine method, creating a callable for A2A API.""" + a2a_agent_card = AgentCard(**json.loads(agent_card)) + + # A2A + AE integration currently only supports Rest API. + if ( + a2a_agent_card.preferred_transport + and a2a_agent_card.preferred_transport != TransportProtocol.http_json + ): + raise ValueError( + "Only HTTP+JSON is supported for preferred transport on agent card " + ) + + # Set preferred transport to HTTP+JSON if not set. + if not hasattr(a2a_agent_card, "preferred_transport"): + a2a_agent_card.preferred_transport = TransportProtocol.http_json + + # AE cannot support streaming yet. Turn off streaming for now. + if a2a_agent_card.capabilities and a2a_agent_card.capabilities.streaming: + raise ValueError( + "Streaming is not supported in Agent Engine, please change " + "a2a_agent_card.capabilities.streaming to False." + ) + + if not hasattr(a2a_agent_card.capabilities, "streaming"): + a2a_agent_card.capabilities.streaming = False + + # agent_card is set on the class_methods before set_up is invoked. + # Ensure that the agent_card url is set correctly before the client is created. + a2a_agent_card.url = f"https://{initializer.global_config.api_endpoint}/v1beta1/%7Bself.resource_name%7D/a2a" + + # Using a2a client, inject the auth token from the global config. + config = ClientConfig( + supported_transports=[ + TransportProtocol.http_json, + ], + use_client_preference=True, + httpx_client=httpx.AsyncClient( + headers={ + "Authorization": ( + f"Bearer {initializer.global_config.credentials.token}" + ) + } + ), + ) + factory = ClientFactory(config) + client = factory.create(a2a_agent_card) + + # kokoro job uses python 3.9, replaced match with if else. 
+ if method_name == "on_message_send": + response = client.send_message(Message(**kwargs)) + chunks = [] + async for chunk in response: + chunks.append(chunk) + return chunks + elif method_name == "on_get_task": + response = await client.get_task(TaskQueryParams(**kwargs)) + elif method_name == "on_cancel_task": + response = await client.cancel_task(TaskIdParams(**kwargs)) + elif method_name == "handle_authenticated_agent_card": + response = await client.get_card() + else: + raise ValueError(f"Unknown method name: {method_name}") + + return response + + return _method + + def _unregister_api_methods( obj: "AgentEngine", operation_schemas: Sequence[_utils.JsonDict] ): @@ -1596,6 +1794,8 @@ def _register_api_methods_or_raise( _ASYNC_API_MODE: _wrap_async_query_operation, _STREAM_API_MODE: _wrap_stream_query_operation, _ASYNC_STREAM_API_MODE: _wrap_async_stream_query_operation, + _BIDI_STREAM_API_MODE: _wrap_bidi_stream_query_operation, + _A2A_EXTENSION_MODE: _wrap_a2a_operation, } if isinstance(wrap_operation_fn, dict) and api_mode in wrap_operation_fn: # Override the default function with user-specified function if it exists. @@ -1612,7 +1812,11 @@ def _register_api_methods_or_raise( ) # Bind the method to the object. - method = _wrap_operation(method_name=method_name) + if api_mode == _A2A_EXTENSION_MODE: + agent_card = operation_schema.get(_A2A_AGENT_CARD) + method = _wrap_operation(method_name=method_name, agent_card=agent_card) + else: + method = _wrap_operation(method_name=method_name) method.__name__ = method_name method.__doc__ = method_description setattr(obj, method_name, types.MethodType(method, obj)) @@ -1669,6 +1873,13 @@ def _generate_class_methods_spec_or_raise( class_methods_spec = [] for mode, method_names in operations.items(): for method_name in method_names: + if mode == _BIDI_STREAM_API_MODE: + _LOGGER.warning( + "Bidi stream API mode is not supported yet in Vertex SDK, " + "please use the GenAI SDK instead. Skipping " + f"method {method_name}." 
+ ) + continue if not hasattr(agent_engine, method_name): raise ValueError( f"Method `{method_name}` defined in `register_operations`" @@ -1684,6 +1895,11 @@ def _generate_class_methods_spec_or_raise( class_method = _utils.to_proto(schema_dict) class_method[_MODE_KEY_IN_SCHEMA] = mode + # A2A agent card is a special case, when running in A2A mode, + if hasattr(agent_engine, "agent_card"): + class_method[_A2A_AGENT_CARD] = getattr( + agent_engine, "agent_card" + ).model_dump_json() class_methods_spec.append(class_method) return class_methods_spec diff --git a/vertexai/agent_engines/_utils.py b/vertexai/agent_engines/_utils.py index 26f3980e70..f7c359c93d 100644 --- a/vertexai/agent_engines/_utils.py +++ b/vertexai/agent_engines/_utils.py @@ -71,9 +71,9 @@ _STDLIB_MODULE_NAMES: frozenset = frozenset() try: - _PACKAGE_DISTRIBUTIONS: Mapping[ - str, Sequence[str] - ] = importlib_metadata.packages_distributions() + _PACKAGE_DISTRIBUTIONS: Mapping[str, Sequence[str]] = ( + importlib_metadata.packages_distributions() + ) except AttributeError: _PACKAGE_DISTRIBUTIONS: Mapping[str, Sequence[str]] = {} @@ -501,6 +501,22 @@ def scan_requirements( return {module: importlib_metadata.version(module) for module in modules_found} +def _is_pydantic_serializable(param: inspect.Parameter) -> bool: + """Checks if the parameter is pydantic serializable.""" + + if param.annotation == inspect.Parameter.empty: + return True + + if isinstance(param.annotation, str): + return False + pydantic = _import_pydantic_or_raise() + try: + pydantic.TypeAdapter(param.annotation) + return True + except Exception: + return False + + def generate_schema( f: Callable[..., Any], *, @@ -560,6 +576,7 @@ def generate_schema( inspect.Parameter.KEYWORD_ONLY, inspect.Parameter.POSITIONAL_ONLY, ) + and _is_pydantic_serializable(param) } parameters = pydantic.create_model(f.__name__, **fields_dict).schema() # Postprocessing diff --git a/vertexai/agent_engines/templates/adk.py 
b/vertexai/agent_engines/templates/adk.py index 5d788a8bbc..e078c9a21a 100644 --- a/vertexai/agent_engines/templates/adk.py +++ b/vertexai/agent_engines/templates/adk.py @@ -492,7 +492,7 @@ def clone(self): """Returns a clone of the ADK application.""" import copy - return AdkApp( + return self.__class__( agent=copy.deepcopy(self._tmpl_attrs.get("agent")), enable_tracing=self._tmpl_attrs.get("enable_tracing"), app_name=self._tmpl_attrs.get("app_name"), diff --git a/vertexai/agent_engines/templates/langchain.py b/vertexai/agent_engines/templates/langchain.py index 717fa10396..49325b7480 100644 --- a/vertexai/agent_engines/templates/langchain.py +++ b/vertexai/agent_engines/templates/langchain.py @@ -148,9 +148,11 @@ def _default_runnable_builder( agent_executor = AgentExecutor( agent=prompt | model | output_parser, tools=[ - tool - if isinstance(tool, lc_tools.BaseTool) - else StructuredTool.from_function(tool) + ( + tool + if isinstance(tool, lc_tools.BaseTool) + else StructuredTool.from_function(tool) + ) for tool in tools if isinstance(tool, (Callable, lc_tools.BaseTool)) ], diff --git a/vertexai/caching/_caching.py b/vertexai/caching/_caching.py index aa3ecda64a..96ff9db7af 100644 --- a/vertexai/caching/_caching.py +++ b/vertexai/caching/_caching.py @@ -106,9 +106,9 @@ def _prepare_create_request( expire_time=expire_time, ttl=ttl, display_name=display_name, - encryption_spec=EncryptionSpec(kms_key_name=kms_key_name) - if kms_key_name - else None, + encryption_spec=( + EncryptionSpec(kms_key_name=kms_key_name) if kms_key_name else None + ), ), ) serialized_message_v1beta1 = type(request_v1beta1).serialize(request_v1beta1) diff --git a/vertexai/evaluation/_evaluation.py b/vertexai/evaluation/_evaluation.py index c4387d0855..432ffbc72b 100644 --- a/vertexai/evaluation/_evaluation.py +++ b/vertexai/evaluation/_evaluation.py @@ -223,9 +223,9 @@ def _compute_custom_metrics( for future in futures_list: metric_output = future.result() try: - row_dict[ - 
f"{custom_metric.name}/{constants.MetricResult.SCORE_KEY}" - ] = metric_output[custom_metric.name] + row_dict[f"{custom_metric.name}/{constants.MetricResult.SCORE_KEY}"] = ( + metric_output[custom_metric.name] + ) except KeyError: raise KeyError( f"Custom metric score `{custom_metric.name}` not found in" @@ -243,7 +243,10 @@ def _compute_custom_metrics( def _separate_custom_metrics( metrics: List[Union[str, metrics_base._Metric]], -) -> Tuple[List[Union[str, metrics_base._Metric]], List[metrics_base.CustomMetric],]: +) -> Tuple[ + List[Union[str, metrics_base._Metric]], + List[metrics_base.CustomMetric], +]: """Separates the metrics list into API and custom metrics.""" custom_metrics = [] api_metrics = [] @@ -498,9 +501,9 @@ def _run_model_inference( ) t2 = time.perf_counter() _LOGGER.info(f"Multithreaded Batch Inference took: {t2 - t1} seconds.") - evaluation_run_config.metric_column_mapping[ + evaluation_run_config.metric_column_mapping[response_column_name] = ( response_column_name - ] = response_column_name + ) else: raise ValueError( "Missing required input `prompt` column to start model inference." @@ -586,15 +589,15 @@ def _assemble_prompt_for_dataset( ) try: - evaluation_run_config.dataset[ - constants.Dataset.PROMPT_COLUMN - ] = evaluation_run_config.dataset.apply( - lambda row: str( - prompt_template.assemble( - **row[list(prompt_template.variables)].astype(str).to_dict(), - ) - ), - axis=1, + evaluation_run_config.dataset[constants.Dataset.PROMPT_COLUMN] = ( + evaluation_run_config.dataset.apply( + lambda row: str( + prompt_template.assemble( + **row[list(prompt_template.variables)].astype(str).to_dict(), + ) + ), + axis=1, + ) ) if ( constants.Dataset.PROMPT_COLUMN @@ -611,9 +614,9 @@ def _assemble_prompt_for_dataset( " parameter is provided. Please verify that you want to use" " the assembled `prompt` column for evaluation." 
) - evaluation_run_config.metric_column_mapping[ + evaluation_run_config.metric_column_mapping[constants.Dataset.PROMPT_COLUMN] = ( constants.Dataset.PROMPT_COLUMN - ] = constants.Dataset.PROMPT_COLUMN + ) except Exception as e: raise ValueError( f"Failed to assemble prompt template: {e}. Please make sure all" diff --git a/vertexai/evaluation/eval_task.py b/vertexai/evaluation/eval_task.py index 2250440bb1..fac0ae3437 100644 --- a/vertexai/evaluation/eval_task.py +++ b/vertexai/evaluation/eval_task.py @@ -573,9 +573,9 @@ def _verify_and_set_response_column_name( """Verifies and sets the model response column names.""" if response_column_name: if response_column_name in self._dataset.columns: - self._metric_column_mapping[ - metric_column_mapping_key - ] = response_column_name + self._metric_column_mapping[metric_column_mapping_key] = ( + response_column_name + ) else: raise ValueError( f"(Baseline) Model response column {response_column_name} is not" diff --git a/vertexai/example_stores/_example_stores.py b/vertexai/example_stores/_example_stores.py index c9f249bc9d..788851e1c4 100644 --- a/vertexai/example_stores/_example_stores.py +++ b/vertexai/example_stores/_example_stores.py @@ -304,7 +304,7 @@ def _coerce_to_dict( _ExampleLikeOrDict, ExpectedContent, _ContentOrDict, - ] + ], ): if isinstance(obj, generative_models.Content): return obj.to_dict() @@ -388,7 +388,7 @@ def _coerce_to_dict( def _coerce_to_example( - example: Union[_ExampleLike, _ExampleLikeOrDict] + example: Union[_ExampleLike, _ExampleLikeOrDict], ) -> types.Example: if isinstance(example, types.ContentsExample): return types.Example( diff --git a/vertexai/generative_models/_generative_models.py b/vertexai/generative_models/_generative_models.py index e54d3c9e6a..11abb10f38 100644 --- a/vertexai/generative_models/_generative_models.py +++ b/vertexai/generative_models/_generative_models.py @@ -649,8 +649,7 @@ def generate_content( tool_config: Optional["ToolConfig"] = None, labels: 
Optional[Dict[str, str]] = None, stream: Literal[False] = False, - ) -> "GenerationResponse": - ... + ) -> "GenerationResponse": ... @overload def generate_content( @@ -663,8 +662,7 @@ def generate_content( tool_config: Optional["ToolConfig"] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[True], - ) -> Iterable["GenerationResponse"]: - ... + ) -> Iterable["GenerationResponse"]: ... def generate_content( self, @@ -729,8 +727,7 @@ async def generate_content_async( tool_config: Optional["ToolConfig"] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[False] = False, - ) -> "GenerationResponse": - ... + ) -> "GenerationResponse": ... @overload async def generate_content_async( @@ -743,8 +740,7 @@ async def generate_content_async( tool_config: Optional["ToolConfig"] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[True] = True, - ) -> AsyncIterable["GenerationResponse"]: - ... + ) -> AsyncIterable["GenerationResponse"]: ... async def generate_content_async( self, @@ -1295,8 +1291,7 @@ def send_message( tools: Optional[List["Tool"]] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[False] = False, - ) -> "GenerationResponse": - ... + ) -> "GenerationResponse": ... @overload def send_message( @@ -1308,8 +1303,7 @@ def send_message( tools: Optional[List["Tool"]] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[True] = True, - ) -> Iterable["GenerationResponse"]: - ... + ) -> Iterable["GenerationResponse"]: ... def send_message( self, @@ -1369,8 +1363,7 @@ def send_message_async( tools: Optional[List["Tool"]] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[False] = False, - ) -> Awaitable["GenerationResponse"]: - ... + ) -> Awaitable["GenerationResponse"]: ... 
@overload def send_message_async( @@ -1382,8 +1375,7 @@ def send_message_async( tools: Optional[List["Tool"]] = None, labels: Optional[Dict[str, str]] = None, stream: Literal[True] = True, - ) -> Awaitable[AsyncIterable["GenerationResponse"]]: - ... + ) -> Awaitable[AsyncIterable["GenerationResponse"]]: ... def send_message_async( self, @@ -2974,9 +2966,11 @@ def __init__( ): """Initializes a Google Search Retrieval tool.""" self._raw_google_search_retrieval = gapic_tool_types.GoogleSearchRetrieval( - dynamic_retrieval_config=dynamic_retrieval_config._raw_dynamic_retrieval_config - if dynamic_retrieval_config - else None + dynamic_retrieval_config=( + dynamic_retrieval_config._raw_dynamic_retrieval_config + if dynamic_retrieval_config + else None + ) ) class Retrieval: diff --git a/vertexai/language_models/_distillation.py b/vertexai/language_models/_distillation.py index ae018f8a44..2fc8f691ae 100644 --- a/vertexai/language_models/_distillation.py +++ b/vertexai/language_models/_distillation.py @@ -106,20 +106,20 @@ def submit_distillation_pipeline_job( if evaluation_spec is not None: pipeline_arguments["evaluation_data_uri"] = evaluation_spec.evaluation_data pipeline_arguments["evaluation_interval"] = evaluation_spec.evaluation_interval - pipeline_arguments[ - "enable_early_stopping" - ] = evaluation_spec.enable_early_stopping - pipeline_arguments[ - "enable_checkpoint_selection" - ] = evaluation_spec.enable_checkpoint_selection + pipeline_arguments["enable_early_stopping"] = ( + evaluation_spec.enable_early_stopping + ) + pipeline_arguments["enable_checkpoint_selection"] = ( + evaluation_spec.enable_checkpoint_selection + ) pipeline_arguments["tensorboard_resource_id"] = evaluation_spec.tensorboard # pipeline_parameter_values["evaluation_output_root_dir"] = ... 
if accelerator_type is not None: pipeline_arguments["accelerator_type"] = accelerator_type if aiplatform_initializer.global_config.encryption_spec_key_name is not None: - pipeline_arguments[ - "encryption_spec_key_name" - ] = aiplatform_initializer.global_config.encryption_spec_key_name + pipeline_arguments["encryption_spec_key_name"] = ( + aiplatform_initializer.global_config.encryption_spec_key_name + ) if max_context_length is not None: pipeline_arguments["max_context_length"] = max_context_length if model_display_name is None: diff --git a/vertexai/language_models/_evaluatable_language_models.py b/vertexai/language_models/_evaluatable_language_models.py index da2d124da8..8c8e71f677 100644 --- a/vertexai/language_models/_evaluatable_language_models.py +++ b/vertexai/language_models/_evaluatable_language_models.py @@ -419,7 +419,7 @@ def _convert_metrics_dict_to_response_type( def _format_classification_metrics( - metrics: Dict[str, Any] + metrics: Dict[str, Any], ) -> EvaluationSlicedClassificationMetric: """Reformats classification metrics returned by the eval pipeline to make them more readable. 
diff --git a/vertexai/language_models/_language_models.py b/vertexai/language_models/_language_models.py index 47625d6ad2..79880b0937 100644 --- a/vertexai/language_models/_language_models.py +++ b/vertexai/language_models/_language_models.py @@ -312,9 +312,9 @@ def tune_model( if eval_spec.evaluation_data: if isinstance(eval_spec.evaluation_data, str): if eval_spec.evaluation_data.startswith("gs://"): - tuning_parameters[ - "evaluation_data_uri" - ] = eval_spec.evaluation_data + tuning_parameters["evaluation_data_uri"] = ( + eval_spec.evaluation_data + ) else: raise ValueError("evaluation_data should be a GCS URI") else: @@ -322,13 +322,13 @@ def tune_model( if eval_spec.evaluation_interval is not None: tuning_parameters["evaluation_interval"] = eval_spec.evaluation_interval if eval_spec.enable_early_stopping is not None: - tuning_parameters[ - "enable_early_stopping" - ] = eval_spec.enable_early_stopping + tuning_parameters["enable_early_stopping"] = ( + eval_spec.enable_early_stopping + ) if eval_spec.enable_checkpoint_selection is not None: - tuning_parameters[ - "enable_checkpoint_selection" - ] = eval_spec.enable_checkpoint_selection + tuning_parameters["enable_checkpoint_selection"] = ( + eval_spec.enable_checkpoint_selection + ) tensorboard_resource_id = _get_tensorboard_resource_id_from_evaluation_spec( eval_spec, tuning_job_location ) @@ -435,9 +435,9 @@ def _tune_model( } ) if aiplatform_initializer.global_config.encryption_spec_key_name: - tuning_parameters[ - "encryption_spec_key_name" - ] = aiplatform_initializer.global_config.encryption_spec_key_name + tuning_parameters["encryption_spec_key_name"] = ( + aiplatform_initializer.global_config.encryption_spec_key_name + ) if not model_info.tuning_pipeline_uri: raise RuntimeError(f"The {self._model_id} model does not support tuning") @@ -1671,7 +1671,9 @@ async def predict_streaming_async( ) prediction_service_async_client = self._endpoint._prediction_async_client - async for prediction_dict in 
_streaming_prediction.predict_stream_of_dicts_from_single_dict_async( + async for ( + prediction_dict + ) in _streaming_prediction.predict_stream_of_dicts_from_single_dict_async( prediction_service_async_client=prediction_service_async_client, endpoint_name=self._endpoint_name, instance=prediction_request.instance, @@ -1773,9 +1775,9 @@ def _create_text_generation_prediction_request( prediction_parameters["candidateCount"] = candidate_count if grounding_source is not None: - prediction_parameters[ - "groundingConfig" - ] = grounding_source._to_grounding_source_dict() + prediction_parameters["groundingConfig"] = ( + grounding_source._to_grounding_source_dict() + ) if logprobs is not None: prediction_parameters["logprobs"] = logprobs @@ -2056,9 +2058,11 @@ def send_message( response_obj = self._model.predict( prompt=new_history_text, - max_output_tokens=max_output_tokens - if max_output_tokens is not None - else self._max_output_tokens, + max_output_tokens=( + max_output_tokens + if max_output_tokens is not None + else self._max_output_tokens + ), temperature=temperature if temperature is not None else self._temperature, top_k=top_k if top_k is not None else self._top_k, top_p=top_p if top_p is not None else self._top_p, @@ -2831,9 +2835,9 @@ def _prepare_request( prediction_parameters["candidateCount"] = candidate_count if grounding_source is not None: - prediction_parameters[ - "groundingConfig" - ] = grounding_source._to_grounding_source_dict() + prediction_parameters["groundingConfig"] = ( + grounding_source._to_grounding_source_dict() + ) message_structs = [] for past_message in self._message_history: @@ -3169,7 +3173,9 @@ async def send_message_streaming_async( full_response_text = "" - async for prediction_dict in _streaming_prediction.predict_stream_of_dicts_from_single_dict_async( + async for ( + prediction_dict + ) in _streaming_prediction.predict_stream_of_dicts_from_single_dict_async( prediction_service_async_client=prediction_service_async_client, 
endpoint_name=self._model._endpoint_name, instance=prediction_request.instance, @@ -3672,7 +3678,9 @@ async def predict_streaming_async( ) prediction_service_async_client = self._endpoint._prediction_async_client - async for prediction_dict in _streaming_prediction.predict_stream_of_dicts_from_single_dict_async( + async for ( + prediction_dict + ) in _streaming_prediction.predict_stream_of_dicts_from_single_dict_async( prediction_service_async_client=prediction_service_async_client, endpoint_name=self._endpoint_name, instance=prediction_request.instance, diff --git a/vertexai/preview/evaluation/_evaluation.py b/vertexai/preview/evaluation/_evaluation.py index 54b2e51536..fbbc670332 100644 --- a/vertexai/preview/evaluation/_evaluation.py +++ b/vertexai/preview/evaluation/_evaluation.py @@ -262,9 +262,9 @@ def _compute_custom_metrics( for future in futures_list: metric_output = future.result() try: - row_dict[ - f"{custom_metric.name}/{constants.MetricResult.SCORE_KEY}" - ] = metric_output[custom_metric.name] + row_dict[f"{custom_metric.name}/{constants.MetricResult.SCORE_KEY}"] = ( + metric_output[custom_metric.name] + ) except KeyError: raise KeyError( f"Custom metric score `{custom_metric.name}` not found in" @@ -282,7 +282,10 @@ def _compute_custom_metrics( def _separate_custom_metrics( metrics: List[Union[str, metrics_base._Metric]], -) -> Tuple[List[Union[str, metrics_base._Metric]], List[metrics_base.CustomMetric],]: +) -> Tuple[ + List[Union[str, metrics_base._Metric]], + List[metrics_base.CustomMetric], +]: """Separates the metrics list into API and custom metrics.""" custom_metrics = [] api_metrics = [] @@ -419,9 +422,9 @@ def _run_model_inference( ) t2 = time.perf_counter() _LOGGER.info(f"Multithreaded Batch Inference took: {t2 - t1} seconds.") - evaluation_run_config.metric_column_mapping[ + evaluation_run_config.metric_column_mapping[response_column_name] = ( response_column_name - ] = response_column_name + ) else: raise ValueError( "Missing required 
input `prompt` column to start model inference." @@ -606,9 +609,9 @@ def _run_runnable_inference( evaluation_run_config.dataset = evaluation_run_config.dataset.assign( failure=failure_list ) - evaluation_run_config.metric_column_mapping[ + evaluation_run_config.metric_column_mapping[response_column_name] = ( response_column_name - ] = response_column_name + ) if trajectory_list: evaluation_run_config.dataset = evaluation_run_config.dataset.assign( predicted_trajectory=trajectory_list @@ -664,11 +667,11 @@ def _assemble_prompt_for_dataset( ) try: - evaluation_run_config.dataset[ - constants.Dataset.PROMPT_COLUMN - ] = evaluation_run_config.dataset.apply( - lambda row: _pre_eval_utils._assemble_prompt(row, prompt_template), - axis=1, + evaluation_run_config.dataset[constants.Dataset.PROMPT_COLUMN] = ( + evaluation_run_config.dataset.apply( + lambda row: _pre_eval_utils._assemble_prompt(row, prompt_template), + axis=1, + ) ) if ( constants.Dataset.PROMPT_COLUMN @@ -685,9 +688,9 @@ def _assemble_prompt_for_dataset( " parameter is provided. Please verify that you want to use" " the assembled `prompt` column for evaluation." ) - evaluation_run_config.metric_column_mapping[ + evaluation_run_config.metric_column_mapping[constants.Dataset.PROMPT_COLUMN] = ( constants.Dataset.PROMPT_COLUMN - ] = constants.Dataset.PROMPT_COLUMN + ) except Exception as e: raise ValueError( f"Failed to assemble prompt template: {e}. 
Please make sure all" @@ -1007,7 +1010,7 @@ def _convert_metric_prompt_template_example(metrics): def _get_rubric_metric_with_idx( - metrics: List[Union[str, metrics_base._Metric]] + metrics: List[Union[str, metrics_base._Metric]], ) -> Optional[Tuple[rubric_based_metric.RubricBasedMetric, int]]: """Gets the rubric metric with its index in the metrics list.""" for i, metric in enumerate(metrics): @@ -1016,6 +1019,24 @@ def _get_rubric_metric_with_idx( return None +def _safe_deepcopy_rubric_generation_config( + generation_config: metrics_base.RubricGenerationConfig, +) -> metrics_base.RubricGenerationConfig: + """Safely deepcopies the rubric generation config of RubricBasedMetric.""" + copied_generation_config = None + + if generation_config: + generation_config_model = generation_config.model + # Cannot deepcopy instance of GenerativeModel due to its cygrpc.Channel + generation_config.model = None + copied_generation_config = copy.deepcopy(generation_config) + + if generation_config_model: + copied_generation_config.model = generation_config_model + + return copied_generation_config + + def evaluate( dataset: "pd.DataFrame", metrics: List[Union[str, metrics_base._Metric]], @@ -1092,7 +1113,9 @@ def evaluate( elif isinstance(metric, rubric_based_metric.RubricBasedMetric): copied_metrics.append( rubric_based_metric.RubricBasedMetric( - generation_config=copy.deepcopy(metric.generation_config), + generation_config=_safe_deepcopy_rubric_generation_config( + metric.generation_config, + ), critique_metric=copy.deepcopy(metric.critique_metric), ) ) diff --git a/vertexai/preview/evaluation/eval_task.py b/vertexai/preview/evaluation/eval_task.py index 90f01de9d9..8065d5994c 100644 --- a/vertexai/preview/evaluation/eval_task.py +++ b/vertexai/preview/evaluation/eval_task.py @@ -611,9 +611,9 @@ def _verify_and_set_response_column_name( """Verifies and sets the model response column names.""" if response_column_name: if response_column_name in self._dataset.columns: - 
self._metric_column_mapping[ - metric_column_mapping_key - ] = response_column_name + self._metric_column_mapping[metric_column_mapping_key] = ( + response_column_name + ) else: raise ValueError( f"(Baseline) Model response column {response_column_name} is not" diff --git a/vertexai/preview/evaluation/metrics/_instance_evaluation.py b/vertexai/preview/evaluation/metrics/_instance_evaluation.py index 83738b5b76..2a4420c208 100644 --- a/vertexai/preview/evaluation/metrics/_instance_evaluation.py +++ b/vertexai/preview/evaluation/metrics/_instance_evaluation.py @@ -257,10 +257,10 @@ def build_request( _default_templates.PAIRWISE_MULTIMODAL_UNDERSTANDING_RUBRIC_CRITIQUE_TEMPLATE, _default_templates.PAIRWISE_TEXT_QUALITY_RUBRIC_CRITIQUE_TEMPLATE, ): - model_based_metric_instance_input[ - constants.Dataset.RUBRICS_COLUMN - ] = _format_rubrics( - model_based_metric_instance_input[constants.Dataset.RUBRICS_COLUMN] + model_based_metric_instance_input[constants.Dataset.RUBRICS_COLUMN] = ( + _format_rubrics( + model_based_metric_instance_input[constants.Dataset.RUBRICS_COLUMN] + ) ) if ( constants.Dataset.RUBRICS_COLUMN in model_based_metric_instance_input @@ -269,10 +269,10 @@ def build_request( List, ) ): - model_based_metric_instance_input[ - constants.Dataset.RUBRICS_COLUMN - ] = "\n".join( - model_based_metric_instance_input[constants.Dataset.RUBRICS_COLUMN] + model_based_metric_instance_input[constants.Dataset.RUBRICS_COLUMN] = ( + "\n".join( + model_based_metric_instance_input[constants.Dataset.RUBRICS_COLUMN] + ) ) if metric_name == constants.Metric.EXACT_MATCH: diff --git a/vertexai/preview/rag/utils/_gapic_utils.py b/vertexai/preview/rag/utils/_gapic_utils.py index 3501142c68..4730c37b95 100644 --- a/vertexai/preview/rag/utils/_gapic_utils.py +++ b/vertexai/preview/rag/utils/_gapic_utils.py @@ -437,7 +437,7 @@ def convert_path_to_resource_id( def convert_source_for_rag_import( - source: Union[SlackChannelsSource, JiraSource, SharePointSources] + source: 
Union[SlackChannelsSource, JiraSource, SharePointSources], ) -> Union[GapicSlackSource, GapicJiraSource]: """Converts a SlackChannelsSource or JiraSource to a GapicSlackSource or GapicJiraSource.""" if isinstance(source, SlackChannelsSource): diff --git a/vertexai/preview/reasoning_engines/__init__.py b/vertexai/preview/reasoning_engines/__init__.py index 66cabdb2c3..327a48d662 100644 --- a/vertexai/preview/reasoning_engines/__init__.py +++ b/vertexai/preview/reasoning_engines/__init__.py @@ -15,11 +15,14 @@ """Classes for working with reasoning engines.""" # We just want to re-export certain classes -# pylint: disable=g-multiple-import,g-importing-member +# pylint: disable=g-multiple-import,g-importing-member, g-bad-import-order from vertexai.reasoning_engines._reasoning_engines import ( Queryable, ReasoningEngine, ) +from vertexai.preview.reasoning_engines.templates.a2a import ( + A2aAgent, +) from vertexai.preview.reasoning_engines.templates.adk import ( AdkApp, ) @@ -37,6 +40,7 @@ ) __all__ = ( + "A2aAgent", "AdkApp", "AG2Agent", "LangchainAgent", diff --git a/vertexai/preview/reasoning_engines/templates/a2a.py b/vertexai/preview/reasoning_engines/templates/a2a.py new file mode 100644 index 0000000000..48e59cd55e --- /dev/null +++ b/vertexai/preview/reasoning_engines/templates/a2a.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +from typing import Any, Callable, Dict, List, Mapping, Optional, TYPE_CHECKING + + +if TYPE_CHECKING: + try: + from a2a.server.request_handlers import RequestHandler + from a2a.server.tasks import TaskStore + from a2a.types import AgentCard, AgentSkill + from a2a.server.agent_execution import AgentExecutor + from a2a.server.context import ServerCallContext + + RequestHandler = RequestHandler + TaskStore = TaskStore + AgentCard = AgentCard + AgentSkill = AgentSkill + AgentExecutor = AgentExecutor + ServerCallContext = ServerCallContext + except (ImportError, AttributeError): + RequestHandler = Any + TaskStore = Any + AgentCard = Any + AgentSkill = Any + AgentExecutor = Any + ServerCallContext = Any + + try: + from fastapi import Request + + Request = Request + except (ImportError, AttributeError): + Request = Any + + +def create_agent_card( + agent_name: Optional[str] = None, + description: Optional[str] = None, + skills: Optional[List["AgentSkill"]] = None, + agent_card: Optional[Dict[str, Any]] = None, +) -> "AgentCard": + """Creates an AgentCard object. + + The function can be called in two ways: + 1. By providing the individual parameters: agent_name, description, and skills. + 2. By providing a single dictionary containing all the data. + + If a dictionary is provided, the other parameters are ignored. + + Args: + agent_name (Optional[str]): The name of the agent. + description (Optional[str]): A description of the agent. + skills (Optional[List[AgentSkill]]): A list of AgentSkills. + agent_card (Optional[Dict[str, Any]]): Agent Card as a dictionary. + + Returns: + AgentCard: A fully constructed AgentCard object. + + Raises: + ValueError: If neither a dictionary nor the required parameters are provided. + """ + # pylint: disable=g-import-not-at-top + from a2a.types import AgentCard, AgentCapabilities, TransportProtocol + + # Check if a dictionary was provided. 
+ if agent_card: + return AgentCard(**agent_card) + + # If no dictionary, use the individual parameters. + elif agent_name and description and skills: + return AgentCard( + name=agent_name, + description=description, + url="http://localhost:9999/", + version="1.0.0", + default_input_modes=["text"], + default_output_modes=["text"], + # Agent Engine does not support streaming yet + capabilities=AgentCapabilities(streaming=False), + skills=skills, + preferred_transport=TransportProtocol.http_json, # Http Only. + supports_authenticated_extended_card=True, + ) + + # Raise an error if insufficient data is provided. + else: + raise ValueError( + "Please provide either an agent_card or all of the required " + "parameters (agent_name, description, and skills)." + ) + + +def default_a2a_agent() -> "A2aAgent": + """Creates a default A2aAgent instance.""" + # pylint: disable=g-import-not-at-top + from a2a.server.agent_execution import AgentExecutor, RequestContext + from a2a.types import AgentSkill + from a2a.server.events import EventQueue + from a2a.utils import new_agent_text_message + + skill = AgentSkill( + id="hello_world", + name="Returns hello world", + description="just returns hello world", + tags=["hello world"], + examples=["hi", "hello world"], + ) + agent_card = create_agent_card( + agent_name="Hello World Agent", + description="Just a hello world agent", + skills=[skill], + ) + + class HelloWorldAgentExecutor(AgentExecutor): + """Hello World Agent Executor.""" + + def get_agent_response(self) -> str: + return "Hello World" + + async def execute( + self, + context: RequestContext, + event_queue: EventQueue, + ) -> None: + result = self.get_agent_response() + await event_queue.enqueue_event(new_agent_text_message(result)) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + raise Exception("cancel not supported") + + return A2aAgent( + agent_card=agent_card, + agent_executor_builder=HelloWorldAgentExecutor, + ) + + 
+class A2aAgent: + """A class to initialize and set up an Agent-to-Agent application.""" + + # TODO: Add instrumentation for the A2A agent. + def __init__( + self, + *, + agent_card: "AgentCard", + task_store_builder: Callable[..., "TaskStore"] = None, + task_store_kwargs: Optional[Mapping[str, Any]] = None, + agent_executor_kwargs: Optional[Mapping[str, Any]] = None, + agent_executor_builder: Optional[Callable[..., "AgentExecutor"]] = None, + request_handler_kwargs: Optional[Mapping[str, Any]] = None, + request_handler_builder: Optional[Callable[..., "RequestHandler"]] = None, + extended_agent_card: "AgentCard" = None, + ): + """Initializes the A2A agent.""" + # pylint: disable=g-import-not-at-top + from google.cloud.aiplatform import initializer + from a2a.types import TransportProtocol + + if ( + agent_card.preferred_transport + and agent_card.preferred_transport != TransportProtocol.http_json + ): + raise ValueError( + "Only HTTP+JSON is supported for preferred transport on agent card " + ) + if agent_card.capabilities and agent_card.capabilities.streaming: + raise ValueError("Streaming is not supported by Agent Engine") + + self._tmpl_attrs: dict[str, Any] = { + "project": initializer.global_config.project, + "location": initializer.global_config.location, + "agent_card": agent_card, + "agent_executor": None, + "agent_executor_kwargs": agent_executor_kwargs or {}, + "agent_executor_builder": agent_executor_builder, + "task_store": None, + "task_store_kwargs": task_store_kwargs or {}, + "task_store_builder": task_store_builder, + "request_handler": None, + "request_handler_kwargs": request_handler_kwargs or {}, + "request_handler_builder": request_handler_builder, + "extended_agent_card": extended_agent_card, + } + self.agent_card = agent_card + self.a2a_rest_adapter = None + self.request_handler = None + self.rest_handler = None + self.task_store = None + self.agent_executor = None + + def clone(self) -> "A2aAgent": + """Clones the A2A agent.""" + import copy 
+ + return A2aAgent( + agent_card=copy.deepcopy(self.agent_card), + task_store_builder=self._tmpl_attrs.get("task_store_builder"), + task_store_kwargs=self._tmpl_attrs.get("task_store_kwargs"), + agent_executor_kwargs=self._tmpl_attrs.get("agent_executor_kwargs"), + agent_executor_builder=self._tmpl_attrs.get("agent_executor_builder"), + request_handler_kwargs=self._tmpl_attrs.get("request_handler_kwargs"), + request_handler_builder=self._tmpl_attrs.get("request_handler_builder"), + extended_agent_card=self._tmpl_attrs.get("extended_agent_card"), + ) + + def set_up(self): + """Sets up the A2A application.""" + # pylint: disable=g-import-not-at-top + from a2a.server.apps.rest.rest_adapter import RESTAdapter + from a2a.server.request_handlers.rest_handler import RESTHandler + from a2a.server.request_handlers import DefaultRequestHandler + from a2a.server.tasks import InMemoryTaskStore + + os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "1" + project = self._tmpl_attrs.get("project") + os.environ["GOOGLE_CLOUD_PROJECT"] = project + location = self._tmpl_attrs.get("location") + os.environ["GOOGLE_CLOUD_LOCATION"] = location + agent_engine_id = os.getenv("GOOGLE_CLOUD_AGENT_ENGINE_ID", "test-agent-engine") + version = "v1beta1" + + self.agent_card.url = f"https://{location}-aiplatform.googleapis.com/%7Bversion%7D/projects/%7Bproject%7D/locations/%7Blocation%7D/reasoningEngines/%7Bagent_engine_id%7D/a2a" + self._tmpl_attrs["agent_card"] = self.agent_card + + # Create the agent executor if a builder is provided. + agent_executor_builder = self._tmpl_attrs.get("agent_executor_builder") + if agent_executor_builder: + self._tmpl_attrs["agent_executor"] = agent_executor_builder( + **self._tmpl_attrs.get("agent_executor_kwargs") + ) + self.agent_executor = self._tmpl_attrs.get("agent_executor") + + # Create the task store if a builder is provided. 
+ task_store_builder = self._tmpl_attrs.get("task_store_builder") + if task_store_builder: + self.task_store = task_store_builder( + **self._tmpl_attrs.get("task_store_kwargs") + ) + else: + # Use the default task store if not provided. This could potentially + # lead to unexpected behavior if the agent is running on + # multiple instances. + self.task_store = InMemoryTaskStore() + + self._tmpl_attrs["task_store"] = self.task_store + + # Create the request handler if a builder is provided. + request_handler_builder = self._tmpl_attrs.get("request_handler_builder") + if request_handler_builder: + self.request_handler = request_handler_builder( + **self._tmpl_attrs.get("request_handler_kwargs") + ) + else: + # Use the default request handler if not provided. + self.request_handler = DefaultRequestHandler( + agent_executor=self._tmpl_attrs.get("agent_executor"), + task_store=self.task_store, + ) + + self._tmpl_attrs["request_handler"] = self.request_handler + + # a2a_rest_adapter is used to register the A2A API routes in the + # Reasoning Engine API router. + self.a2a_rest_adapter = RESTAdapter( + agent_card=self.agent_card, + http_handler=self._tmpl_attrs.get("request_handler"), + extended_agent_card=self._tmpl_attrs.get("extended_agent_card"), + ) + + # rest_handler is used to handle the A2A API requests. 
+ self.rest_handler = RESTHandler( + agent_card=self.agent_card, + request_handler=self._tmpl_attrs.get("request_handler"), + ) + + async def on_message_send( + self, + request: "Request", + context: "ServerCallContext", + ) -> dict[str, Any]: + return await self.rest_handler.on_message_send(request, context) + + async def on_cancel_task( + self, + request: "Request", + context: "ServerCallContext", + ) -> dict[str, Any]: + return await self.rest_handler.on_cancel_task(request, context) + + async def on_get_task( + self, + request: "Request", + context: "ServerCallContext", + ) -> dict[str, Any]: + return await self.rest_handler.on_get_task(request, context) + + async def handle_authenticated_agent_card( + self, + request: "Request", + context: "ServerCallContext", + ) -> dict[str, Any]: + return await self.a2a_rest_adapter.handle_authenticated_agent_card( + request, context + ) + + def register_operations(self) -> Dict[str, List[str]]: + """Registers the operations of the A2A Agent.""" + routes = { + "a2a_extension": [ + "on_message_send", + "on_get_task", + "on_cancel_task", + ] + } + if self.agent_card.supports_authenticated_extended_card: + routes["a2a_extension"].append("handle_authenticated_agent_card") + return routes diff --git a/vertexai/preview/reasoning_engines/templates/adk.py b/vertexai/preview/reasoning_engines/templates/adk.py index 37a000c234..b16e8f48fa 100644 --- a/vertexai/preview/reasoning_engines/templates/adk.py +++ b/vertexai/preview/reasoning_engines/templates/adk.py @@ -794,6 +794,91 @@ def _asyncio_thread_main(): finally: thread.join() + async def bidi_stream_query( + self, + request_queue: Any, + ) -> AsyncIterable[Any]: + """Bidi streaming query the ADK application. + + Args: + request_queue: + The queue of requests to stream responses for, with the type of + asyncio.Queue[Any]. + + Raises: + TypeError: If the request_queue is not an asyncio.Queue instance. + ValueError: If the first request does not have a user_id. 
+ ValidationError: If failed to convert to LiveRequest. + + Yields: + The stream responses of querying the ADK application. + """ + from google.adk.agents.live_request_queue import LiveRequest + from google.adk.agents.live_request_queue import LiveRequestQueue + from vertexai.agent_engines import _utils + + # Manual type check needed as Pydantic doesn't support asyncio.Queue. + if not isinstance(request_queue, asyncio.Queue): + raise TypeError("request_queue must be an asyncio.Queue instance.") + + first_request = await request_queue.get() + user_id = first_request.get("user_id") + if not user_id: + raise ValueError("The first request must have a user_id.") + + session_id = first_request.get("session_id") + run_config = first_request.get("run_config") + first_live_request = first_request.get("live_request") + + if not self._tmpl_attrs.get("runner"): + self.set_up() + if not session_id: + session = await self.async_create_session(user_id=user_id) + session_id = session.id + run_config = _validate_run_config(run_config) + + live_request_queue = LiveRequestQueue() + + if first_live_request and isinstance(first_live_request, Dict): + live_request_queue.send(LiveRequest.model_validate(first_live_request)) + + # Forwards live requests to the agent. + async def _forward_requests(): + while True: + request = await request_queue.get() + live_request = LiveRequest.model_validate(request) + live_request_queue.send(live_request) + + # Forwards events to the client. 
+ async def _forward_events(): + if run_config: + events_async = self._tmpl_attrs.get("runner").run_live( + user_id=user_id, + session_id=session_id, + live_request_queue=live_request_queue, + run_config=run_config, + ) + else: + events_async = self._tmpl_attrs.get("runner").run_live( + user_id=user_id, + session_id=session_id, + live_request_queue=live_request_queue, + ) + async for event in events_async: + yield _utils.dump_event_for_json(event) + + requests_task = asyncio.create_task(_forward_requests()) + + try: + async for event in _forward_events(): + yield event + finally: + requests_task.cancel() + try: + await requests_task + except asyncio.CancelledError: + pass + async def async_get_session( self, *, @@ -1116,4 +1201,5 @@ def register_operations(self) -> Dict[str, List[str]]: ], "stream": ["stream_query", "streaming_agent_run_with_events"], "async_stream": ["async_stream_query"], + "bidi_stream": ["bidi_stream_query"], } diff --git a/vertexai/preview/reasoning_engines/templates/langchain.py b/vertexai/preview/reasoning_engines/templates/langchain.py index a45c6210df..a8bb11305e 100644 --- a/vertexai/preview/reasoning_engines/templates/langchain.py +++ b/vertexai/preview/reasoning_engines/templates/langchain.py @@ -148,9 +148,11 @@ def _default_runnable_builder( agent_executor = AgentExecutor( agent=prompt | model | output_parser, tools=[ - tool - if isinstance(tool, lc_tools.BaseTool) - else StructuredTool.from_function(tool) + ( + tool + if isinstance(tool, lc_tools.BaseTool) + else StructuredTool.from_function(tool) + ) for tool in tools if isinstance(tool, (Callable, lc_tools.BaseTool)) ], diff --git a/vertexai/prompts/_prompt_management.py b/vertexai/prompts/_prompt_management.py index 88488f53bc..1ac4f626a3 100644 --- a/vertexai/prompts/_prompt_management.py +++ b/vertexai/prompts/_prompt_management.py @@ -157,10 +157,10 @@ def to_dict(self) -> Dict[str, Any]: tools = dct["multimodalPrompt"]["promptMessage"]["tools"] for tool in tools: for 
function_declaration in tool.get("function_declarations", []): - function_declaration[ - "parameters" - ] = _format_function_declaration_parameters( - function_declaration["parameters"] + function_declaration["parameters"] = ( + _format_function_declaration_parameters( + function_declaration["parameters"] + ) ) if self.executions and self.executions[0]: diff --git a/vertexai/prompts/_prompts.py b/vertexai/prompts/_prompts.py index 03c172d2c1..4870bbfbf6 100644 --- a/vertexai/prompts/_prompts.py +++ b/vertexai/prompts/_prompts.py @@ -550,7 +550,10 @@ def generate_content( tool_config: Optional["ToolConfig"] = None, stream: bool = False, system_instruction: Optional[PartsType] = None, - ) -> Union["GenerationResponse", Iterable["GenerationResponse"],]: + ) -> Union[ + "GenerationResponse", + Iterable["GenerationResponse"], + ]: """Generates content using the saved Prompt configs. Args: diff --git a/vertexai/rag/utils/_gapic_utils.py b/vertexai/rag/utils/_gapic_utils.py index fa8ce9bdfa..565bd845ff 100644 --- a/vertexai/rag/utils/_gapic_utils.py +++ b/vertexai/rag/utils/_gapic_utils.py @@ -302,7 +302,7 @@ def convert_path_to_resource_id( def convert_source_for_rag_import( - source: Union[SlackChannelsSource, JiraSource, SharePointSources] + source: Union[SlackChannelsSource, JiraSource, SharePointSources], ) -> Union[GapicSlackSource, GapicJiraSource]: """Converts a SlackChannelsSource or JiraSource to a GapicSlackSource or GapicJiraSource.""" if isinstance(source, SlackChannelsSource): diff --git a/vertexai/reasoning_engines/_reasoning_engines.py b/vertexai/reasoning_engines/_reasoning_engines.py index 2b9efbe06f..717a055a4d 100644 --- a/vertexai/reasoning_engines/_reasoning_engines.py +++ b/vertexai/reasoning_engines/_reasoning_engines.py @@ -524,7 +524,7 @@ def _validate_staging_bucket_or_raise(staging_bucket: str) -> str: def _validate_reasoning_engine_or_raise( - reasoning_engine: Union[Queryable, OperationRegistrable, StreamQueryable] + reasoning_engine: 
Union[Queryable, OperationRegistrable, StreamQueryable], ) -> Union[Queryable, OperationRegistrable, StreamQueryable]: """Tries to validate the reasoning engine. diff --git a/vertexai/reasoning_engines/_utils.py b/vertexai/reasoning_engines/_utils.py index 338cf3c9f6..dbb0938748 100644 --- a/vertexai/reasoning_engines/_utils.py +++ b/vertexai/reasoning_engines/_utils.py @@ -142,7 +142,7 @@ def to_json_serializable_llama_index_object( LlamaIndexBaseModel, LlamaIndexChatResponse, Sequence[LlamaIndexBaseModel], - ] + ], ) -> Union[str, Dict[str, Any], Sequence[Union[str, Dict[str, Any]]]]: """Converts a LlamaIndexResponse to a JSON serializable object.""" if isinstance(obj, LlamaIndexResponse): diff --git a/vertexai/resources/preview/feature_store/feature_monitor.py b/vertexai/resources/preview/feature_store/feature_monitor.py index f4135ae0c1..c5f6ac7276 100644 --- a/vertexai/resources/preview/feature_store/feature_monitor.py +++ b/vertexai/resources/preview/feature_store/feature_monitor.py @@ -127,9 +127,11 @@ def feature_selection_configs(self) -> List[Tuple[str, float]]: configs.append( ( feature_config.feature_id, - feature_config.drift_threshold - if feature_config.drift_threshold - else 0.3, + ( + feature_config.drift_threshold + if feature_config.drift_threshold + else 0.3 + ), ) ) return configs diff --git a/vertexai/resources/preview/feature_store/feature_online_store.py b/vertexai/resources/preview/feature_store/feature_online_store.py index 429ce28cd4..29194dc34b 100644 --- a/vertexai/resources/preview/feature_store/feature_online_store.py +++ b/vertexai/resources/preview/feature_store/feature_online_store.py @@ -570,9 +570,11 @@ def create_feature_view( big_query_source=big_query_source, vertex_rag_source=vertex_rag_source, feature_registry_source=feature_registry_source, - sync_config=gca_feature_view.FeatureView.SyncConfig(cron=sync_config) - if sync_config - else None, + sync_config=( + gca_feature_view.FeatureView.SyncConfig(cron=sync_config) + if 
sync_config + else None + ), ) if labels: diff --git a/vertexai/resources/preview/ml_monitoring/model_monitors.py b/vertexai/resources/preview/ml_monitoring/model_monitors.py index 52aa63d701..2719256222 100644 --- a/vertexai/resources/preview/ml_monitoring/model_monitors.py +++ b/vertexai/resources/preview/ml_monitoring/model_monitors.py @@ -306,9 +306,9 @@ class MetricsSearchResponse: next_page_token: str _search_metrics_response: Any - monitoring_stats: List[ - model_monitoring_stats.ModelMonitoringStats - ] = dataclasses.field(default_factory=list) + monitoring_stats: List[model_monitoring_stats.ModelMonitoringStats] = ( + dataclasses.field(default_factory=list) + ) @property def raw_search_metrics_response( @@ -335,9 +335,9 @@ class AlertsSearchResponse: next_page_token: str _search_alerts_response: Any total_alerts: int - model_monitoring_alerts: List[ - model_monitoring_alert.ModelMonitoringAlert - ] = dataclasses.field(default_factory=list) + model_monitoring_alerts: List[model_monitoring_alert.ModelMonitoringAlert] = ( + dataclasses.field(default_factory=list) + ) @property def raw_search_alerts_response( @@ -360,9 +360,9 @@ class ListJobsResponse: next_page_token: str _list_jobs_response: Any - list_jobs: List[ - gca_model_monitoring_job_compat.ModelMonitoringJob - ] = dataclasses.field(default_factory=list) + list_jobs: List[gca_model_monitoring_job_compat.ModelMonitoringJob] = ( + dataclasses.field(default_factory=list) + ) @property def raw_list_jobs_response( @@ -1622,9 +1622,11 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=model_monitoring_job_name, - parent_resource_name_fields={ModelMonitor._resource_noun: model_monitor_id} - if model_monitor_id - else model_monitor_id, + parent_resource_name_fields=( + {ModelMonitor._resource_noun: model_monitor_id} + if model_monitor_id + else model_monitor_id + ), ) @property diff --git a/vertexai/resources/preview/ml_monitoring/spec/schema.py 
b/vertexai/resources/preview/ml_monitoring/spec/schema.py index 3f2f596755..b069a3f9c8 100644 --- a/vertexai/resources/preview/ml_monitoring/spec/schema.py +++ b/vertexai/resources/preview/ml_monitoring/spec/schema.py @@ -146,12 +146,12 @@ def _as_proto(self) -> model_monitor.ModelMonitoringSchema: user_ground_truth_fields.append(field._as_proto()) return model_monitor.ModelMonitoringSchema( feature_fields=user_feature_fields, - prediction_fields=user_prediction_fields - if self.prediction_fields - else None, - ground_truth_fields=user_ground_truth_fields - if self.ground_truth_fields - else None, + prediction_fields=( + user_prediction_fields if self.prediction_fields else None + ), + ground_truth_fields=( + user_ground_truth_fields if self.ground_truth_fields else None + ), ) def to_json(self, output_dir: Optional[str] = None) -> str: diff --git a/vertexai/tuning/_distillation.py b/vertexai/tuning/_distillation.py index 5ebd5ece54..b29583278c 100644 --- a/vertexai/tuning/_distillation.py +++ b/vertexai/tuning/_distillation.py @@ -18,7 +18,9 @@ from typing import Optional from google.cloud.aiplatform.utils import gcs_utils -from google.cloud.aiplatform_v1beta1.types import tuning_job as gca_tuning_job_types +from google.cloud.aiplatform_v1beta1.types import ( + tuning_job as gca_tuning_job_types, +) from vertexai import generative_models from vertexai.tuning import _tuning diff --git a/vertexai/vision_models/_vision_models.py b/vertexai/vision_models/_vision_models.py index 69901d0198..ded12d5408 100644 --- a/vertexai/vision_models/_vision_models.py +++ b/vertexai/vision_models/_vision_models.py @@ -345,9 +345,11 @@ def __init__( segmentation_classes: List of class IDs for segmentation. 
Max of 5 IDs """ self.config = MaskImageConfig( - mask_mode=self.mask_mode_enum_map[mask_mode] - if mask_mode in self.mask_mode_enum_map - else "MASK_MODE_DEFAULT", + mask_mode=( + self.mask_mode_enum_map[mask_mode] + if mask_mode in self.mask_mode_enum_map + else "MASK_MODE_DEFAULT" + ), dilation=dilation, segmentation_classes=segmentation_classes, ) @@ -394,9 +396,11 @@ def __init__( """ super().__init__(reference_id, image) self.config = ControlImageConfig( - control_type=self.control_type_enum_map[control_type] - if control_type in self.control_type_enum_map - else "CONTROL_TYPE_DEFAULT", + control_type=( + self.control_type_enum_map[control_type] + if control_type in self.control_type_enum_map + else "CONTROL_TYPE_DEFAULT" + ), enable_control_image_computation=enable_control_image_computation, ) @@ -463,9 +467,11 @@ def __init__( super().__init__(reference_id, image) self.config = SubjectImageConfig( subject_description=subject_description, - subject_type=self.subject_type_enum_map[subject_type] - if subject_type in self.subject_type_enum_map - else "SUBJECT_TYPE_DEFAULT", + subject_type=( + self.subject_type_enum_map[subject_type] + if subject_type in self.subject_type_enum_map + else "SUBJECT_TYPE_DEFAULT" + ), ) @@ -800,9 +806,9 @@ class ID instance["image"] = { "gcsUri": base_image._gcs_uri # pylint: disable=protected-access } - shared_generation_parameters[ - "base_image_uri" - ] = base_image._gcs_uri # pylint: disable=protected-access + shared_generation_parameters["base_image_uri"] = ( + base_image._gcs_uri + ) # pylint: disable=protected-access else: instance["image"] = { "bytesBase64Encoded": base_image._as_base64_string() # pylint: disable=protected-access @@ -818,9 +824,9 @@ class ID "gcsUri": mask._gcs_uri # pylint: disable=protected-access }, } - shared_generation_parameters[ - "mask_uri" - ] = mask._gcs_uri # pylint: disable=protected-access + shared_generation_parameters["mask_uri"] = ( + mask._gcs_uri + ) # pylint: disable=protected-access 
else: instance["mask"] = { "image": { @@ -863,12 +869,12 @@ class ID reference_image.reference_image._image_bytes # pylint: disable=protected-access ).hexdigest() - reference_image_instance[ - "referenceId" - ] = reference_image.reference_id # pylint: disable=protected-access - reference_image_instance[ - "referenceType" - ] = reference_image.reference_type # pylint: disable=protected-access + reference_image_instance["referenceId"] = ( + reference_image.reference_id + ) # pylint: disable=protected-access + reference_image_instance["referenceType"] = ( + reference_image.reference_type + ) # pylint: disable=protected-access shared_generation_parameters[ f"reference_type_{reference_image.reference_id}" ] = reference_image.reference_type # pylint: disable=protected-access