From 0b5dc70d2f8f730e2405f2891c8007bdbbc344ad Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Sep 2023 15:53:34 -0700 Subject: [PATCH 1/5] docs: Minor formatting (#126) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Minor formatting chore: Update gapic-generator-python to v1.11.5 build: Update rules_python to 0.24.0 PiperOrigin-RevId: 563436317 Source-Link: https://github.com/googleapis/googleapis/commit/42fd37b18d706f6f51f52f209973b3b2c28f509a Source-Link: https://github.com/googleapis/googleapis-gen/commit/280264ca02fb9316b4237a96d0af1a2343a81a56 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjgwMjY0Y2EwMmZiOTMxNmI0MjM3YTk2ZDBhZjFhMjM0M2E4MWE1NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/run_v2/services/executions/async_client.py | 2 +- google/cloud/run_v2/services/executions/client.py | 2 +- google/cloud/run_v2/services/executions/transports/rest.py | 1 - google/cloud/run_v2/services/jobs/async_client.py | 2 +- google/cloud/run_v2/services/jobs/client.py | 2 +- google/cloud/run_v2/services/jobs/transports/rest.py | 1 - google/cloud/run_v2/services/revisions/async_client.py | 2 +- google/cloud/run_v2/services/revisions/client.py | 2 +- google/cloud/run_v2/services/revisions/transports/rest.py | 1 - google/cloud/run_v2/services/services/async_client.py | 2 +- google/cloud/run_v2/services/services/client.py | 2 +- google/cloud/run_v2/services/services/transports/rest.py | 1 - google/cloud/run_v2/services/tasks/async_client.py | 2 +- google/cloud/run_v2/services/tasks/client.py | 2 +- google/cloud/run_v2/services/tasks/transports/base.py | 2 +- google/cloud/run_v2/services/tasks/transports/grpc.py | 2 +- google/cloud/run_v2/services/tasks/transports/grpc_asyncio.py | 2 +- google/cloud/run_v2/services/tasks/transports/rest.py | 3 ++- google/cloud/run_v2/types/job.py | 1 + google/cloud/run_v2/types/k8s_min.py | 1 + .../snippet_metadata_google.cloud.run.v2.json | 2 +- tests/unit/gapic/run_v2/test_executions.py | 2 +- tests/unit/gapic/run_v2/test_jobs.py | 2 +- tests/unit/gapic/run_v2/test_revisions.py | 2 +- tests/unit/gapic/run_v2/test_services.py | 2 +- tests/unit/gapic/run_v2/test_tasks.py | 2 +- 26 files changed, 23 insertions(+), 24 deletions(-) diff --git a/google/cloud/run_v2/services/executions/async_client.py b/google/cloud/run_v2/services/executions/async_client.py index 46ab6b8..9238da1 100644 --- a/google/cloud/run_v2/services/executions/async_client.py +++ b/google/cloud/run_v2/services/executions/async_client.py @@ -46,7 +46,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.run_v2.services.executions import pagers diff --git a/google/cloud/run_v2/services/executions/client.py b/google/cloud/run_v2/services/executions/client.py index b4ad190..e8c3f8c 100644 --- a/google/cloud/run_v2/services/executions/client.py +++ b/google/cloud/run_v2/services/executions/client.py @@ -50,7 +50,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from 
google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.run_v2.services.executions import pagers diff --git a/google/cloud/run_v2/services/executions/transports/rest.py b/google/cloud/run_v2/services/executions/transports/rest.py index 7c4ead9..5d5aaf9 100644 --- a/google/cloud/run_v2/services/executions/transports/rest.py +++ b/google/cloud/run_v2/services/executions/transports/rest.py @@ -33,7 +33,6 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/google/cloud/run_v2/services/jobs/async_client.py b/google/cloud/run_v2/services/jobs/async_client.py index 1f70781..3f26841 100644 --- a/google/cloud/run_v2/services/jobs/async_client.py +++ b/google/cloud/run_v2/services/jobs/async_client.py @@ -48,7 +48,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.run_v2.services.jobs import pagers diff --git a/google/cloud/run_v2/services/jobs/client.py b/google/cloud/run_v2/services/jobs/client.py index 956f6df..fba209d 100644 --- a/google/cloud/run_v2/services/jobs/client.py +++ b/google/cloud/run_v2/services/jobs/client.py @@ -52,7 +52,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.run_v2.services.jobs import pagers diff --git a/google/cloud/run_v2/services/jobs/transports/rest.py b/google/cloud/run_v2/services/jobs/transports/rest.py index dcd9a01..678c03d 100644 --- a/google/cloud/run_v2/services/jobs/transports/rest.py +++ b/google/cloud/run_v2/services/jobs/transports/rest.py @@ -33,7 +33,6 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/google/cloud/run_v2/services/revisions/async_client.py b/google/cloud/run_v2/services/revisions/async_client.py index 7ada926..1fd2fa0 100644 --- a/google/cloud/run_v2/services/revisions/async_client.py +++ b/google/cloud/run_v2/services/revisions/async_client.py @@ -46,7 +46,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import 
duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/run_v2/services/revisions/client.py b/google/cloud/run_v2/services/revisions/client.py index 56705e8..1ae72a8 100644 --- a/google/cloud/run_v2/services/revisions/client.py +++ b/google/cloud/run_v2/services/revisions/client.py @@ -50,7 +50,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/run_v2/services/revisions/transports/rest.py b/google/cloud/run_v2/services/revisions/transports/rest.py index 35f0e00..2983925 100644 --- a/google/cloud/run_v2/services/revisions/transports/rest.py +++ b/google/cloud/run_v2/services/revisions/transports/rest.py @@ -33,7 +33,6 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/google/cloud/run_v2/services/services/async_client.py b/google/cloud/run_v2/services/services/async_client.py index 09f0178..8b9e6e3 100644 --- a/google/cloud/run_v2/services/services/async_client.py +++ b/google/cloud/run_v2/services/services/async_client.py @@ -48,7 +48,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.run_v2.services.services import pagers diff --git a/google/cloud/run_v2/services/services/client.py b/google/cloud/run_v2/services/services/client.py index 9c0cff2..7d9da55 100644 --- a/google/cloud/run_v2/services/services/client.py +++ b/google/cloud/run_v2/services/services/client.py @@ -52,7 +52,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.run_v2.services.services import pagers diff --git a/google/cloud/run_v2/services/services/transports/rest.py b/google/cloud/run_v2/services/services/transports/rest.py index 4cf556c..8f1b09c 100644 --- a/google/cloud/run_v2/services/services/transports/rest.py +++ b/google/cloud/run_v2/services/services/transports/rest.py @@ -33,7 +33,6 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/google/cloud/run_v2/services/tasks/async_client.py 
b/google/cloud/run_v2/services/tasks/async_client.py index 3837922..06857a2 100644 --- a/google/cloud/run_v2/services/tasks/async_client.py +++ b/google/cloud/run_v2/services/tasks/async_client.py @@ -43,7 +43,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/run_v2/services/tasks/client.py b/google/cloud/run_v2/services/tasks/client.py index 617087a..ac024c6 100644 --- a/google/cloud/run_v2/services/tasks/client.py +++ b/google/cloud/run_v2/services/tasks/client.py @@ -47,7 +47,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/google/cloud/run_v2/services/tasks/transports/base.py b/google/cloud/run_v2/services/tasks/transports/base.py index 11c2647..88f769e 100644 --- a/google/cloud/run_v2/services/tasks/transports/base.py +++ b/google/cloud/run_v2/services/tasks/transports/base.py @@ -23,7 +23,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.run_v2 import gapic_version as package_version diff --git a/google/cloud/run_v2/services/tasks/transports/grpc.py b/google/cloud/run_v2/services/tasks/transports/grpc.py index 005a787..41dcb4d 100644 --- a/google/cloud/run_v2/services/tasks/transports/grpc.py +++ b/google/cloud/run_v2/services/tasks/transports/grpc.py @@ -21,7 +21,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.run_v2.types import task diff --git a/google/cloud/run_v2/services/tasks/transports/grpc_asyncio.py b/google/cloud/run_v2/services/tasks/transports/grpc_asyncio.py index 3354356..5548856 100644 --- a/google/cloud/run_v2/services/tasks/transports/grpc_asyncio.py +++ b/google/cloud/run_v2/services/tasks/transports/grpc_asyncio.py @@ -20,7 +20,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/google/cloud/run_v2/services/tasks/transports/rest.py b/google/cloud/run_v2/services/tasks/transports/rest.py index fe9b1b7..03d7e5c 100644 --- a/google/cloud/run_v2/services/tasks/transports/rest.py +++ b/google/cloud/run_v2/services/tasks/transports/rest.py @@ -27,7 +27,6 @@ from google.auth.transport.grpc 
import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version @@ -38,6 +37,8 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore + from google.cloud.run_v2.types import task from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO diff --git a/google/cloud/run_v2/types/job.py b/google/cloud/run_v2/types/job.py index 69eba3e..6667bff 100644 --- a/google/cloud/run_v2/types/job.py +++ b/google/cloud/run_v2/types/job.py @@ -266,6 +266,7 @@ class Job(proto.Message): Attributes: name (str): The fully qualified name of this Job. + Format: projects/{project}/locations/{location}/jobs/{job} diff --git a/google/cloud/run_v2/types/k8s_min.py b/google/cloud/run_v2/types/k8s_min.py index 61de7d1..e123c70 100644 --- a/google/cloud/run_v2/types/k8s_min.py +++ b/google/cloud/run_v2/types/k8s_min.py @@ -77,6 +77,7 @@ class Container(proto.Message): specified ports must be listening on all interfaces (0.0.0.0) within the container to be accessible. + If omitted, a port number will be chosen and passed to the container through the PORT environment variable for the container to listen diff --git a/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json b/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json index 484c29e..da84c85 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-run", - "version": "0.9.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/tests/unit/gapic/run_v2/test_executions.py b/tests/unit/gapic/run_v2/test_executions.py index 22fbffd..ca4c3dd 100644 --- a/tests/unit/gapic/run_v2/test_executions.py +++ b/tests/unit/gapic/run_v2/test_executions.py @@ -43,7 +43,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore diff --git a/tests/unit/gapic/run_v2/test_jobs.py b/tests/unit/gapic/run_v2/test_jobs.py index 1a7b136..1d70644 100644 --- a/tests/unit/gapic/run_v2/test_jobs.py +++ b/tests/unit/gapic/run_v2/test_jobs.py @@ -46,7 +46,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/tests/unit/gapic/run_v2/test_revisions.py b/tests/unit/gapic/run_v2/test_revisions.py index bfd7878..e3aa1ac 100644 --- a/tests/unit/gapic/run_v2/test_revisions.py +++ b/tests/unit/gapic/run_v2/test_revisions.py @@ -43,7 +43,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import 
MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import json_format diff --git a/tests/unit/gapic/run_v2/test_services.py b/tests/unit/gapic/run_v2/test_services.py index 4173393..2a86cb4 100644 --- a/tests/unit/gapic/run_v2/test_services.py +++ b/tests/unit/gapic/run_v2/test_services.py @@ -46,7 +46,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/tests/unit/gapic/run_v2/test_tasks.py b/tests/unit/gapic/run_v2/test_tasks.py index dcc4c08..e585506 100644 --- a/tests/unit/gapic/run_v2/test_tasks.py +++ b/tests/unit/gapic/run_v2/test_tasks.py @@ -33,7 +33,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import json_format From f2c76e16a6abf333e36cba50b3270211eb762fcf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 30 Sep 2023 07:28:31 -0400 Subject: [PATCH 2/5] fix: Removes accidentally exposed field service.traffic_tags_cleanup_threshold in Cloud Run Service (#130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Adds support for cancel Execution feat: Adds support for Execution overrides feat: Adds support for Direct VPC egress setting feat: New fields for multi-container feat: New field for Task's scheduled timestamp docs: General documentation fixes. 
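Not covered by a single generated sample below: a minimal sketch of how the two headline additions in this commit — cancelling an Execution and running a Job with per-execution overrides — might be combined from user code. The client and type names (ExecutionsClient.cancel_execution, CancelExecutionRequest, RunJobRequest.Overrides, ContainerOverride) come from the diff that follows; the helper name, resource names, and argument values are placeholders.

.. code-block:: python

    from google.cloud import run_v2


    def cancel_and_rerun(execution_name: str, job_name: str) -> None:
        # Cancel a running Execution; cancel_execution returns a
        # long-running operation whose result is the cancelled Execution.
        executions = run_v2.ExecutionsClient()
        cancel_op = executions.cancel_execution(
            request=run_v2.CancelExecutionRequest(name=execution_name)
        )
        cancel_op.result()

        # Re-run the Job with the new per-execution overrides.
        jobs = run_v2.JobsClient()
        run_op = jobs.run_job(
            request=run_v2.RunJobRequest(
                name=job_name,
                overrides=run_v2.RunJobRequest.Overrides(
                    task_count=1,
                    container_overrides=[
                        run_v2.RunJobRequest.Overrides.ContainerOverride(
                            args=["--retry"],
                        )
                    ],
                ),
            )
        )
        print(run_op.result())
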
PiperOrigin-RevId: 567710896 Source-Link: https://github.com/googleapis/googleapis/commit/c5985fe7f9a3bb391458e26c53724f2c25b2dccc Source-Link: https://github.com/googleapis/googleapis-gen/commit/0e5c6ac7f359ba1776634e36c3c174ee72a07eac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGU1YzZhYzdmMzU5YmExNzc2NjM0ZTM2YzNjMTc0ZWU3MmEwN2VhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Removes accidentally exposed field service.traffic_tags_cleanup_threshold in Cloud Run Service docs: Minor comment fixes PiperOrigin-RevId: 568981386 Source-Link: https://github.com/googleapis/googleapis/commit/01711ae89e793a7b3630222047d8e7a0038c0808 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cc2e659f8cf5a458d6f7a2c0f8f7cfa54ef5402c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2MyZTY1OWY4Y2Y1YTQ1OGQ2ZjdhMmMwZjhmN2NmYTU0ZWY1NDAyYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/run/__init__.py | 4 + google/cloud/run_v2/__init__.py | 4 + google/cloud/run_v2/gapic_metadata.json | 15 + .../services/executions/async_client.py | 146 ++++- .../run_v2/services/executions/client.py | 146 ++++- .../services/executions/transports/base.py | 14 + .../services/executions/transports/grpc.py | 26 + .../executions/transports/grpc_asyncio.py | 28 + .../services/executions/transports/rest.py | 138 +++++ google/cloud/run_v2/services/jobs/client.py | 96 +++- google/cloud/run_v2/types/__init__.py | 4 + google/cloud/run_v2/types/condition.py | 3 + google/cloud/run_v2/types/execution.py | 60 ++- google/cloud/run_v2/types/job.py | 86 ++- google/cloud/run_v2/types/k8s_min.py | 84 ++- google/cloud/run_v2/types/task.py | 12 +- google/cloud/run_v2/types/vendor_settings.py | 62 ++- ...rated_executions_cancel_execution_async.py | 56 ++ ...erated_executions_cancel_execution_sync.py | 56 ++ .../snippet_metadata_google.cloud.run.v2.json | 161 ++++++ scripts/fixup_run_v2_keywords.py | 3 +- tests/unit/gapic/run_v2/test_executions.py | 500 ++++++++++++++++++ tests/unit/gapic/run_v2/test_jobs.py | 337 +++--------- tests/unit/gapic/run_v2/test_services.py | 56 +- 24 files changed, 1750 insertions(+), 347 deletions(-) create mode 100644 samples/generated_samples/run_v2_generated_executions_cancel_execution_async.py create mode 100644 samples/generated_samples/run_v2_generated_executions_cancel_execution_sync.py diff --git a/google/cloud/run/__init__.py b/google/cloud/run/__init__.py index b40948d..6d215f9 100644 --- a/google/cloud/run/__init__.py +++ b/google/cloud/run/__init__.py @@ -30,6 +30,7 @@ from google.cloud.run_v2.services.tasks.client import TasksClient from google.cloud.run_v2.types.condition import Condition from google.cloud.run_v2.types.execution import ( + CancelExecutionRequest, DeleteExecutionRequest, Execution, GetExecutionRequest, @@ -52,6 +53,7 @@ CloudSqlInstance, Container, ContainerPort, + EmptyDirVolumeSource, EnvVar, EnvVarSource, GRPCAction, @@ -117,6 +119,7 @@ "TasksClient", "TasksAsyncClient", "Condition", + "CancelExecutionRequest", "DeleteExecutionRequest", "Execution", "GetExecutionRequest", @@ -135,6 +138,7 @@ "CloudSqlInstance", "Container", "ContainerPort", + "EmptyDirVolumeSource", "EnvVar", "EnvVarSource", "GRPCAction", diff --git a/google/cloud/run_v2/__init__.py b/google/cloud/run_v2/__init__.py index f91c038..11fd2d7 100644 --- 
a/google/cloud/run_v2/__init__.py +++ b/google/cloud/run_v2/__init__.py @@ -25,6 +25,7 @@ from .services.tasks import TasksAsyncClient, TasksClient from .types.condition import Condition from .types.execution import ( + CancelExecutionRequest, DeleteExecutionRequest, Execution, GetExecutionRequest, @@ -47,6 +48,7 @@ CloudSqlInstance, Container, ContainerPort, + EmptyDirVolumeSource, EnvVar, EnvVarSource, GRPCAction, @@ -107,6 +109,7 @@ "ServicesAsyncClient", "TasksAsyncClient", "BinaryAuthorization", + "CancelExecutionRequest", "CloudSqlInstance", "Condition", "Container", @@ -117,6 +120,7 @@ "DeleteJobRequest", "DeleteRevisionRequest", "DeleteServiceRequest", + "EmptyDirVolumeSource", "EncryptionKeyRevocationAction", "EnvVar", "EnvVarSource", diff --git a/google/cloud/run_v2/gapic_metadata.json b/google/cloud/run_v2/gapic_metadata.json index 35162df..e4131ae 100644 --- a/google/cloud/run_v2/gapic_metadata.json +++ b/google/cloud/run_v2/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ExecutionsClient", "rpcs": { + "CancelExecution": { + "methods": [ + "cancel_execution" + ] + }, "DeleteExecution": { "methods": [ "delete_execution" @@ -30,6 +35,11 @@ "grpc-async": { "libraryClient": "ExecutionsAsyncClient", "rpcs": { + "CancelExecution": { + "methods": [ + "cancel_execution" + ] + }, "DeleteExecution": { "methods": [ "delete_execution" @@ -50,6 +60,11 @@ "rest": { "libraryClient": "ExecutionsClient", "rpcs": { + "CancelExecution": { + "methods": [ + "cancel_execution" + ] + }, "DeleteExecution": { "methods": [ "delete_execution" diff --git a/google/cloud/run_v2/services/executions/async_client.py b/google/cloud/run_v2/services/executions/async_client.py index 9238da1..4b3f7e6 100644 --- a/google/cloud/run_v2/services/executions/async_client.py +++ b/google/cloud/run_v2/services/executions/async_client.py @@ -267,12 +267,9 @@ async def sample_get_execution(): The request object. Request message for obtaining a Execution by its full name. name (:class:`str`): - Required. The full name of the - Execution. Format: - - projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, - where {project} can be project id or - number. + Required. The full name of the Execution. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -377,13 +374,11 @@ async def sample_list_executions(): The request object. Request message for retrieving a list of Executions. parent (:class:`str`): - Required. The Execution from which - the Executions should be listed. To list - all Executions across Jobs, use "-" + Required. The Execution from which the Executions should + be listed. To list all Executions across Jobs, use "-" instead of Job name. Format: - projects/{project}/locations/{location}/jobs/{job}, - where {project} can be project id or - number. + ``projects/{project}/locations/{location}/jobs/{job}``, + where ``{project}`` can be project id or number. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -500,12 +495,9 @@ async def sample_delete_execution(): The request object. Request message for deleting an Execution. name (:class:`str`): - Required. The name of the Execution - to delete. Format: - - projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, - where {project} can be project id or - number. + Required. 
The name of the Execution to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -575,6 +567,124 @@ async def sample_delete_execution(): # Done; return the response. return response + async def cancel_execution( + self, + request: Optional[Union[execution.CancelExecutionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancels an Execution. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import run_v2 + + async def sample_cancel_execution(): + # Create a client + client = run_v2.ExecutionsAsyncClient() + + # Initialize request argument(s) + request = run_v2.CancelExecutionRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_execution(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.run_v2.types.CancelExecutionRequest, dict]]): + The request object. Request message for deleting an + Execution. + name (:class:`str`): + Required. The name of the Execution to cancel. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.run_v2.types.Execution` Execution represents the configuration of a single execution. A execution an + immutable resource that references a container image + which is run to completion. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = execution.CancelExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_execution, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + execution.Execution, + metadata_type=execution.Execution, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/google/cloud/run_v2/services/executions/client.py b/google/cloud/run_v2/services/executions/client.py index e8c3f8c..af50b7d 100644 --- a/google/cloud/run_v2/services/executions/client.py +++ b/google/cloud/run_v2/services/executions/client.py @@ -599,12 +599,9 @@ def sample_get_execution(): The request object. Request message for obtaining a Execution by its full name. name (str): - Required. The full name of the - Execution. Format: - - projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, - where {project} can be project id or - number. + Required. The full name of the Execution. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -709,13 +706,11 @@ def sample_list_executions(): The request object. Request message for retrieving a list of Executions. parent (str): - Required. The Execution from which - the Executions should be listed. To list - all Executions across Jobs, use "-" + Required. The Execution from which the Executions should + be listed. To list all Executions across Jobs, use "-" instead of Job name. Format: - projects/{project}/locations/{location}/jobs/{job}, - where {project} can be project id or - number. + ``projects/{project}/locations/{location}/jobs/{job}``, + where ``{project}`` can be project id or number. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -832,12 +827,9 @@ def sample_delete_execution(): The request object. Request message for deleting an Execution. name (str): - Required. The name of the Execution - to delete. Format: - - projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, - where {project} can be project id or - number. + Required. The name of the Execution to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -907,6 +899,124 @@ def sample_delete_execution(): # Done; return the response. return response + def cancel_execution( + self, + request: Optional[Union[execution.CancelExecutionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancels an Execution. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import run_v2 + + def sample_cancel_execution(): + # Create a client + client = run_v2.ExecutionsClient() + + # Initialize request argument(s) + request = run_v2.CancelExecutionRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_execution(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.run_v2.types.CancelExecutionRequest, dict]): + The request object. Request message for deleting an + Execution. + name (str): + Required. The name of the Execution to cancel. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.run_v2.types.Execution` Execution represents the configuration of a single execution. A execution an + immutable resource that references a container image + which is run to completion. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a execution.CancelExecutionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, execution.CancelExecutionRequest): + request = execution.CancelExecutionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_execution] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + execution.Execution, + metadata_type=execution.Execution, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ExecutionsClient": return self diff --git a/google/cloud/run_v2/services/executions/transports/base.py b/google/cloud/run_v2/services/executions/transports/base.py index 6f6a5c2..8d709ca 100644 --- a/google/cloud/run_v2/services/executions/transports/base.py +++ b/google/cloud/run_v2/services/executions/transports/base.py @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_execution: gapic_v1.method.wrap_method( + self.cancel_execution, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -185,6 +190,15 @@ def delete_execution( ]: raise NotImplementedError() + @property + def cancel_execution( + self, + ) -> Callable[ + [execution.CancelExecutionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/google/cloud/run_v2/services/executions/transports/grpc.py b/google/cloud/run_v2/services/executions/transports/grpc.py index 17c3c72..fdd562a 100644 --- a/google/cloud/run_v2/services/executions/transports/grpc.py +++ b/google/cloud/run_v2/services/executions/transports/grpc.py @@ -323,6 +323,32 @@ def delete_execution( ) return self._stubs["delete_execution"] + @property + def cancel_execution( + self, + ) -> Callable[[execution.CancelExecutionRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel execution method over gRPC. + + Cancels an Execution. + + Returns: + Callable[[~.CancelExecutionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_execution" not in self._stubs: + self._stubs["cancel_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.run.v2.Executions/CancelExecution", + request_serializer=execution.CancelExecutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_execution"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/run_v2/services/executions/transports/grpc_asyncio.py b/google/cloud/run_v2/services/executions/transports/grpc_asyncio.py index e0b31f0..725a2d1 100644 --- a/google/cloud/run_v2/services/executions/transports/grpc_asyncio.py +++ b/google/cloud/run_v2/services/executions/transports/grpc_asyncio.py @@ -332,6 +332,34 @@ def delete_execution( ) return self._stubs["delete_execution"] + @property + def cancel_execution( + self, + ) -> Callable[ + [execution.CancelExecutionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the cancel execution method over gRPC. + + Cancels an Execution. + + Returns: + Callable[[~.CancelExecutionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_execution" not in self._stubs: + self._stubs["cancel_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.run.v2.Executions/CancelExecution", + request_serializer=execution.CancelExecutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_execution"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/run_v2/services/executions/transports/rest.py b/google/cloud/run_v2/services/executions/transports/rest.py index 5d5aaf9..b9ba730 100644 --- a/google/cloud/run_v2/services/executions/transports/rest.py +++ b/google/cloud/run_v2/services/executions/transports/rest.py @@ -72,6 +72,14 @@ class ExecutionsRestInterceptor: .. code-block:: python class MyCustomExecutionsInterceptor(ExecutionsRestInterceptor): + def pre_cancel_execution(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_execution(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_execution(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -102,6 +110,29 @@ def post_list_executions(self, response): """ + def pre_cancel_execution( + self, + request: execution.CancelExecutionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[execution.CancelExecutionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_execution + + Override in a subclass to manipulate the request or metadata + before they are sent to the Executions server. + """ + return request, metadata + + def post_cancel_execution( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_execution + + Override in a subclass to manipulate the response + after it is returned by the Executions server but before + it is returned to user code. + """ + return response + def pre_delete_execution( self, request: execution.DeleteExecutionRequest, @@ -410,6 +441,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelExecution(ExecutionsRestStub): + def __hash__(self): + return hash("CancelExecution") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: execution.CancelExecutionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel execution method over HTTP. + + Args: + request (~.execution.CancelExecutionRequest): + The request object. Request message for deleting an + Execution. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/jobs/*/executions/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_execution( + request, metadata + ) + pb_request = execution.CancelExecutionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_execution(resp) + return resp + class _DeleteExecution(ExecutionsRestStub): def __hash__(self): return hash("DeleteExecution") @@ -681,6 +811,14 @@ def __call__( resp = self._interceptor.post_list_executions(resp) return resp + @property + def cancel_execution( + self, + ) -> Callable[[execution.CancelExecutionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelExecution(self._session, self._host, self._interceptor) # type: ignore + @property def delete_execution( self, diff --git a/google/cloud/run_v2/services/jobs/client.py b/google/cloud/run_v2/services/jobs/client.py index fba209d..d7ec18f 100644 --- a/google/cloud/run_v2/services/jobs/client.py +++ b/google/cloud/run_v2/services/jobs/client.py @@ -679,11 +679,19 @@ def sample_create_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_job] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -793,11 +801,19 @@ def sample_get_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_job] - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)(?:/.*)?$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -902,11 +918,19 @@ def sample_list_jobs(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_jobs] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)$" ) + regex_match = routing_param_regex.match(request.parent) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1019,11 +1043,19 @@ def sample_update_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_job] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("job.name", request.job.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)(?:/.*)?$" ) + regex_match = routing_param_regex.match(request.job.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1138,11 +1170,19 @@ def sample_delete_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_job] - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)(?:/.*)?$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( @@ -1258,11 +1298,19 @@ def sample_run_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.run_job] - # Certain fields should be provided within the metadata header; - # add these here. 
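Across these jobs/client.py hunks the generated code stops sending a fixed ("name", ...) or ("parent", ...) routing header and instead derives an explicit location routing parameter from the resource name. Note that the named capture group renders here as (?P[^/]+); in the generated source it is presumably (?P<location>[^/]+), with the angle brackets lost in this rendering. A self-contained sketch of the same extraction, assuming only public google-api-core helpers:

.. code-block:: python

    import re

    from google.api_core import gapic_v1

    # Mirrors the generated routing logic; the "<location>" group name is an
    # assumption, restored because the angle brackets appear stripped above.
    _LOCATION_RE = re.compile(
        r"^projects/[^/]+/locations/(?P<location>[^/]+)(?:/.*)?$"
    )


    def with_location_routing(resource_name: str, metadata=()):
        """Append an x-goog-request-params location header, if one applies."""
        header_params = {}
        match = _LOCATION_RE.match(resource_name)
        if match and match.group("location"):
            header_params["location"] = match.group("location")
        if header_params:
            metadata = tuple(metadata) + (
                gapic_v1.routing_header.to_grpc_metadata(header_params),
            )
        return metadata


    # with_location_routing("projects/p/locations/us-central1/jobs/j") should
    # yield roughly (("x-goog-request-params", "location=us-central1"),).
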
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P[^/]+)(?:/.*)?$" ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) # Send the request. response = rpc( diff --git a/google/cloud/run_v2/types/__init__.py b/google/cloud/run_v2/types/__init__.py index e7e58d2..3603276 100644 --- a/google/cloud/run_v2/types/__init__.py +++ b/google/cloud/run_v2/types/__init__.py @@ -15,6 +15,7 @@ # from .condition import Condition from .execution import ( + CancelExecutionRequest, DeleteExecutionRequest, Execution, GetExecutionRequest, @@ -37,6 +38,7 @@ CloudSqlInstance, Container, ContainerPort, + EmptyDirVolumeSource, EnvVar, EnvVarSource, GRPCAction, @@ -92,6 +94,7 @@ __all__ = ( "Condition", + "CancelExecutionRequest", "DeleteExecutionRequest", "Execution", "GetExecutionRequest", @@ -110,6 +113,7 @@ "CloudSqlInstance", "Container", "ContainerPort", + "EmptyDirVolumeSource", "EnvVar", "EnvVarSource", "GRPCAction", diff --git a/google/cloud/run_v2/types/condition.py b/google/cloud/run_v2/types/condition.py index 193dd12..3455937 100644 --- a/google/cloud/run_v2/types/condition.py +++ b/google/cloud/run_v2/types/condition.py @@ -247,12 +247,15 @@ class ExecutionReason(proto.Enum): CANCELLING (4): The execution is in the process of being cancelled. + DELETED (5): + The execution was deleted. """ EXECUTION_REASON_UNDEFINED = 0 JOB_STATUS_SERVICE_POLLING_ERROR = 1 NON_ZERO_EXIT_CODE = 2 CANCELLED = 3 CANCELLING = 4 + DELETED = 5 type_: str = proto.Field( proto.STRING, diff --git a/google/cloud/run_v2/types/execution.py b/google/cloud/run_v2/types/execution.py index c556f70..0f115be 100644 --- a/google/cloud/run_v2/types/execution.py +++ b/google/cloud/run_v2/types/execution.py @@ -30,6 +30,7 @@ "ListExecutionsRequest", "ListExecutionsResponse", "DeleteExecutionRequest", + "CancelExecutionRequest", "Execution", }, ) @@ -40,11 +41,9 @@ class GetExecutionRequest(proto.Message): Attributes: name (str): - Required. The full name of the Execution. - Format: - - projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, - where {project} can be project id or number. + Required. The full name of the Execution. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. """ name: str = proto.Field( @@ -58,12 +57,11 @@ class ListExecutionsRequest(proto.Message): Attributes: parent (str): - Required. The Execution from which the - Executions should be listed. To list all - Executions across Jobs, use "-" instead of Job - name. Format: - projects/{project}/locations/{location}/jobs/{job}, - where {project} can be project id or number. + Required. The Execution from which the Executions should be + listed. To list all Executions across Jobs, use "-" instead + of Job name. Format: + ``projects/{project}/locations/{location}/jobs/{job}``, + where ``{project}`` can be project id or number. page_size (int): Maximum number of Executions to return in this call. @@ -124,11 +122,9 @@ class DeleteExecutionRequest(proto.Message): Attributes: name (str): - Required. The name of the Execution to - delete. 
Format: - - projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, - where {project} can be project id or number. + Required. The name of the Execution to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. validate_only (bool): Indicates that the request should be validated without actually deleting any @@ -153,6 +149,38 @@ class DeleteExecutionRequest(proto.Message): ) +class CancelExecutionRequest(proto.Message): + r"""Request message for deleting an Execution. + + Attributes: + name (str): + Required. The name of the Execution to cancel. Format: + ``projects/{project}/locations/{location}/jobs/{job}/executions/{execution}``, + where ``{project}`` can be project id or number. + validate_only (bool): + Indicates that the request should be + validated without actually cancelling any + resources. + etag (str): + A system-generated fingerprint for this + version of the resource. This may be used to + detect modification conflict during updates. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + class Execution(proto.Message): r"""Execution represents the configuration of a single execution. A execution an immutable resource that references a container diff --git a/google/cloud/run_v2/types/job.py b/google/cloud/run_v2/types/job.py index 6667bff..5071ea5 100644 --- a/google/cloud/run_v2/types/job.py +++ b/google/cloud/run_v2/types/job.py @@ -18,10 +18,16 @@ from typing import MutableMapping, MutableSequence from google.api import launch_stage_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.run_v2.types import condition, execution_template, vendor_settings +from google.cloud.run_v2.types import ( + condition, + execution_template, + k8s_min, + vendor_settings, +) __protobuf__ = proto.module( package="google.cloud.run.v2", @@ -243,8 +249,81 @@ class RunJobRequest(proto.Message): A system-generated fingerprint for this version of the resource. May be used to detect modification conflict during updates. + overrides (google.cloud.run_v2.types.RunJobRequest.Overrides): + Overrides specification for a given execution + of a job. If provided, overrides will be applied + to update the execution or task spec. """ + class Overrides(proto.Message): + r"""RunJob Overrides that contains Execution fields to be + overridden. + + Attributes: + container_overrides (MutableSequence[google.cloud.run_v2.types.RunJobRequest.Overrides.ContainerOverride]): + Per container override specification. + task_count (int): + Optional. The desired number of tasks the execution should + run. Will replace existing task_count value. + timeout (google.protobuf.duration_pb2.Duration): + Duration in seconds the task may be active before the system + will actively try to mark it failed and kill associated + containers. Will replace existing timeout_seconds value. + """ + + class ContainerOverride(proto.Message): + r"""Per-container override specification. + + Attributes: + name (str): + The name of the container specified as a DNS_LABEL. + args (MutableSequence[str]): + Optional. Arguments to the entrypoint. Will + replace existing args for override. 
+ env (MutableSequence[google.cloud.run_v2.types.EnvVar]): + List of environment variables to set in the + container. Will be merged with existing env for + override. + clear_args (bool): + Optional. True if the intention is to clear + out existing args list. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + env: MutableSequence[k8s_min.EnvVar] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=k8s_min.EnvVar, + ) + clear_args: bool = proto.Field( + proto.BOOL, + number=4, + ) + + container_overrides: MutableSequence[ + "RunJobRequest.Overrides.ContainerOverride" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RunJobRequest.Overrides.ContainerOverride", + ) + task_count: int = proto.Field( + proto.INT32, + number=2, + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -257,6 +336,11 @@ class RunJobRequest(proto.Message): proto.STRING, number=3, ) + overrides: Overrides = proto.Field( + proto.MESSAGE, + number=4, + message=Overrides, + ) class Job(proto.Message): diff --git a/google/cloud/run_v2/types/k8s_min.py b/google/cloud/run_v2/types/k8s_min.py index e123c70..e24e19f 100644 --- a/google/cloud/run_v2/types/k8s_min.py +++ b/google/cloud/run_v2/types/k8s_min.py @@ -33,6 +33,7 @@ "SecretVolumeSource", "VersionToPath", "CloudSqlInstance", + "EmptyDirVolumeSource", "Probe", "HTTPGetAction", "HTTPHeader", @@ -46,7 +47,7 @@ class Container(proto.Message): r"""A single application container. This specifies both the container to run, the command to run in the container and the arguments to supply to it. - Note that additional arguments may be supplied by the system to + Note that additional arguments can be supplied by the system to the container at runtime. Attributes: @@ -99,6 +100,9 @@ class Container(proto.Message): startup probe is provided, until it succeeds. Container will not be added to service endpoints if the probe fails. + depends_on (MutableSequence[str]): + Names of the containers that must start + before this container. """ name: str = proto.Field( @@ -151,6 +155,10 @@ class Container(proto.Message): number=11, message="Probe", ) + depends_on: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) class ResourceRequirements(proto.Message): @@ -207,8 +215,8 @@ class EnvVar(proto.Message): Attributes: name (str): - Required. Name of the environment variable. Must be a - C_IDENTIFIER, and mnay not exceed 32768 characters. + Required. Name of the environment variable. + Must not exceed 32768 characters. value (str): Variable references $(VAR_NAME) are expanded using the previous defined environment variables in the container and @@ -362,6 +370,10 @@ class Volume(proto.Message): for more information on how to connect Cloud SQL and Cloud Run. + This field is a member of `oneof`_ ``volume_type``. + empty_dir (google.cloud.run_v2.types.EmptyDirVolumeSource): + Ephemeral storage used as a shared volume. + This field is a member of `oneof`_ ``volume_type``. 
""" @@ -381,6 +393,12 @@ class Volume(proto.Message): oneof="volume_type", message="CloudSqlInstance", ) + empty_dir: "EmptyDirVolumeSource" = proto.Field( + proto.MESSAGE, + number=4, + oneof="volume_type", + message="EmptyDirVolumeSource", + ) class SecretVolumeSource(proto.Message): @@ -516,6 +534,60 @@ class CloudSqlInstance(proto.Message): ) +class EmptyDirVolumeSource(proto.Message): + r"""In memory (tmpfs) ephemeral storage. + It is ephemeral in the sense that when the sandbox is taken + down, the data is destroyed with it (it does not persist across + sandbox runs). + + Attributes: + medium (google.cloud.run_v2.types.EmptyDirVolumeSource.Medium): + The medium on which the data is stored. + Acceptable values today is only MEMORY or none. + When none, the default will currently be backed + by memory but could change over time. +optional + size_limit (str): + Limit on the storage usable by this EmptyDir + volume. The size limit is also applicable for + memory medium. The maximum usage on memory + medium EmptyDir would be the minimum value + between the SizeLimit specified here and the sum + of memory limits of all containers. The default + is nil which means that the limit is undefined. + More info: + + https://cloud.google.com/run/docs/configuring/in-memory-volumes#configure-volume. + Info in Kubernetes: + + https://kubernetes.io/docs/concepts/storage/volumes/#emptydir + """ + + class Medium(proto.Enum): + r"""The different types of medium supported for EmptyDir. + + Values: + MEDIUM_UNSPECIFIED (0): + When not specified, falls back to the default + implementation which is currently in memory + (this may change over time). + MEMORY (1): + Explicitly set the EmptyDir to be in memory. + Uses tmpfs. + """ + MEDIUM_UNSPECIFIED = 0 + MEMORY = 1 + + medium: Medium = proto.Field( + proto.ENUM, + number=1, + enum=Medium, + ) + size_limit: str = proto.Field( + proto.STRING, + number=2, + ) + + class Probe(proto.Message): r"""Probe describes a health check to be performed against a container to determine whether it is alive or ready to receive @@ -686,9 +758,9 @@ class GRPCAction(proto.Message): service (str): Service is the name of the service to place in the gRPC HealthCheckRequest (see - https://github.com/grpc/grpc/blob/master/doc/health-checking.md). - If this is not specified, the default behavior - is defined by gRPC. + https://github.com/grpc/grpc/blob/master/doc/health-checking.md + ). If this is not specified, the default + behavior is defined by gRPC. """ port: int = proto.Field( diff --git a/google/cloud/run_v2/types/task.py b/google/cloud/run_v2/types/task.py index 346f34c..629e811 100644 --- a/google/cloud/run_v2/types/task.py +++ b/google/cloud/run_v2/types/task.py @@ -153,7 +153,12 @@ class Task(proto.Message): should be preserved when modifying objects. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Represents time when the task - was created by the job controller. It is not + was created by the system. It is not guaranteed + to be set in happens-before order across + separate operations. + scheduled_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Represents time when the task + was scheduled to run by the system. It is not guaranteed to be set in happens-before order across separate operations. 
start_time (google.protobuf.timestamp_pb2.Timestamp): @@ -278,6 +283,11 @@ class Task(proto.Message): number=6, message=timestamp_pb2.Timestamp, ) + scheduled_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=34, + message=timestamp_pb2.Timestamp, + ) start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=27, diff --git a/google/cloud/run_v2/types/vendor_settings.py b/google/cloud/run_v2/types/vendor_settings.py index ee868f9..64c5041 100644 --- a/google/cloud/run_v2/types/vendor_settings.py +++ b/google/cloud/run_v2/types/vendor_settings.py @@ -87,12 +87,9 @@ class EncryptionKeyRevocationAction(proto.Enum): class VpcAccess(proto.Message): - r"""VPC Access settings. For more information on creating a VPC - Connector, visit - https://cloud.google.com/vpc/docs/configure-serverless-vpc-access - For information on how to configure Cloud Run with an existing - VPC Connector, visit - https://cloud.google.com/run/docs/configuring/connecting-vpc + r"""VPC Access settings. For more information on sending traffic + to a VPC network, visit + https://cloud.google.com/run/docs/configuring/connecting-vpc. Attributes: connector (str): @@ -100,8 +97,15 @@ class VpcAccess(proto.Message): Format: projects/{project}/locations/{location}/connectors/{connector}, where {project} can be project id or number. + For more information on sending traffic to a VPC + network via a connector, visit + https://cloud.google.com/run/docs/configuring/vpc-connectors. egress (google.cloud.run_v2.types.VpcAccess.VpcEgress): - Traffic VPC egress settings. + Traffic VPC egress settings. If not provided, it defaults to + PRIVATE_RANGES_ONLY. + network_interfaces (MutableSequence[google.cloud.run_v2.types.VpcAccess.NetworkInterface]): + Direct VPC egress settings. Currently only + single network interface is supported. """ class VpcEgress(proto.Enum): @@ -121,6 +125,45 @@ class VpcEgress(proto.Enum): ALL_TRAFFIC = 1 PRIVATE_RANGES_ONLY = 2 + class NetworkInterface(proto.Message): + r"""Direct VPC egress settings. + + Attributes: + network (str): + The VPC network that the Cloud Run resource + will be able to send traffic to. At least one of + network or subnetwork must be specified. If both + network and subnetwork are specified, the given + VPC subnetwork must belong to the given VPC + network. If network is not specified, it will be + looked up from the subnetwork. + subnetwork (str): + The VPC subnetwork that the Cloud Run + resource will get IPs from. At least one of + network or subnetwork must be specified. If both + network and subnetwork are specified, the given + VPC subnetwork must belong to the given VPC + network. If subnetwork is not specified, the + subnetwork with the same name with the network + will be used. + tags (MutableSequence[str]): + Network tags applied to this Cloud Run + resource. 
+ """ + + network: str = proto.Field( + proto.STRING, + number=1, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + connector: str = proto.Field( proto.STRING, number=1, @@ -130,6 +173,11 @@ class VpcEgress(proto.Enum): number=2, enum=VpcEgress, ) + network_interfaces: MutableSequence[NetworkInterface] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=NetworkInterface, + ) class BinaryAuthorization(proto.Message): diff --git a/samples/generated_samples/run_v2_generated_executions_cancel_execution_async.py b/samples/generated_samples/run_v2_generated_executions_cancel_execution_async.py new file mode 100644 index 0000000..1bc91b2 --- /dev/null +++ b/samples/generated_samples/run_v2_generated_executions_cancel_execution_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelExecution +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-run + + +# [START run_v2_generated_Executions_CancelExecution_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import run_v2 + + +async def sample_cancel_execution(): + # Create a client + client = run_v2.ExecutionsAsyncClient() + + # Initialize request argument(s) + request = run_v2.CancelExecutionRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_execution(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END run_v2_generated_Executions_CancelExecution_async] diff --git a/samples/generated_samples/run_v2_generated_executions_cancel_execution_sync.py b/samples/generated_samples/run_v2_generated_executions_cancel_execution_sync.py new file mode 100644 index 0000000..a1cd334 --- /dev/null +++ b/samples/generated_samples/run_v2_generated_executions_cancel_execution_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelExecution +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-run + + +# [START run_v2_generated_Executions_CancelExecution_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import run_v2 + + +def sample_cancel_execution(): + # Create a client + client = run_v2.ExecutionsClient() + + # Initialize request argument(s) + request = run_v2.CancelExecutionRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_execution(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END run_v2_generated_Executions_CancelExecution_sync] diff --git a/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json b/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json index da84c85..796aa84 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json @@ -11,6 +11,167 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.run_v2.ExecutionsAsyncClient", + "shortName": "ExecutionsAsyncClient" + }, + "fullName": "google.cloud.run_v2.ExecutionsAsyncClient.cancel_execution", + "method": { + "fullName": "google.cloud.run.v2.Executions.CancelExecution", + "service": { + "fullName": "google.cloud.run.v2.Executions", + "shortName": "Executions" + }, + "shortName": "CancelExecution" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.run_v2.types.CancelExecutionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_execution" + }, + "description": "Sample for CancelExecution", + "file": "run_v2_generated_executions_cancel_execution_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "run_v2_generated_Executions_CancelExecution_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "run_v2_generated_executions_cancel_execution_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.run_v2.ExecutionsClient", + "shortName": "ExecutionsClient" + }, + "fullName": "google.cloud.run_v2.ExecutionsClient.cancel_execution", + "method": { + "fullName": "google.cloud.run.v2.Executions.CancelExecution", + "service": { + "fullName": "google.cloud.run.v2.Executions", + "shortName": "Executions" + }, + "shortName": "CancelExecution" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.run_v2.types.CancelExecutionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_execution" + }, + "description": "Sample for CancelExecution", + "file": "run_v2_generated_executions_cancel_execution_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "run_v2_generated_Executions_CancelExecution_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "run_v2_generated_executions_cancel_execution_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_run_v2_keywords.py b/scripts/fixup_run_v2_keywords.py index 37b1d95..2390c76 100644 --- a/scripts/fixup_run_v2_keywords.py +++ b/scripts/fixup_run_v2_keywords.py @@ -39,6 +39,7 @@ def partition( class runCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_execution': ('name', 'validate_only', 'etag', ), 'create_job': ('parent', 'job', 'job_id', 'validate_only', ), 'create_service': ('parent', 'service', 'service_id', 'validate_only', ), 'delete_execution': ('name', 'validate_only', 'etag', ), @@ -56,7 +57,7 @@ class runCallTransformer(cst.CSTTransformer): 'list_revisions': ('parent', 'page_size', 'page_token', 'show_deleted', ), 'list_services': ('parent', 'page_size', 'page_token', 'show_deleted', ), 'list_tasks': ('parent', 'page_size', 'page_token', 'show_deleted', ), - 'run_job': ('name', 'validate_only', 'etag', ), + 'run_job': ('name', 'validate_only', 'etag', 'overrides', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_job': ('job', 'validate_only', 'allow_missing', ), diff --git a/tests/unit/gapic/run_v2/test_executions.py b/tests/unit/gapic/run_v2/test_executions.py index ca4c3dd..b6991bd 100644 --- a/tests/unit/gapic/run_v2/test_executions.py +++ b/tests/unit/gapic/run_v2/test_executions.py @@ -1632,6 +1632,232 @@ async def test_delete_execution_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + execution.CancelExecutionRequest, + dict, + ], +) +def test_cancel_execution(request_type, transport: str = "grpc"): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == execution.CancelExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_cancel_execution_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + client.cancel_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == execution.CancelExecutionRequest() + + +@pytest.mark.asyncio +async def test_cancel_execution_async( + transport: str = "grpc_asyncio", request_type=execution.CancelExecutionRequest +): + client = ExecutionsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == execution.CancelExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_cancel_execution_async_from_dict(): + await test_cancel_execution_async(request_type=dict) + + +def test_cancel_execution_field_headers(): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = execution.CancelExecutionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
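+    # The client encodes routing information into the "x-goog-request-params"
+    # metadata entry, which is what the assertion below checks for.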
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_execution_field_headers_async(): + client = ExecutionsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = execution.CancelExecutionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_execution_flattened(): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_execution( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_execution_flattened_error(): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_execution( + execution.CancelExecutionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_execution_flattened_async(): + client = ExecutionsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_execution( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_execution_flattened_error_async(): + client = ExecutionsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.cancel_execution( + execution.CancelExecutionRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2556,6 +2782,276 @@ def test_delete_execution_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + execution.CancelExecutionRequest, + dict, + ], +) +def test_cancel_execution_rest(request_type): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/jobs/sample3/executions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_execution(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_cancel_execution_rest_required_fields( + request_type=execution.CancelExecutionRequest, +): + transport_class = transports.ExecutionsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_execution._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_execution._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
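+            # Return a canned transcode result (fixed URI and method, with the
+            # request reused as both body and query params) so the call below
+            # reaches the mocked session.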
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_execution(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_execution_rest_unset_required_fields(): + transport = transports.ExecutionsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_execution._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_execution_rest_interceptors(null_interceptor): + transport = transports.ExecutionsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ExecutionsRestInterceptor(), + ) + client = ExecutionsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ExecutionsRestInterceptor, "post_cancel_execution" + ) as post, mock.patch.object( + transports.ExecutionsRestInterceptor, "pre_cancel_execution" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = execution.CancelExecutionRequest.pb( + execution.CancelExecutionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = execution.CancelExecutionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.cancel_execution( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_execution_rest_bad_request( + transport: str = "rest", request_type=execution.CancelExecutionRequest +): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/jobs/sample3/executions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
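+    # A 400 status on the mocked session should surface to the caller as
+    # core_exceptions.BadRequest.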
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_execution(request) + + +def test_cancel_execution_rest_flattened(): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/jobs/sample3/executions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.cancel_execution(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/jobs/*/executions/*}:cancel" + % client.transport._host, + args[1], + ) + + +def test_cancel_execution_rest_flattened_error(transport: str = "rest"): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_execution( + execution.CancelExecutionRequest(), + name="name_value", + ) + + +def test_cancel_execution_rest_error(): + client = ExecutionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ExecutionsGrpcTransport( @@ -2698,6 +3194,7 @@ def test_executions_base_transport(): "get_execution", "list_executions", "delete_execution", + "cancel_execution", "get_operation", "wait_operation", "delete_operation", @@ -2986,6 +3483,9 @@ def test_executions_client_transport_session_collision(transport_name): session1 = client1.transport.delete_execution._session session2 = client2.transport.delete_execution._session assert session1 != session2 + session1 = client1.transport.cancel_execution._session + session2 = client2.transport.cancel_execution._session + assert session1 != session2 def test_executions_grpc_transport_channel(): diff --git a/tests/unit/gapic/run_v2/test_jobs.py b/tests/unit/gapic/run_v2/test_jobs.py index 1d70644..a2ecdaf 100644 --- a/tests/unit/gapic/run_v2/test_jobs.py +++ b/tests/unit/gapic/run_v2/test_jobs.py @@ -744,16 +744,16 @@ async def test_create_job_async_from_dict(): await test_create_job_async(request_type=dict) -def test_create_job_field_headers(): +def test_create_job_routing_parameters(): client = JobsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = gcr_job.CreateJobRequest() - - request.parent = "parent_value" + request = gcr_job.CreateJobRequest( + **{"parent": "projects/sample1/locations/sample2"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_job), "__call__") as call: @@ -765,44 +765,9 @@ def test_create_job_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = JobsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcr_job.CreateJobRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_create_job_flattened(): @@ -1044,16 +1009,16 @@ async def test_get_job_async_from_dict(): await test_get_job_async(request_type=dict) -def test_get_job_field_headers(): +def test_get_job_routing_parameters(): client = JobsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = job.GetJobRequest() - - request.name = "name_value" + request = job.GetJobRequest( + **{"name": "projects/sample1/locations/sample2/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: @@ -1065,42 +1030,9 @@ def test_get_job_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = JobsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = job.GetJobRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_get_job_flattened(): @@ -1272,16 +1204,14 @@ async def test_list_jobs_async_from_dict(): await test_list_jobs_async(request_type=dict) -def test_list_jobs_field_headers(): +def test_list_jobs_routing_parameters(): client = JobsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = job.ListJobsRequest() - - request.parent = "parent_value" + request = job.ListJobsRequest(**{"parent": "projects/sample1/locations/sample2"}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1293,44 +1223,9 @@ def test_list_jobs_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = JobsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = job.ListJobsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - job.ListJobsResponse() - ) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_list_jobs_flattened(): @@ -1453,9 +1348,6 @@ def test_list_jobs_pager(transport_name: str = "grpc"): ) metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) pager = client.list_jobs(request={}) assert pager._metadata == metadata @@ -1688,16 +1580,16 @@ async def test_update_job_async_from_dict(): await test_update_job_async(request_type=dict) -def test_update_job_field_headers(): +def test_update_job_routing_parameters(): client = JobsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcr_job.UpdateJobRequest() - - request.job.name = "name_value" + request = gcr_job.UpdateJobRequest( + **{"job": {"name": "projects/sample1/locations/sample2/sample3"}} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_job), "__call__") as call: @@ -1709,44 +1601,9 @@ def test_update_job_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "job.name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_update_job_field_headers_async(): - client = JobsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcr_job.UpdateJobRequest() - - request.job.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "job.name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_update_job_flattened(): @@ -1914,16 +1771,16 @@ async def test_delete_job_async_from_dict(): await test_delete_job_async(request_type=dict) -def test_delete_job_field_headers(): +def test_delete_job_routing_parameters(): client = JobsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = job.DeleteJobRequest() - - request.name = "name_value" + request = job.DeleteJobRequest( + **{"name": "projects/sample1/locations/sample2/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: @@ -1935,44 +1792,9 @@ def test_delete_job_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = JobsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = job.DeleteJobRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
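+    # Routing headers are now derived from the full resource name by the
+    # generated client, so only the presence of request metadata is checked.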
+ assert kw["metadata"] def test_delete_job_flattened(): @@ -2140,16 +1962,16 @@ async def test_run_job_async_from_dict(): await test_run_job_async(request_type=dict) -def test_run_job_field_headers(): +def test_run_job_routing_parameters(): client = JobsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = job.RunJobRequest() - - request.name = "name_value" + request = job.RunJobRequest( + **{"name": "projects/sample1/locations/sample2/sample3"} + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_job), "__call__") as call: @@ -2161,44 +1983,9 @@ def test_run_job_field_headers(): _, args, _ = call.mock_calls[0] assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_job_field_headers_async(): - client = JobsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = job.RunJobRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.run_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.run_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # This test doesn't assert anything useful. 
+ assert kw["metadata"] def test_run_job_flattened(): @@ -2886,6 +2673,7 @@ def test_create_job_rest(request_type): "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -2905,6 +2693,7 @@ def test_create_job_rest(request_type): "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "max_retries": 1187, @@ -2912,7 +2701,17 @@ def test_create_job_rest(request_type): "service_account": "service_account_value", "execution_environment": 1, "encryption_key": "encryption_key_value", - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, }, }, "observed_generation": 2021, @@ -3208,6 +3007,7 @@ def test_create_job_rest_bad_request( "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -3227,6 +3027,7 @@ def test_create_job_rest_bad_request( "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "max_retries": 1187, @@ -3234,7 +3035,17 @@ def test_create_job_rest_bad_request( "service_account": "service_account_value", "execution_environment": 1, "encryption_key": "encryption_key_value", - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, }, }, "observed_generation": 2021, @@ -4035,6 +3846,7 @@ def test_update_job_rest(request_type): "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -4054,6 +3866,7 @@ def test_update_job_rest(request_type): "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "max_retries": 1187, @@ -4061,7 +3874,17 @@ def test_update_job_rest(request_type): "service_account": "service_account_value", "execution_environment": 1, "encryption_key": "encryption_key_value", - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, }, }, "observed_generation": 2021, @@ -4333,6 +4156,7 @@ def test_update_job_rest_bad_request( "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -4352,6 +4176,7 @@ def test_update_job_rest_bad_request( "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "max_retries": 1187, @@ -4359,7 +4184,17 @@ def test_update_job_rest_bad_request( "service_account": "service_account_value", "execution_environment": 1, "encryption_key": "encryption_key_value", - "vpc_access": {"connector": "connector_value", "egress": 
1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, }, }, "observed_generation": 2021, diff --git a/tests/unit/gapic/run_v2/test_services.py b/tests/unit/gapic/run_v2/test_services.py index 2a86cb4..d1cbefb 100644 --- a/tests/unit/gapic/run_v2/test_services.py +++ b/tests/unit/gapic/run_v2/test_services.py @@ -2489,7 +2489,17 @@ def test_create_service_rest(request_type): "labels": {}, "annotations": {}, "scaling": {"min_instance_count": 1920, "max_instance_count": 1922}, - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, "timeout": {"seconds": 751, "nanos": 543}, "service_account": "service_account_value", "containers": [ @@ -2536,6 +2546,7 @@ def test_create_service_rest(request_type): "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -2555,6 +2566,7 @@ def test_create_service_rest(request_type): "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "execution_environment": 1, @@ -2831,7 +2843,17 @@ def test_create_service_rest_bad_request( "labels": {}, "annotations": {}, "scaling": {"min_instance_count": 1920, "max_instance_count": 1922}, - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, "timeout": {"seconds": 751, "nanos": 543}, "service_account": "service_account_value", "containers": [ @@ -2878,6 +2900,7 @@ def test_create_service_rest_bad_request( "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -2897,6 +2920,7 @@ def test_create_service_rest_bad_request( "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "execution_environment": 1, @@ -3688,7 +3712,17 @@ def test_update_service_rest(request_type): "labels": {}, "annotations": {}, "scaling": {"min_instance_count": 1920, "max_instance_count": 1922}, - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, "timeout": {"seconds": 751, "nanos": 543}, "service_account": "service_account_value", "containers": [ @@ -3735,6 +3769,7 @@ def test_update_service_rest(request_type): "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -3754,6 +3789,7 @@ def test_update_service_rest(request_type): "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "execution_environment": 1, @@ -4008,7 +4044,17 @@ def 
test_update_service_rest_bad_request( "labels": {}, "annotations": {}, "scaling": {"min_instance_count": 1920, "max_instance_count": 1922}, - "vpc_access": {"connector": "connector_value", "egress": 1}, + "vpc_access": { + "connector": "connector_value", + "egress": 1, + "network_interfaces": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "tags": ["tags_value1", "tags_value2"], + } + ], + }, "timeout": {"seconds": 751, "nanos": 543}, "service_account": "service_account_value", "containers": [ @@ -4055,6 +4101,7 @@ def test_update_service_rest_bad_request( "grpc": {"port": 453, "service": "service_value"}, }, "startup_probe": {}, + "depends_on": ["depends_on_value1", "depends_on_value2"], } ], "volumes": [ @@ -4074,6 +4121,7 @@ def test_update_service_rest_bad_request( "cloud_sql_instance": { "instances": ["instances_value1", "instances_value2"] }, + "empty_dir": {"medium": 1, "size_limit": "size_limit_value"}, } ], "execution_environment": 1, From 6b3c75f743c2f1f0b63bc94181f02cd9bef8eadc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 2 Oct 2023 21:44:19 +0000 Subject: [PATCH 3/5] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#131) Source-Link: https://togithub.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb --- .github/.OwlBot.lock.yaml | 4 ++-- .gitignore | 1 + .kokoro/requirements.txt | 49 ++++++++++++++++++++------------------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a3da1b0..a9bdb1b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/.gitignore b/.gitignore index b4243ce..d083ea1 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 029bd34..96d593c 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + 
--hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From a8e1f905d8962ba2dfeb40064bbfc07f290cc050 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:16:24 +0000 Subject: [PATCH 4/5] chore: [autoapprove] Update `black` and `isort` to latest versions (#133) Source-Link: https://togithub.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 6 ++-- .pre-commit-config.yaml | 2 +- .../services/executions/transports/rest.py | 4 --- .../run_v2/services/jobs/transports/rest.py | 4 --- .../services/revisions/transports/rest.py | 4 --- .../services/services/transports/rest.py | 4 --- .../run_v2/services/tasks/transports/rest.py | 4 --- noxfile.py | 35 ++++++++++--------- 9 files changed, 24 insertions(+), 43 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a9bdb1b..dd98abb 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 96d593c..0332d32 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 19409cb..6a8e169 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/google/cloud/run_v2/services/executions/transports/rest.py b/google/cloud/run_v2/services/executions/transports/rest.py index b9ba730..2d1d507 100644 --- a/google/cloud/run_v2/services/executions/transports/rest.py +++ b/google/cloud/run_v2/services/executions/transports/rest.py @@ -856,7 +856,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -919,7 +918,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -986,7 +984,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -1053,7 +1050,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/run_v2/services/jobs/transports/rest.py b/google/cloud/run_v2/services/jobs/transports/rest.py index 678c03d..bd891cd 100644 --- a/google/cloud/run_v2/services/jobs/transports/rest.py +++ b/google/cloud/run_v2/services/jobs/transports/rest.py @@ -1647,7 +1647,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1710,7 +1709,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1777,7 +1775,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: @@ -1844,7 +1841,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/run_v2/services/revisions/transports/rest.py b/google/cloud/run_v2/services/revisions/transports/rest.py index 2983925..3bfc783 100644 --- a/google/cloud/run_v2/services/revisions/transports/rest.py +++ b/google/cloud/run_v2/services/revisions/transports/rest.py @@ -717,7 +717,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -780,7 +779,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -847,7 +845,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -914,7 +911,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/run_v2/services/services/transports/rest.py b/google/cloud/run_v2/services/services/transports/rest.py index 8f1b09c..29b6190 100644 --- a/google/cloud/run_v2/services/services/transports/rest.py +++ b/google/cloud/run_v2/services/services/transports/rest.py @@ -1533,7 +1533,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -1596,7 +1595,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -1663,7 +1661,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -1730,7 +1727,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. Args: diff --git a/google/cloud/run_v2/services/tasks/transports/rest.py b/google/cloud/run_v2/services/tasks/transports/rest.py index 03d7e5c..5c6d286 100644 --- a/google/cloud/run_v2/services/tasks/transports/rest.py +++ b/google/cloud/run_v2/services/tasks/transports/rest.py @@ -518,7 +518,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -581,7 +580,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -648,7 +646,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. Args: @@ -715,7 +712,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the wait operation method over HTTP. 
Args: diff --git a/noxfile.py b/noxfile.py index b59c84b..fc49ce9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -22,18 +22,19 @@ import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -41,23 +42,23 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -70,6 +71,7 @@ "lint_setup_py", "blacken", "docs", + "format", ] # Error if a python version is missing @@ -188,7 +190,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
# See https://github.com/grpc/grpc/issues/32163 From 6a908ec97e6766f199b21d368cf9dc07d89077bd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 16:52:27 -0400 Subject: [PATCH 5/5] chore(main): release 0.10.0 (#127) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 17 +++++++++++++++++ google/cloud/run/gapic_version.py | 2 +- google/cloud/run_v2/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.run.v2.json | 2 +- 5 files changed, 21 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b28fea9..7d9b009 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.9.1" + ".": "0.10.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 944f5db..1d71387 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.10.0](https://github.com/googleapis/python-run/compare/v0.9.1...v0.10.0) (2023-10-09) + + +### Features + +* Adds support for cancel Execution ([f2c76e1](https://github.com/googleapis/python-run/commit/f2c76e16a6abf333e36cba50b3270211eb762fcf)) +* Adds support for Direct VPC egress setting ([f2c76e1](https://github.com/googleapis/python-run/commit/f2c76e16a6abf333e36cba50b3270211eb762fcf)) +* Adds support for Execution overrides ([f2c76e1](https://github.com/googleapis/python-run/commit/f2c76e16a6abf333e36cba50b3270211eb762fcf)) +* New field for Task's scheduled timestamp ([f2c76e1](https://github.com/googleapis/python-run/commit/f2c76e16a6abf333e36cba50b3270211eb762fcf)) +* New fields for multi-container ([f2c76e1](https://github.com/googleapis/python-run/commit/f2c76e16a6abf333e36cba50b3270211eb762fcf)) + + +### Documentation + +* General documentation fixes. ([f2c76e1](https://github.com/googleapis/python-run/commit/f2c76e16a6abf333e36cba50b3270211eb762fcf)) +* Minor formatting ([0b5dc70](https://github.com/googleapis/python-run/commit/0b5dc70d2f8f730e2405f2891c8007bdbbc344ad)) + ## [0.9.1](https://github.com/googleapis/python-run/compare/v0.9.0...v0.9.1) (2023-08-02) diff --git a/google/cloud/run/gapic_version.py b/google/cloud/run/gapic_version.py index f7e843c..267ed2e 100644 --- a/google/cloud/run/gapic_version.py +++ b/google/cloud/run/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.1" # {x-release-please-version} +__version__ = "0.10.0" # {x-release-please-version} diff --git a/google/cloud/run_v2/gapic_version.py b/google/cloud/run_v2/gapic_version.py index f7e843c..267ed2e 100644 --- a/google/cloud/run_v2/gapic_version.py +++ b/google/cloud/run_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.1" # {x-release-please-version} +__version__ = "0.10.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json b/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json index 796aa84..5193efe 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.run.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-run", - "version": "0.1.0" + "version": "0.10.0" }, "snippets": [ {
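Two of the 0.10.0 features called out in the changelog above, cancel Execution and Execution overrides, are not exercised by the hunks shown in this part of the patch, so the sketch below is illustrative only: the resource names are placeholders and the request shapes are assumptions based on the public Cloud Run v2 API for this release rather than on code in these diffs.

    # Sketch only: resource names are placeholders and request shapes are
    # assumptions from the public Cloud Run v2 API, not from these diffs.
    from google.cloud import run_v2

    # Start a Job with per-execution overrides (new in 0.10.0): here the
    # container args and task count are overridden for this run only.
    jobs = run_v2.JobsClient()
    run_op = jobs.run_job(
        request={
            "name": "projects/my-project/locations/us-central1/jobs/my-job",
            "overrides": {
                "container_overrides": [
                    {"name": "job-container", "args": ["--retries=3"]}
                ],
                "task_count": 1,
            },
        }
    )  # long-running operation whose response is the created Execution

    # Cancel a running Execution (also new in 0.10.0); this too returns a
    # long-running operation that resolves to the cancelled Execution.
    executions = run_v2.ExecutionsClient()
    cancel_op = executions.cancel_execution(
        request={
            "name": (
                "projects/my-project/locations/us-central1/"
                "jobs/my-job/executions/my-execution"
            )
        }
    )
    print(cancel_op.result())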