diff --git a/.flake8 b/.flake8
index 29227d4c..2e438749 100644
--- a/.flake8
+++ b/.flake8
@@ -16,7 +16,7 @@
 # Generated by synthtool. DO NOT EDIT!
 [flake8]
-ignore = E203, E266, E501, W503
+ignore = E203, E231, E266, E501, W503
 exclude =
   # Exclude generated code.
   **/proto/**
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 44c78f7c..757c9dca 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3
+  digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32
+# created: 2022-05-05T22:08:23.383410683Z
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6116b837..cac51240 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,8 +5,8 @@
 #  https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
 # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.

-# @googleapis/yoshi-python @googleapis/firestore-dpe are the default owners for changes in this repo
-* @googleapis/yoshi-python @googleapis/firestore-dpe
+# @googleapis/yoshi-python @googleapis/cloud-native-db-dpes are the default owners for changes in this repo
+* @googleapis/yoshi-python @googleapis/cloud-native-db-dpes

-# @googleapis/python-samples-reviewers @googleapis/firestore-dpe are the default owners for samples changes
-/samples/ @googleapis/python-samples-reviewers @googleapis/firestore-dpe
+# @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-native-db-dpes
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
new file mode 100644
index 00000000..311ebbb8
--- /dev/null
+++ b/.github/auto-approve.yml
@@ -0,0 +1,3 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
+processes:
+  - "OwlBotTemplateChanges"
diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml
new file mode 100644
index 00000000..41bff0b5
--- /dev/null
+++ b/.github/auto-label.yaml
@@ -0,0 +1,15 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+requestsize:
+  enabled: true
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 466597e5..29601ad4 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1,2 +1,11 @@
 releaseType: python
 handleGHRelease: true
+# NOTE: this section is generated by synthtool.languages.python
+# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py
+branches:
+- branch: v1
+  handleGHRelease: true
+  releaseType: python
+- branch: v0
+  handleGHRelease: true
+  releaseType: python
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 4e1b1fb8..238b87b9 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from ubuntu:20.04
+from ubuntu:22.04

 ENV DEBIAN_FRONTEND noninteractive

@@ -60,8 +60,24 @@ RUN apt-get update \
   && rm -rf /var/lib/apt/lists/* \
   && rm -f /var/cache/apt/archives/*.deb

+###################### Install python 3.8.11
+
+# Download python 3.8.11
+RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz
+
+# Extract files
+RUN tar -xvf Python-3.8.11.tgz
+
+# Install python 3.8.11
+RUN ./Python-3.8.11/configure --enable-optimizations
+RUN make altinstall
+
+###################### Install pip
 RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
-    && python3.8 /tmp/get-pip.py \
+    && python3 /tmp/get-pip.py \
     && rm /tmp/get-pip.py

+# Test pip
+RUN python3 -m pip
+
 CMD ["python3.8"]
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 62eb5a77..46d23716 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,7 +22,7 @@ repos:
     - id: end-of-file-fixer
     - id: check-yaml
 - repo: https://github.com/psf/black
-  rev: 19.10b0
+  rev: 22.3.0
   hooks:
   - id: black
 - repo: https://gitlab.com/pycqa/flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index a5bf20b2..44c2f180 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -11,6 +11,6 @@
   "distribution_name": "google-cloud-datastore",
   "api_id": "datastore.googleapis.com",
   "default_version": "v1",
-  "codeowner_team": "@googleapis/firestore-dpe",
+  "codeowner_team": "@googleapis/cloud-native-db-dpes",
   "api_shortname": "datastore"
 }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8064ab65..5c7d5bb1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,18 @@

 [1]: https://pypi.org/project/google-cloud-datastore/#history

+## [2.6.0](https://github.com/googleapis/python-datastore/compare/v2.5.1...v2.6.0) (2022-05-05)
+
+
+### Features
+
+* expose new read_time API fields, currently only available in private preview ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece))
+
+
+### Documentation
+
+* fix type in docstring for map fields ([8d2bd17](https://github.com/googleapis/python-datastore/commit/8d2bd1788d8dc7da57ab9272b274a29082878ece))
+
 ### [2.5.1](https://github.com/googleapis/python-datastore/compare/v2.5.0...v2.5.1) (2022-03-05)

diff --git a/docs/conf.py b/docs/conf.py
index d51558be..febe857a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -361,7 +361,10 @@
 intersphinx_mapping = {
     "python": ("https://python.readthedocs.org/en/latest/", None),
     "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
-    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+    "google.api_core": (
"https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/datastore/_http.py b/google/cloud/datastore/_http.py index f92c76f0..60b8af89 100644 --- a/google/cloud/datastore/_http.py +++ b/google/cloud/datastore/_http.py @@ -53,7 +53,14 @@ def _make_request_pb(request, request_pb_type): def _request( - http, project, method, data, base_url, client_info, retry=None, timeout=None, + http, + project, + method, + data, + base_url, + client_info, + retry=None, + timeout=None, ): """Make a request over the Http transport to the Cloud Datastore API. @@ -103,7 +110,11 @@ def _request( if timeout is not None: response = requester( - url=api_url, method="POST", headers=headers, data=data, timeout=timeout, + url=api_url, + method="POST", + headers=headers, + data=data, + timeout=timeout, ) else: response = requester(url=api_url, method="POST", headers=headers, data=data) diff --git a/google/cloud/datastore/helpers.py b/google/cloud/datastore/helpers.py index 85dfc240..f976070e 100644 --- a/google/cloud/datastore/helpers.py +++ b/google/cloud/datastore/helpers.py @@ -418,7 +418,10 @@ def _get_value_from_value_pb(pb): ] elif value_type == "geo_point_value": - result = GeoPoint(pb.geo_point_value.latitude, pb.geo_point_value.longitude,) + result = GeoPoint( + pb.geo_point_value.latitude, + pb.geo_point_value.longitude, + ) elif value_type == "null_value": result = None diff --git a/google/cloud/datastore/key.py b/google/cloud/datastore/key.py index 76f18455..1a8e3645 100644 --- a/google/cloud/datastore/key.py +++ b/google/cloud/datastore/key.py @@ -361,7 +361,7 @@ def from_legacy_urlsafe(cls, urlsafe): reference.ParseFromString(raw_bytes) project = _clean_app(reference.app) - namespace = _get_empty(reference.name_space, u"") + namespace = _get_empty(reference.name_space, "") _check_database_id(reference.database_id) flat_path = _get_flat_path(reference.path) return cls(*flat_path, project=project, namespace=namespace) @@ -554,7 +554,7 @@ def _check_database_id(database_id): :raises: :exc:`ValueError` if the ``database_id`` is not empty. """ - if database_id != u"": + if database_id != "": msg = _DATABASE_ID_TEMPLATE.format(database_id) raise ValueError(msg) @@ -580,13 +580,13 @@ def _add_id_or_name(flat_path, element_pb, empty_allowed): # NOTE: Below 0 and the empty string are the "null" values for their # respective types, indicating that the value is unset. if id_ == 0: - if name == u"": + if name == "": if not empty_allowed: raise ValueError(_EMPTY_ELEMENT) else: flat_path.append(name) else: - if name == u"": + if name == "": flat_path.append(id_) else: msg = _BAD_ELEMENT_TEMPLATE.format(id_, name) diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py index aa1cc6e9..ae34a9fb 100644 --- a/google/cloud/datastore/version.py +++ b/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "2.5.1"
+__version__ = "2.6.0"
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py
index ebac62bd..0f6be699 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import functools
 import re
-from typing import Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
 import pkg_resources

 from google.api_core.client_options import ClientOptions
@@ -267,7 +267,7 @@ async def export_entities(
         request: Union[datastore_admin.ExportEntitiesRequest, dict] = None,
         *,
         project_id: str = None,
-        labels: Dict[str, str] = None,
+        labels: Mapping[str, str] = None,
         entity_filter: datastore_admin.EntityFilter = None,
         output_url_prefix: str = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
@@ -285,7 +285,6 @@ async def export_entities(
         before completion it may leave partial data behind in Google
         Cloud Storage.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -321,7 +320,7 @@ def sample_export_entities():
                 This corresponds to the ``project_id`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-            labels (:class:`Dict[str, str]`):
+            labels (:class:`Mapping[str, str]`):
                 Client-assigned labels.
                 This corresponds to the ``labels`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -411,7 +410,12 @@ def sample_export_entities():
             )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -429,7 +433,7 @@ async def import_entities(
         request: Union[datastore_admin.ImportEntitiesRequest, dict] = None,
         *,
         project_id: str = None,
-        labels: Dict[str, str] = None,
+        labels: Mapping[str, str] = None,
         input_url: str = None,
         entity_filter: datastore_admin.EntityFilter = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
@@ -444,7 +448,6 @@ async def import_entities(
         is possible that a subset of the data has already been
         imported to Cloud Datastore.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -480,7 +483,7 @@ def sample_import_entities():
                 This corresponds to the ``project_id`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-            labels (:class:`Dict[str, str]`):
+            labels (:class:`Mapping[str, str]`):
                 Client-assigned labels.
                 This corresponds to the ``labels`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -574,7 +577,12 @@ def sample_import_entities():
             )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -611,7 +619,6 @@ async def create_index(

         Indexes with a single property cannot be created.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -665,7 +672,12 @@ def sample_create_index():
             )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -701,7 +713,6 @@ async def delete_index(
         [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]
         again.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -755,7 +766,12 @@ def sample_delete_index():
             )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -832,7 +848,12 @@ def sample_get_index():
             )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -850,7 +871,6 @@ async def list_indexes(
         the list of indexes and may occasionally return stale
         results.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -911,12 +931,20 @@ def sample_list_indexes():
             )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListIndexesAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )

         # Done; return the response.
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py
index 4f4f9211..8f5364a7 100644
--- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py
+++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import os
 import re
-from typing import Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
 import pkg_resources

 from google.api_core import client_options as client_options_lib
@@ -59,7 +59,10 @@ class DatastoreAdminClientMeta(type):
     _transport_registry["grpc"] = DatastoreAdminGrpcTransport
     _transport_registry["grpc_asyncio"] = DatastoreAdminGrpcAsyncIOTransport

-    def get_transport_class(cls, label: str = None,) -> Type[DatastoreAdminTransport]:
+    def get_transport_class(
+        cls,
+        label: str = None,
+    ) -> Type[DatastoreAdminTransport]:
         """Returns an appropriate transport class.
         Args:
@@ -219,7 +222,9 @@ def transport(self) -> DatastoreAdminTransport:
         return self._transport

     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Returns a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -232,9 +237,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -243,9 +252,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -254,9 +267,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -265,10 +282,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Returns a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )

     @staticmethod
@@ -447,7 +468,7 @@ def export_entities(
         request: Union[datastore_admin.ExportEntitiesRequest, dict] = None,
         *,
         project_id: str = None,
-        labels: Dict[str, str] = None,
+        labels: Mapping[str, str] = None,
         entity_filter: datastore_admin.EntityFilter = None,
         output_url_prefix: str = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
@@ -465,7 +486,6 @@ def export_entities(
         before completion it may leave partial data behind in Google
         Cloud Storage.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -501,7 +521,7 @@ def sample_export_entities():
                 This corresponds to the ``project_id`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-            labels (Dict[str, str]):
+            labels (Mapping[str, str]):
                 Client-assigned labels.
                 This corresponds to the ``labels`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -590,7 +610,12 @@ def sample_export_entities():
         rpc = self._transport._wrapped_methods[self._transport.export_entities]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation.from_gapic(
@@ -608,7 +633,7 @@ def import_entities(
         request: Union[datastore_admin.ImportEntitiesRequest, dict] = None,
         *,
         project_id: str = None,
-        labels: Dict[str, str] = None,
+        labels: Mapping[str, str] = None,
         input_url: str = None,
         entity_filter: datastore_admin.EntityFilter = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
@@ -623,7 +648,6 @@ def import_entities(
         is possible that a subset of the data has already been
         imported to Cloud Datastore.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -659,7 +683,7 @@ def sample_import_entities():
                 This corresponds to the ``project_id`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-            labels (Dict[str, str]):
+            labels (Mapping[str, str]):
                 Client-assigned labels.
                 This corresponds to the ``labels`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -752,7 +776,12 @@ def sample_import_entities():
         rpc = self._transport._wrapped_methods[self._transport.import_entities]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation.from_gapic(
@@ -789,7 +818,6 @@ def create_index(

         Indexes with a single property cannot be created.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -844,7 +872,12 @@ def sample_create_index():
         rpc = self._transport._wrapped_methods[self._transport.create_index]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation.from_gapic(
@@ -880,7 +913,6 @@ def delete_index(
         [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex]
         again.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -935,7 +967,12 @@ def sample_delete_index():
         rpc = self._transport._wrapped_methods[self._transport.delete_index]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation.from_gapic(
@@ -1003,7 +1040,12 @@ def sample_get_index():
         rpc = self._transport._wrapped_methods[self._transport.get_index]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -1021,7 +1063,6 @@ def list_indexes(
         the list of indexes and may occasionally return stale
         results.

-
         .. code-block:: python

             from google.cloud import datastore_admin_v1
@@ -1073,12 +1114,20 @@ def sample_list_indexes():
         rpc = self._transport._wrapped_methods[self._transport.list_indexes]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListIndexesPager(
response = pagers.ListIndexesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 1b47ae2b..618a990c 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -86,6 +86,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -127,16 +128,24 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.export_entities: gapic_v1.method.wrap_method( - self.export_entities, default_timeout=60.0, client_info=client_info, + self.export_entities, + default_timeout=60.0, + client_info=client_info, ), self.import_entities: gapic_v1.method.wrap_method( - self.import_entities, default_timeout=60.0, client_info=client_info, + self.import_entities, + default_timeout=60.0, + client_info=client_info, ), self.create_index: gapic_v1.method.wrap_method( - self.create_index, default_timeout=60.0, client_info=client_info, + self.create_index, + default_timeout=60.0, + client_info=client_info, ), self.delete_index: gapic_v1.method.wrap_method( - self.delete_index, default_timeout=60.0, client_info=client_info, + self.delete_index, + default_timeout=60.0, + client_info=client_info, ), self.get_index: gapic_v1.method.wrap_method( self.get_index, @@ -173,9 +182,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @@ -240,5 +249,9 @@ def list_indexes( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DatastoreAdminTransport",) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index e27734f8..e4193366 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -285,8 +285,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -507,5 +506,9 @@ def list_indexes( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DatastoreAdminGrpcTransport",) diff --git a/google/cloud/datastore_admin_v1/types/__init__.py b/google/cloud/datastore_admin_v1/types/__init__.py index fbc4f65f..f194f3cf 100644 --- a/google/cloud/datastore_admin_v1/types/__init__.py +++ b/google/cloud/datastore_admin_v1/types/__init__.py @@ -31,7 +31,9 @@ Progress, OperationType, ) -from .index import Index +from .index import ( + Index, +) from .migration import ( MigrationProgressEvent, MigrationStateEvent, diff --git a/google/cloud/datastore_admin_v1/types/datastore_admin.py b/google/cloud/datastore_admin_v1/types/datastore_admin.py index 4e5ad0da..82bacec1 100644 --- a/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -64,7 +64,7 @@ class CommonMetadata(proto.Message): operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. - labels (Dict[str, str]): + labels (Mapping[str, str]): The client-assigned labels which were provided when the operation was created. May also include additional labels. @@ -83,11 +83,31 @@ class State(proto.Enum): FAILED = 6 CANCELLED = 7 - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - operation_type = proto.Field(proto.ENUM, number=3, enum="OperationType",) - labels = proto.MapField(proto.STRING, proto.STRING, number=4,) - state = proto.Field(proto.ENUM, number=5, enum=State,) + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + operation_type = proto.Field( + proto.ENUM, + number=3, + enum="OperationType", + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) class Progress(proto.Message): @@ -103,8 +123,14 @@ class Progress(proto.Message): unavailable. """ - work_completed = proto.Field(proto.INT64, number=1,) - work_estimated = proto.Field(proto.INT64, number=2,) + work_completed = proto.Field( + proto.INT64, + number=1, + ) + work_estimated = proto.Field( + proto.INT64, + number=2, + ) class ExportEntitiesRequest(proto.Message): @@ -115,7 +141,7 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is @@ -145,10 +171,24 @@ class ExportEntitiesRequest(proto.Message): without conflict. 
""" - project_id = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) - entity_filter = proto.Field(proto.MESSAGE, number=3, message="EntityFilter",) - output_url_prefix = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=3, + message="EntityFilter", + ) + output_url_prefix = proto.Field( + proto.STRING, + number=4, + ) class ImportEntitiesRequest(proto.Message): @@ -159,7 +199,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Dict[str, str]): + labels (Mapping[str, str]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage @@ -185,10 +225,24 @@ class ImportEntitiesRequest(proto.Message): specified then all entities from the export are imported. """ - project_id = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) - input_url = proto.Field(proto.STRING, number=3,) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) + project_id = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + input_url = proto.Field( + proto.STRING, + number=3, + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) class ExportEntitiesResponse(proto.Message): @@ -204,7 +258,10 @@ class ExportEntitiesResponse(proto.Message): Only present if the operation completed successfully. """ - output_url = proto.Field(proto.STRING, number=1,) + output_url = proto.Field( + proto.STRING, + number=1, + ) class ExportEntitiesMetadata(proto.Message): @@ -230,11 +287,30 @@ class ExportEntitiesMetadata(proto.Message): [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. """ - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - output_url_prefix = proto.Field(proto.STRING, number=5,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + progress_bytes = proto.Field( + proto.MESSAGE, + number=3, + message="Progress", + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) + output_url_prefix = proto.Field( + proto.STRING, + number=5, + ) class ImportEntitiesMetadata(proto.Message): @@ -259,11 +335,30 @@ class ImportEntitiesMetadata(proto.Message): field. 
""" - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=3, message="Progress",) - entity_filter = proto.Field(proto.MESSAGE, number=4, message="EntityFilter",) - input_url = proto.Field(proto.STRING, number=5,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + progress_bytes = proto.Field( + proto.MESSAGE, + number=3, + message="Progress", + ) + entity_filter = proto.Field( + proto.MESSAGE, + number=4, + message="EntityFilter", + ) + input_url = proto.Field( + proto.STRING, + number=5, + ) class EntityFilter(proto.Message): @@ -298,8 +393,14 @@ class EntityFilter(proto.Message): Each namespace in this list must be unique. """ - kinds = proto.RepeatedField(proto.STRING, number=1,) - namespace_ids = proto.RepeatedField(proto.STRING, number=2,) + kinds = proto.RepeatedField( + proto.STRING, + number=1, + ) + namespace_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) class CreateIndexRequest(proto.Message): @@ -316,8 +417,15 @@ class CreateIndexRequest(proto.Message): deleted. """ - project_id = proto.Field(proto.STRING, number=1,) - index = proto.Field(proto.MESSAGE, number=3, message=gda_index.Index,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index = proto.Field( + proto.MESSAGE, + number=3, + message=gda_index.Index, + ) class DeleteIndexRequest(proto.Message): @@ -331,8 +439,14 @@ class DeleteIndexRequest(proto.Message): The resource ID of the index to delete. """ - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class GetIndexRequest(proto.Message): @@ -346,8 +460,14 @@ class GetIndexRequest(proto.Message): The resource ID of the index to get. """ - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class ListIndexesRequest(proto.Message): @@ -367,10 +487,22 @@ class ListIndexesRequest(proto.Message): request, if any. """ - project_id = proto.Field(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=3,) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) class ListIndexesResponse(proto.Message): @@ -388,8 +520,15 @@ class ListIndexesResponse(proto.Message): def raw_page(self): return self - indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=gda_index.Index,) - next_page_token = proto.Field(proto.STRING, number=2,) + indexes = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gda_index.Index, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class IndexOperationMetadata(proto.Message): @@ -407,9 +546,20 @@ class IndexOperationMetadata(proto.Message): acting on. 
""" - common = proto.Field(proto.MESSAGE, number=1, message="CommonMetadata",) - progress_entities = proto.Field(proto.MESSAGE, number=2, message="Progress",) - index_id = proto.Field(proto.STRING, number=3,) + common = proto.Field( + proto.MESSAGE, + number=1, + message="CommonMetadata", + ) + progress_entities = proto.Field( + proto.MESSAGE, + number=2, + message="Progress", + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) class DatastoreFirestoreMigrationMetadata(proto.Message): @@ -431,8 +581,16 @@ class DatastoreFirestoreMigrationMetadata(proto.Message): Datastore to Cloud Firestore in Datastore mode. """ - migration_state = proto.Field(proto.ENUM, number=1, enum=migration.MigrationState,) - migration_step = proto.Field(proto.ENUM, number=2, enum=migration.MigrationStep,) + migration_state = proto.Field( + proto.ENUM, + number=1, + enum=migration.MigrationState, + ) + migration_step = proto.Field( + proto.ENUM, + number=2, + enum=migration.MigrationStep, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_admin_v1/types/index.py b/google/cloud/datastore_admin_v1/types/index.py index 8d50f03a..e00c3bb7 100644 --- a/google/cloud/datastore_admin_v1/types/index.py +++ b/google/cloud/datastore_admin_v1/types/index.py @@ -16,7 +16,12 @@ import proto # type: ignore -__protobuf__ = proto.module(package="google.datastore.admin.v1", manifest={"Index",},) +__protobuf__ = proto.module( + package="google.datastore.admin.v1", + manifest={ + "Index", + }, +) class Index(proto.Message): @@ -73,15 +78,43 @@ class IndexedProperty(proto.Message): DIRECTION_UNSPECIFIED. """ - name = proto.Field(proto.STRING, number=1,) - direction = proto.Field(proto.ENUM, number=2, enum="Index.Direction",) + name = proto.Field( + proto.STRING, + number=1, + ) + direction = proto.Field( + proto.ENUM, + number=2, + enum="Index.Direction", + ) - project_id = proto.Field(proto.STRING, number=1,) - index_id = proto.Field(proto.STRING, number=3,) - kind = proto.Field(proto.STRING, number=4,) - ancestor = proto.Field(proto.ENUM, number=5, enum=AncestorMode,) - properties = proto.RepeatedField(proto.MESSAGE, number=6, message=IndexedProperty,) - state = proto.Field(proto.ENUM, number=7, enum=State,) + project_id = proto.Field( + proto.STRING, + number=1, + ) + index_id = proto.Field( + proto.STRING, + number=3, + ) + kind = proto.Field( + proto.STRING, + number=4, + ) + ancestor = proto.Field( + proto.ENUM, + number=5, + enum=AncestorMode, + ) + properties = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=IndexedProperty, + ) + state = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_admin_v1/types/migration.py b/google/cloud/datastore_admin_v1/types/migration.py index 18cdd8d6..97d4145f 100644 --- a/google/cloud/datastore_admin_v1/types/migration.py +++ b/google/cloud/datastore_admin_v1/types/migration.py @@ -57,7 +57,11 @@ class MigrationStateEvent(proto.Message): The new state of the migration. 
""" - state = proto.Field(proto.ENUM, number=1, enum="MigrationState",) + state = proto.Field( + proto.ENUM, + number=1, + enum="MigrationState", + ) class MigrationProgressEvent(proto.Message): @@ -105,7 +109,9 @@ class PrepareStepDetails(proto.Message): """ concurrency_mode = proto.Field( - proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + proto.ENUM, + number=1, + enum="MigrationProgressEvent.ConcurrencyMode", ) class RedirectWritesStepDetails(proto.Message): @@ -117,12 +123,21 @@ class RedirectWritesStepDetails(proto.Message): """ concurrency_mode = proto.Field( - proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + proto.ENUM, + number=1, + enum="MigrationProgressEvent.ConcurrencyMode", ) - step = proto.Field(proto.ENUM, number=1, enum="MigrationStep",) + step = proto.Field( + proto.ENUM, + number=1, + enum="MigrationStep", + ) prepare_step_details = proto.Field( - proto.MESSAGE, number=2, oneof="step_details", message=PrepareStepDetails, + proto.MESSAGE, + number=2, + oneof="step_details", + message=PrepareStepDetails, ) redirect_writes_step_details = proto.Field( proto.MESSAGE, diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index c6f8431b..ab4d60cc 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -34,6 +34,7 @@ from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport from .client import DatastoreClient @@ -309,7 +310,12 @@ def sample_lookup(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -381,7 +387,12 @@ def sample_run_query(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -465,7 +476,12 @@ def sample_begin_transaction(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -485,7 +501,6 @@ async def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. - .. code-block:: python from google.cloud import datastore_v1 @@ -596,7 +611,12 @@ def sample_commit(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -692,7 +712,12 @@ def sample_rollback(): ) # Send the request. 
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -710,7 +735,6 @@ async def allocate_ids(
         r"""Allocates IDs for the given keys, which is useful for
         referencing an entity before it is inserted.

-
         .. code-block:: python

             from google.cloud import datastore_v1
@@ -790,7 +814,12 @@ def sample_allocate_ids():
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -808,7 +837,6 @@ async def reserve_ids(
         r"""Prevents the supplied keys' IDs from being
         auto-allocated by Cloud Datastore.

-
         .. code-block:: python

             from google.cloud import datastore_v1
@@ -897,7 +925,12 @@ def sample_reserve_ids():
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -911,7 +944,9 @@ async def __aexit__(self, exc_type, exc, tb):

 try:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version,
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-datastore",
+        ).version,
     )
 except pkg_resources.DistributionNotFound:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py
index 49c741de..5b012a2f 100644
--- a/google/cloud/datastore_v1/services/datastore/client.py
+++ b/google/cloud/datastore_v1/services/datastore/client.py
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import os
 import re
-from typing import Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
 import pkg_resources

 from google.api_core import client_options as client_options_lib
@@ -37,6 +37,7 @@
 from google.cloud.datastore_v1.types import datastore
 from google.cloud.datastore_v1.types import entity
 from google.cloud.datastore_v1.types import query
+from google.protobuf import timestamp_pb2  # type: ignore
 from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO
 from .transports.grpc import DatastoreGrpcTransport
 from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport
@@ -54,7 +55,10 @@ class DatastoreClientMeta(type):
     _transport_registry["grpc"] = DatastoreGrpcTransport
     _transport_registry["grpc_asyncio"] = DatastoreGrpcAsyncIOTransport

-    def get_transport_class(cls, label: str = None,) -> Type[DatastoreTransport]:
+    def get_transport_class(
+        cls,
+        label: str = None,
+    ) -> Type[DatastoreTransport]:
         """Returns an appropriate transport class.
         Args:
@@ -166,7 +170,9 @@ def transport(self) -> DatastoreTransport:
         return self._transport

     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Returns a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -179,9 +185,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -190,9 +200,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -201,9 +215,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -212,10 +230,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Returns a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )

     @staticmethod
@@ -486,7 +508,12 @@ def sample_lookup():
         rpc = self._transport._wrapped_methods[self._transport.lookup]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -549,7 +576,12 @@ def sample_run_query():
         rpc = self._transport._wrapped_methods[self._transport.run_query]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -633,7 +665,12 @@ def sample_begin_transaction():
         rpc = self._transport._wrapped_methods[self._transport.begin_transaction]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -653,7 +690,6 @@ def commit(
         r"""Commits a transaction, optionally creating, deleting
         or modifying some entities.

-
         .. code-block:: python

             from google.cloud import datastore_v1
@@ -764,7 +800,12 @@ def sample_commit():
         rpc = self._transport._wrapped_methods[self._transport.commit]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -860,7 +901,12 @@ def sample_rollback():
         rpc = self._transport._wrapped_methods[self._transport.rollback]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -878,7 +924,6 @@ def allocate_ids(
         r"""Allocates IDs for the given keys, which is useful for
         referencing an entity before it is inserted.

-
         .. code-block:: python

             from google.cloud import datastore_v1
@@ -958,7 +1003,12 @@ def sample_allocate_ids():
         rpc = self._transport._wrapped_methods[self._transport.allocate_ids]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -976,7 +1026,6 @@ def reserve_ids(
         r"""Prevents the supplied keys' IDs from being
         auto-allocated by Cloud Datastore.

-
         .. code-block:: python

             from google.cloud import datastore_v1
@@ -1055,7 +1104,12 @@ def sample_reserve_ids():
         rpc = self._transport._wrapped_methods[self._transport.reserve_ids]

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -1076,7 +1130,9 @@ def __exit__(self, type, value, traceback):

 try:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version,
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-datastore",
+        ).version,
     )
 except pkg_resources.DistributionNotFound:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py
index 487a1a45..22a4c167 100644
--- a/google/cloud/datastore_v1/services/datastore/transports/base.py
+++ b/google/cloud/datastore_v1/services/datastore/transports/base.py
@@ -29,7 +29,9 @@

 try:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-        gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version,
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-datastore",
+        ).version,
     )
 except pkg_resources.DistributionNotFound:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
@@ -81,6 +83,7 @@ def __init__(
             always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                 be used for service account credentials.
         """
+
         # Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host: host += ":443" @@ -152,16 +155,24 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, default_timeout=60.0, client_info=client_info, + self.begin_transaction, + default_timeout=60.0, + client_info=client_info, ), self.commit: gapic_v1.method.wrap_method( - self.commit, default_timeout=60.0, client_info=client_info, + self.commit, + default_timeout=60.0, + client_info=client_info, ), self.rollback: gapic_v1.method.wrap_method( - self.rollback, default_timeout=60.0, client_info=client_info, + self.rollback, + default_timeout=60.0, + client_info=client_info, ), self.allocate_ids: gapic_v1.method.wrap_method( - self.allocate_ids, default_timeout=60.0, client_info=client_info, + self.allocate_ids, + default_timeout=60.0, + client_info=client_info, ), self.reserve_ids: gapic_v1.method.wrap_method( self.reserve_ids, @@ -183,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @@ -255,5 +266,9 @@ def reserve_ids( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DatastoreTransport",) diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index 410aa89d..16938b68 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -230,8 +230,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -420,5 +419,9 @@ def reserve_ids( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DatastoreGrpcTransport",) diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index e77ad1e9..f4907298 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -17,6 +17,7 @@ from google.cloud.datastore_v1.types import entity from google.cloud.datastore_v1.types import query as gd_query +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -58,9 +59,20 @@ class LookupRequest(proto.Message): Required. Keys of entities to look up. """ - project_id = proto.Field(proto.STRING, number=8,) - read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",) - keys = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + read_options = proto.Field( + proto.MESSAGE, + number=1, + message="ReadOptions", + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=entity.Key, + ) class LookupResponse(proto.Message): @@ -81,13 +93,31 @@ class LookupResponse(proto.Message): resource constraints. The order of results in this field is undefined and has no relation to the order of the keys in the input. 
+        read_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which these entities were read or
+            found missing.
     """

-    found = proto.RepeatedField(proto.MESSAGE, number=1, message=gd_query.EntityResult,)
+    found = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gd_query.EntityResult,
+    )
     missing = proto.RepeatedField(
-        proto.MESSAGE, number=2, message=gd_query.EntityResult,
+        proto.MESSAGE,
+        number=2,
+        message=gd_query.EntityResult,
+    )
+    deferred = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message=entity.Key,
+    )
+    read_time = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=timestamp_pb2.Timestamp,
     )
-    deferred = proto.RepeatedField(proto.MESSAGE, number=3, message=entity.Key,)


 class RunQueryRequest(proto.Message):
@@ -123,14 +153,31 @@ class RunQueryRequest(proto.Message):
             This field is a member of `oneof`_ ``query_type``.
     """

-    project_id = proto.Field(proto.STRING, number=8,)
-    partition_id = proto.Field(proto.MESSAGE, number=2, message=entity.PartitionId,)
-    read_options = proto.Field(proto.MESSAGE, number=1, message="ReadOptions",)
+    project_id = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    partition_id = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=entity.PartitionId,
+    )
+    read_options = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="ReadOptions",
+    )
     query = proto.Field(
-        proto.MESSAGE, number=3, oneof="query_type", message=gd_query.Query,
+        proto.MESSAGE,
+        number=3,
+        oneof="query_type",
+        message=gd_query.Query,
     )
     gql_query = proto.Field(
-        proto.MESSAGE, number=7, oneof="query_type", message=gd_query.GqlQuery,
+        proto.MESSAGE,
+        number=7,
+        oneof="query_type",
+        message=gd_query.GqlQuery,
     )


@@ -146,8 +193,16 @@ class RunQueryResponse(proto.Message):
             was set.
     """

-    batch = proto.Field(proto.MESSAGE, number=1, message=gd_query.QueryResultBatch,)
-    query = proto.Field(proto.MESSAGE, number=2, message=gd_query.Query,)
+    batch = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gd_query.QueryResultBatch,
+    )
+    query = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gd_query.Query,
+    )


 class BeginTransactionRequest(proto.Message):
@@ -162,9 +217,14 @@ class BeginTransactionRequest(proto.Message):
             Options for a new transaction.
     """

-    project_id = proto.Field(proto.STRING, number=8,)
+    project_id = proto.Field(
+        proto.STRING,
+        number=8,
+    )
     transaction_options = proto.Field(
-        proto.MESSAGE, number=10, message="TransactionOptions",
+        proto.MESSAGE,
+        number=10,
+        message="TransactionOptions",
     )


 class BeginTransactionResponse(proto.Message):
@@ -177,7 +237,10 @@ class BeginTransactionResponse(proto.Message):
             The transaction identifier (always present).
     """

-    transaction = proto.Field(proto.BYTES, number=1,)
+    transaction = proto.Field(
+        proto.BYTES,
+        number=1,
+    )


 class RollbackRequest(proto.Message):
@@ -193,8 +256,14 @@ class RollbackRequest(proto.Message):
             [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction].
""" - project_id = proto.Field(proto.STRING, number=8,) - transaction = proto.Field(proto.BYTES, number=1,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + transaction = proto.Field( + proto.BYTES, + number=1, + ) class RollbackResponse(proto.Message): @@ -248,10 +317,25 @@ class Mode(proto.Enum): TRANSACTIONAL = 1 NON_TRANSACTIONAL = 2 - project_id = proto.Field(proto.STRING, number=8,) - mode = proto.Field(proto.ENUM, number=5, enum=Mode,) - transaction = proto.Field(proto.BYTES, number=1, oneof="transaction_selector",) - mutations = proto.RepeatedField(proto.MESSAGE, number=6, message="Mutation",) + project_id = proto.Field( + proto.STRING, + number=8, + ) + mode = proto.Field( + proto.ENUM, + number=5, + enum=Mode, + ) + transaction = proto.Field( + proto.BYTES, + number=1, + oneof="transaction_selector", + ) + mutations = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Mutation", + ) class CommitResponse(proto.Message): @@ -266,12 +350,25 @@ class CommitResponse(proto.Message): index_updates (int): The number of index entries updated during the commit, or zero if none were updated. + commit_time (google.protobuf.timestamp_pb2.Timestamp): + The transaction commit timestamp. Not set for + non-transactional commits. """ mutation_results = proto.RepeatedField( - proto.MESSAGE, number=3, message="MutationResult", + proto.MESSAGE, + number=3, + message="MutationResult", + ) + index_updates = proto.Field( + proto.INT32, + number=4, + ) + commit_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, ) - index_updates = proto.Field(proto.INT32, number=4,) class AllocateIdsRequest(proto.Message): @@ -288,8 +385,15 @@ class AllocateIdsRequest(proto.Message): reserved/read-only. """ - project_id = proto.Field(proto.STRING, number=8,) - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class AllocateIdsResponse(proto.Message): @@ -303,7 +407,11 @@ class AllocateIdsResponse(proto.Message): with a newly allocated ID. """ - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class ReserveIdsRequest(proto.Message): @@ -323,9 +431,19 @@ class ReserveIdsRequest(proto.Message): auto-allocated. """ - project_id = proto.Field(proto.STRING, number=8,) - database_id = proto.Field(proto.STRING, number=9,) - keys = proto.RepeatedField(proto.MESSAGE, number=1, message=entity.Key,) + project_id = proto.Field( + proto.STRING, + number=8, + ) + database_id = proto.Field( + proto.STRING, + number=9, + ) + keys = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity.Key, + ) class ReserveIdsResponse(proto.Message): @@ -376,23 +494,50 @@ class Mutation(proto.Message): current version on the server, the mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the entity that this + mutation is being applied to. If this does not + match the current update time on the server, the + mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. 
""" insert = proto.Field( - proto.MESSAGE, number=4, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=4, + oneof="operation", + message=entity.Entity, ) update = proto.Field( - proto.MESSAGE, number=5, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=5, + oneof="operation", + message=entity.Entity, ) upsert = proto.Field( - proto.MESSAGE, number=6, oneof="operation", message=entity.Entity, + proto.MESSAGE, + number=6, + oneof="operation", + message=entity.Entity, ) delete = proto.Field( - proto.MESSAGE, number=7, oneof="operation", message=entity.Key, + proto.MESSAGE, + number=7, + oneof="operation", + message=entity.Key, ) base_version = proto.Field( - proto.INT64, number=8, oneof="conflict_detection_strategy", + proto.INT64, + number=8, + oneof="conflict_detection_strategy", + ) + update_time = proto.Field( + proto.MESSAGE, + number=11, + oneof="conflict_detection_strategy", + message=timestamp_pb2.Timestamp, ) @@ -412,15 +557,37 @@ class MutationResult(proto.Message): greater than the version of any previous entity and less than the version of any possible future entity. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the entity on the server + after processing the mutation. If the mutation + doesn't change anything on the server, then the + timestamp will be the update timestamp of the + current entity. This field will not be set after + a 'delete'. conflict_detected (bool): Whether a conflict was detected for this mutation. Always false when a conflict detection strategy field is not set in the mutation. """ - key = proto.Field(proto.MESSAGE, number=3, message=entity.Key,) - version = proto.Field(proto.INT64, number=4,) - conflict_detected = proto.Field(proto.BOOL, number=5,) + key = proto.Field( + proto.MESSAGE, + number=3, + message=entity.Key, + ) + version = proto.Field( + proto.INT64, + number=4, + ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + conflict_detected = proto.Field( + proto.BOOL, + number=5, + ) class ReadOptions(proto.Message): @@ -444,6 +611,13 @@ class ReadOptions(proto.Message): transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads entities as they were at the given + time. This may not be older than 270 seconds. + This value is only supported for Cloud Firestore + in Datastore mode. + This field is a member of `oneof`_ ``consistency_type``. """ @@ -454,9 +628,22 @@ class ReadConsistency(proto.Enum): EVENTUAL = 2 read_consistency = proto.Field( - proto.ENUM, number=1, oneof="consistency_type", enum=ReadConsistency, + proto.ENUM, + number=1, + oneof="consistency_type", + enum=ReadConsistency, + ) + transaction = proto.Field( + proto.BYTES, + number=2, + oneof="consistency_type", + ) + read_time = proto.Field( + proto.MESSAGE, + number=4, + oneof="consistency_type", + message=timestamp_pb2.Timestamp, ) - transaction = proto.Field(proto.BYTES, number=2, oneof="consistency_type",) class TransactionOptions(proto.Message): @@ -496,14 +683,38 @@ class ReadWrite(proto.Message): being retried. """ - previous_transaction = proto.Field(proto.BYTES, number=1,) + previous_transaction = proto.Field( + proto.BYTES, + number=1, + ) class ReadOnly(proto.Message): r"""Options specific to read-only transactions. 
+ + Attributes: + read_time (google.protobuf.timestamp_pb2.Timestamp): + Reads entities at the given time. + This may not be older than 60 seconds. """ - read_write = proto.Field(proto.MESSAGE, number=1, oneof="mode", message=ReadWrite,) - read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) + read_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + read_write = proto.Field( + proto.MESSAGE, + number=1, + oneof="mode", + message=ReadWrite, + ) + read_only = proto.Field( + proto.MESSAGE, + number=2, + oneof="mode", + message=ReadOnly, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_v1/types/entity.py b/google/cloud/datastore_v1/types/entity.py index 1c432ee6..e949a56a 100644 --- a/google/cloud/datastore_v1/types/entity.py +++ b/google/cloud/datastore_v1/types/entity.py @@ -22,7 +22,13 @@ __protobuf__ = proto.module( package="google.datastore.v1", - manifest={"PartitionId", "Key", "ArrayValue", "Value", "Entity",}, + manifest={ + "PartitionId", + "Key", + "ArrayValue", + "Value", + "Entity", + }, ) @@ -56,8 +62,14 @@ class PartitionId(proto.Message): which the entities belong. """ - project_id = proto.Field(proto.STRING, number=2,) - namespace_id = proto.Field(proto.STRING, number=4,) + project_id = proto.Field( + proto.STRING, + number=2, + ) + namespace_id = proto.Field( + proto.STRING, + number=4, + ) class Key(proto.Message): @@ -127,12 +139,31 @@ class PathElement(proto.Message): This field is a member of `oneof`_ ``id_type``. """ - kind = proto.Field(proto.STRING, number=1,) - id = proto.Field(proto.INT64, number=2, oneof="id_type",) - name = proto.Field(proto.STRING, number=3, oneof="id_type",) - - partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) + kind = proto.Field( + proto.STRING, + number=1, + ) + id = proto.Field( + proto.INT64, + number=2, + oneof="id_type", + ) + name = proto.Field( + proto.STRING, + number=3, + oneof="id_type", + ) + + partition_id = proto.Field( + proto.MESSAGE, + number=1, + message="PartitionId", + ) + path = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) class ArrayValue(proto.Message): @@ -145,7 +176,11 @@ class ArrayValue(proto.Message): 'exclude_from_indexes'. """ - values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",) + values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) class Value(proto.Message): @@ -189,7 +224,7 @@ class Value(proto.Message): This field is a member of `oneof`_ ``value_type``. string_value (str): A UTF-8 encoded string value. When ``exclude_from_indexes`` - is false (it is indexed), may have at most 1500 bytes. + is false (it is indexed) , may have at most 1500 bytes. Otherwise, may be set to at most 1,000,000 bytes. This field is a member of `oneof`_ ``value_type``. 
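Read-only transactions can likewise be pinned to a timestamp through the new ``ReadOnly.read_time`` field (no more than 60 seconds old, per the docstring). A minimal sketch:

    from google.protobuf import timestamp_pb2
    from google.cloud.datastore_v1.types import datastore

    now = timestamp_pb2.Timestamp()
    now.GetCurrentTime()
    options = datastore.TransactionOptions(
        read_only=datastore.TransactionOptions.ReadOnly(read_time=now),
    )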
@@ -226,28 +261,74 @@ class Value(proto.Message): """ null_value = proto.Field( - proto.ENUM, number=11, oneof="value_type", enum=struct_pb2.NullValue, + proto.ENUM, + number=11, + oneof="value_type", + enum=struct_pb2.NullValue, + ) + boolean_value = proto.Field( + proto.BOOL, + number=1, + oneof="value_type", + ) + integer_value = proto.Field( + proto.INT64, + number=2, + oneof="value_type", + ) + double_value = proto.Field( + proto.DOUBLE, + number=3, + oneof="value_type", ) - boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type",) - integer_value = proto.Field(proto.INT64, number=2, oneof="value_type",) - double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type",) timestamp_value = proto.Field( - proto.MESSAGE, number=10, oneof="value_type", message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=10, + oneof="value_type", + message=timestamp_pb2.Timestamp, + ) + key_value = proto.Field( + proto.MESSAGE, + number=5, + oneof="value_type", + message="Key", + ) + string_value = proto.Field( + proto.STRING, + number=17, + oneof="value_type", + ) + blob_value = proto.Field( + proto.BYTES, + number=18, + oneof="value_type", ) - key_value = proto.Field(proto.MESSAGE, number=5, oneof="value_type", message="Key",) - string_value = proto.Field(proto.STRING, number=17, oneof="value_type",) - blob_value = proto.Field(proto.BYTES, number=18, oneof="value_type",) geo_point_value = proto.Field( - proto.MESSAGE, number=8, oneof="value_type", message=latlng_pb2.LatLng, + proto.MESSAGE, + number=8, + oneof="value_type", + message=latlng_pb2.LatLng, ) entity_value = proto.Field( - proto.MESSAGE, number=6, oneof="value_type", message="Entity", + proto.MESSAGE, + number=6, + oneof="value_type", + message="Entity", ) array_value = proto.Field( - proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", + proto.MESSAGE, + number=9, + oneof="value_type", + message="ArrayValue", + ) + meaning = proto.Field( + proto.INT32, + number=14, + ) + exclude_from_indexes = proto.Field( + proto.BOOL, + number=19, ) - meaning = proto.Field(proto.INT32, number=14,) - exclude_from_indexes = proto.Field(proto.BOOL, number=19,) class Entity(proto.Message): @@ -265,7 +346,7 @@ class Entity(proto.Message): example, an entity in ``Value.entity_value`` may have no key). An entity's kind is its key path's last element's kind, or null if it has no key. - properties (Sequence[google.cloud.datastore_v1.types.Entity.PropertiesEntry]): + properties (Mapping[str, google.cloud.datastore_v1.types.Value]): The entity's properties. The map's keys are property names. A property name matching regex ``__.*__`` is reserved. A reserved property name is forbidden in certain documented @@ -273,8 +354,17 @@ class Entity(proto.Message): characters. The name cannot be ``""``. 
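``Value`` is a oneof over the supported Datastore types, and ``Entity.properties`` (now documented as a ``Mapping``) maps property names to such values. A minimal sketch with hypothetical property names:

    from google.cloud.datastore_v1.types import entity

    item = entity.Entity(
        properties={
            "title": entity.Value(string_value="How to make pizza"),
            "wordCount": entity.Value(integer_value=400),
            "isDraft": entity.Value(boolean_value=False),
        }
    )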
""" - key = proto.Field(proto.MESSAGE, number=1, message="Key",) - properties = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="Value",) + key = proto.Field( + proto.MESSAGE, + number=1, + message="Key", + ) + properties = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="Value", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py index 46147f05..1179efce 100644 --- a/google/cloud/datastore_v1/types/query.py +++ b/google/cloud/datastore_v1/types/query.py @@ -16,6 +16,7 @@ import proto # type: ignore from google.cloud.datastore_v1.types import entity as gd_entity +from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore @@ -56,6 +57,12 @@ class EntityResult(proto.Message): entities in ``LookupResponse``, this is the version of the snapshot that was used to look up the entity, and it is always set except for eventually consistent reads. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the entity was last changed. This field is + set for + [``FULL``][google.datastore.v1.EntityResult.ResultType.FULL] + entity results. If this entity is missing, this field will + not be set. cursor (bytes): A cursor that points to the position after the result entity. Set only when the ``EntityResult`` is part of a @@ -75,9 +82,24 @@ class ResultType(proto.Enum): PROJECTION = 2 KEY_ONLY = 3 - entity = proto.Field(proto.MESSAGE, number=1, message=gd_entity.Entity,) - version = proto.Field(proto.INT64, number=4,) - cursor = proto.Field(proto.BYTES, number=3,) + entity = proto.Field( + proto.MESSAGE, + number=1, + message=gd_entity.Entity, + ) + version = proto.Field( + proto.INT64, + number=4, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + cursor = proto.Field( + proto.BYTES, + number=3, + ) class Query(proto.Message): @@ -122,17 +144,48 @@ class Query(proto.Message): Must be >= 0 if specified. """ - projection = proto.RepeatedField(proto.MESSAGE, number=2, message="Projection",) - kind = proto.RepeatedField(proto.MESSAGE, number=3, message="KindExpression",) - filter = proto.Field(proto.MESSAGE, number=4, message="Filter",) - order = proto.RepeatedField(proto.MESSAGE, number=5, message="PropertyOrder",) + projection = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Projection", + ) + kind = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="KindExpression", + ) + filter = proto.Field( + proto.MESSAGE, + number=4, + message="Filter", + ) + order = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="PropertyOrder", + ) distinct_on = proto.RepeatedField( - proto.MESSAGE, number=6, message="PropertyReference", + proto.MESSAGE, + number=6, + message="PropertyReference", + ) + start_cursor = proto.Field( + proto.BYTES, + number=7, + ) + end_cursor = proto.Field( + proto.BYTES, + number=8, + ) + offset = proto.Field( + proto.INT32, + number=10, + ) + limit = proto.Field( + proto.MESSAGE, + number=12, + message=wrappers_pb2.Int32Value, ) - start_cursor = proto.Field(proto.BYTES, number=7,) - end_cursor = proto.Field(proto.BYTES, number=8,) - offset = proto.Field(proto.INT32, number=10,) - limit = proto.Field(proto.MESSAGE, number=12, message=wrappers_pb2.Int32Value,) class KindExpression(proto.Message): @@ -143,7 +196,10 @@ class KindExpression(proto.Message): The name of the kind. 
""" - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class PropertyReference(proto.Message): @@ -156,7 +212,10 @@ class PropertyReference(proto.Message): a property name path. """ - name = proto.Field(proto.STRING, number=2,) + name = proto.Field( + proto.STRING, + number=2, + ) class Projection(proto.Message): @@ -167,7 +226,11 @@ class Projection(proto.Message): The property to project. """ - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) class PropertyOrder(proto.Message): @@ -186,8 +249,16 @@ class Direction(proto.Enum): ASCENDING = 1 DESCENDING = 2 - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - direction = proto.Field(proto.ENUM, number=2, enum=Direction,) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + direction = proto.Field( + proto.ENUM, + number=2, + enum=Direction, + ) class Filter(proto.Message): @@ -212,10 +283,16 @@ class Filter(proto.Message): """ composite_filter = proto.Field( - proto.MESSAGE, number=1, oneof="filter_type", message="CompositeFilter", + proto.MESSAGE, + number=1, + oneof="filter_type", + message="CompositeFilter", ) property_filter = proto.Field( - proto.MESSAGE, number=2, oneof="filter_type", message="PropertyFilter", + proto.MESSAGE, + number=2, + oneof="filter_type", + message="PropertyFilter", ) @@ -236,8 +313,16 @@ class Operator(proto.Enum): OPERATOR_UNSPECIFIED = 0 AND = 1 - op = proto.Field(proto.ENUM, number=1, enum=Operator,) - filters = proto.RepeatedField(proto.MESSAGE, number=2, message="Filter",) + op = proto.Field( + proto.ENUM, + number=1, + enum=Operator, + ) + filters = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Filter", + ) class PropertyFilter(proto.Message): @@ -260,11 +345,26 @@ class Operator(proto.Enum): GREATER_THAN = 3 GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 + IN = 6 + NOT_EQUAL = 9 HAS_ANCESTOR = 11 + NOT_IN = 13 - property = proto.Field(proto.MESSAGE, number=1, message="PropertyReference",) - op = proto.Field(proto.ENUM, number=2, enum=Operator,) - value = proto.Field(proto.MESSAGE, number=3, message=gd_entity.Value,) + property = proto.Field( + proto.MESSAGE, + number=1, + message="PropertyReference", + ) + op = proto.Field( + proto.ENUM, + number=2, + enum=Operator, + ) + value = proto.Field( + proto.MESSAGE, + number=3, + message=gd_entity.Value, + ) class GqlQuery(proto.Message): @@ -280,7 +380,7 @@ class GqlQuery(proto.Message): and instead must bind all values. For example, ``SELECT * FROM Kind WHERE a = 'string literal'`` is not allowed, while ``SELECT * FROM Kind WHERE a = @value`` is. - named_bindings (Sequence[google.cloud.datastore_v1.types.GqlQuery.NamedBindingsEntry]): + named_bindings (Mapping[str, google.cloud.datastore_v1.types.GqlQueryParameter]): For each non-reserved named binding site in the query string, there must be a named parameter with that name, but not necessarily the inverse. @@ -297,13 +397,24 @@ class GqlQuery(proto.Message): true. 
""" - query_string = proto.Field(proto.STRING, number=1,) - allow_literals = proto.Field(proto.BOOL, number=2,) + query_string = proto.Field( + proto.STRING, + number=1, + ) + allow_literals = proto.Field( + proto.BOOL, + number=2, + ) named_bindings = proto.MapField( - proto.STRING, proto.MESSAGE, number=5, message="GqlQueryParameter", + proto.STRING, + proto.MESSAGE, + number=5, + message="GqlQueryParameter", ) positional_bindings = proto.RepeatedField( - proto.MESSAGE, number=4, message="GqlQueryParameter", + proto.MESSAGE, + number=4, + message="GqlQueryParameter", ) @@ -330,9 +441,16 @@ class GqlQueryParameter(proto.Message): """ value = proto.Field( - proto.MESSAGE, number=2, oneof="parameter_type", message=gd_entity.Value, + proto.MESSAGE, + number=2, + oneof="parameter_type", + message=gd_entity.Value, + ) + cursor = proto.Field( + proto.BYTES, + number=3, + oneof="parameter_type", ) - cursor = proto.Field(proto.BYTES, number=3, oneof="parameter_type",) class QueryResultBatch(proto.Message): @@ -367,6 +485,17 @@ class QueryResultBatch(proto.Message): Each batch's snapshot version is valid for all preceding batches. The value will be zero for eventually consistent queries. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Read timestamp this batch was returned from. This applies to + the range of results from the query's ``start_cursor`` (or + the beginning of the query if no cursor was given) to this + batch's ``end_cursor`` (not the query's ``end_cursor``). + + In a single transaction, subsequent query result batches for + the same query can have a greater timestamp. Each batch's + read timestamp is valid for all preceding batches. This + value will not be set for eventually consistent queries in + Cloud Datastore. """ class MoreResultsType(proto.Enum): @@ -377,17 +506,42 @@ class MoreResultsType(proto.Enum): MORE_RESULTS_AFTER_CURSOR = 4 NO_MORE_RESULTS = 3 - skipped_results = proto.Field(proto.INT32, number=6,) - skipped_cursor = proto.Field(proto.BYTES, number=3,) + skipped_results = proto.Field( + proto.INT32, + number=6, + ) + skipped_cursor = proto.Field( + proto.BYTES, + number=3, + ) entity_result_type = proto.Field( - proto.ENUM, number=1, enum="EntityResult.ResultType", + proto.ENUM, + number=1, + enum="EntityResult.ResultType", ) entity_results = proto.RepeatedField( - proto.MESSAGE, number=2, message="EntityResult", + proto.MESSAGE, + number=2, + message="EntityResult", + ) + end_cursor = proto.Field( + proto.BYTES, + number=4, + ) + more_results = proto.Field( + proto.ENUM, + number=5, + enum=MoreResultsType, + ) + snapshot_version = proto.Field( + proto.INT64, + number=7, + ) + read_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, ) - end_cursor = proto.Field(proto.BYTES, number=4,) - more_results = proto.Field(proto.ENUM, number=5, enum=MoreResultsType,) - snapshot_version = proto.Field(proto.INT64, number=7,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index c8cc8070..27e2a51e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -20,16 +20,41 @@ import os import pathlib import shutil +import warnings import nox - -BLACK_VERSION = "black==19.10b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] 
+UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -59,7 +84,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -69,7 +96,28 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) @@ -90,23 +138,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -130,6 +196,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
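Usage sketch for the refactored nox sessions above; the commands assume nox is installed and are run from the repository root:

    # python -m nox -s format     # isort --fss, then black, over LINT_PATHS
    # python -m nox -s lint       # black --check plus flake8
    # python -m nox -s unit-3.10  # unit tests with the standard dependencies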
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) @nox.parametrize("disable_grpc", [False, True]) def system(session, disable_grpc): @@ -153,13 +248,7 @@ def system(session, disable_grpc): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) env = {} if disable_grpc: diff --git a/owlbot.py b/owlbot.py index fbf8c131..63214a43 100644 --- a/owlbot.py +++ b/owlbot.py @@ -107,6 +107,8 @@ def get_staging_dirs( python.py_samples(skip_readmes=True) +python.configure_previous_major_version_branches() + # Preserve system tests w/ GOOGLE_DISABLE_GRPC set (#133, PR #136) assert 1 == s.replace( "noxfile.py", diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e9..91b59676 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 61f8c1f0..b0547f83 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -34,7 +34,9 @@ def datastore_client(test_namespace): if _helpers.EMULATOR_DATASET is not None: http = requests.Session() # Un-authorized. 
return datastore.Client( - project=_helpers.EMULATOR_DATASET, namespace=test_namespace, _http=http, + project=_helpers.EMULATOR_DATASET, + namespace=test_namespace, + _http=http, ) else: return datastore.Client(namespace=test_namespace) diff --git a/tests/system/test_allocate_reserve_ids.py b/tests/system/test_allocate_reserve_ids.py index 8c40538f..f934d067 100644 --- a/tests/system/test_allocate_reserve_ids.py +++ b/tests/system/test_allocate_reserve_ids.py @@ -18,7 +18,8 @@ def test_client_allocate_ids(datastore_client): num_ids = 10 allocated_keys = datastore_client.allocate_ids( - datastore_client.key("Kind"), num_ids, + datastore_client.key("Kind"), + num_ids, ) assert len(allocated_keys) == num_ids diff --git a/tests/system/test_put.py b/tests/system/test_put.py index 5e884cf3..2f8de3a0 100644 --- a/tests/system/test_put.py +++ b/tests/system/test_put.py @@ -29,10 +29,10 @@ def parent_key(datastore_client): def _get_post(datastore_client, id_or_name=None, post_content=None): post_content = post_content or { - "title": u"How to make the perfect pizza in your grill", - "tags": [u"pizza", u"grill"], + "title": "How to make the perfect pizza in your grill", + "tags": ["pizza", "grill"], "publishedAt": datetime.datetime(2001, 1, 1, tzinfo=UTC), - "author": u"Silvano", + "author": "Silvano", "isDraft": False, "wordCount": 400, "rating": 5.0, @@ -77,15 +77,18 @@ def test_client_put_w_multiple_in_txn(datastore_client, entities_to_delete): entities_to_delete.append(entity1) second_post_content = { - "title": u"How to make the perfect homemade pasta", - "tags": [u"pasta", u"homemade"], + "title": "How to make the perfect homemade pasta", + "tags": ["pasta", "homemade"], "publishedAt": datetime.datetime(2001, 1, 1), - "author": u"Silvano", + "author": "Silvano", "isDraft": False, "wordCount": 450, "rating": 4.5, } - entity2 = _get_post(datastore_client, post_content=second_post_content,) + entity2 = _get_post( + datastore_client, + post_content=second_post_content, + ) xact.put(entity2) # Register entity to be deleted. entities_to_delete.append(entity2) @@ -111,7 +114,7 @@ def test_client_put_w_all_value_types(datastore_client, entities_to_delete): entity["truthy"] = True entity["float"] = 2.718281828 entity["int"] = 3735928559 - entity["words"] = u"foo" + entity["words"] = "foo" entity["blob"] = b"seekretz" entity_stored = datastore.Entity(key=key_stored) entity_stored["hi"] = "bye" @@ -133,7 +136,7 @@ def test_client_put_w_entity_w_self_reference(datastore_client, entities_to_dele parent_key = datastore_client.key("Residence", "NewYork") key = datastore_client.key("Person", "name", parent=parent_key) entity = datastore.Entity(key=key) - entity["fullName"] = u"Full name" + entity["fullName"] = "Full name" entity["linkedTo"] = key # Self reference. datastore_client.put(entity) diff --git a/tests/system/test_query.py b/tests/system/test_query.py index c5921bc9..499bc507 100644 --- a/tests/system/test_query.py +++ b/tests/system/test_query.py @@ -301,7 +301,8 @@ def test_query_distinct_on(ancestor_query): @pytest.fixture(scope="session") def large_query_client(datastore_client): large_query_client = _helpers.clone_client( - datastore_client, namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, + datastore_client, + namespace=populate_datastore.LARGE_CHARACTER_NAMESPACE, ) # Populate the datastore if necessary. 
populate_datastore.add_large_character_entities(client=large_query_client) @@ -322,11 +323,23 @@ def large_query(large_query_client): "limit,offset,expected", [ # with no offset there are the correct # of results - (None, None, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS,), + ( + None, + None, + populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS, + ), # with no limit there are results (offset provided) - (None, 900, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900,), + ( + None, + 900, + populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 900, + ), # Offset beyond items larger: verify 200 items found - (200, 1100, 200,), + ( + 200, + 1100, + 200, + ), # offset within range, expect 50 despite larger limit") (100, populate_datastore.LARGE_CHARACTER_TOTAL_OBJECTS - 50, 50), # Offset beyond items larger Verify no items found") diff --git a/tests/system/test_transaction.py b/tests/system/test_transaction.py index d27bc439..b380561f 100644 --- a/tests/system/test_transaction.py +++ b/tests/system/test_transaction.py @@ -23,7 +23,7 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): key = datastore_client.key("Company", "Google") entity = datastore.Entity(key=key) - entity["url"] = u"www.google.com" + entity["url"] = "www.google.com" with datastore_client.transaction() as xact: result = datastore_client.get(entity.key) @@ -39,7 +39,8 @@ def test_transaction_via_with_statement(datastore_client, entities_to_delete): def test_transaction_via_explicit_begin_get_commit( - datastore_client, entities_to_delete, + datastore_client, + entities_to_delete, ): # See # github.com/GoogleCloudPlatform/google-cloud-python/issues/1859 @@ -87,7 +88,7 @@ def test_failure_with_contention(datastore_client, entities_to_delete): # and updated outside it with a contentious value. key = local_client.key("BreakTxn", 1234) orig_entity = datastore.Entity(key=key) - orig_entity["foo"] = u"bar" + orig_entity["foo"] = "bar" local_client.put(orig_entity) entities_to_delete.append(orig_entity) @@ -97,10 +98,10 @@ def test_failure_with_contention(datastore_client, entities_to_delete): entity_in_txn = local_client.get(key) # Update the original entity outside the transaction. - orig_entity[contention_prop_name] = u"outside" + orig_entity[contention_prop_name] = "outside" datastore_client.put(orig_entity) # Try to update the entity which we already updated outside the # transaction. 
- entity_in_txn[contention_prop_name] = u"inside" + entity_in_txn[contention_prop_name] = "inside" txn.put(entity_in_txn) diff --git a/tests/system/utils/populate_datastore.py b/tests/system/utils/populate_datastore.py index 52f453f6..47395070 100644 --- a/tests/system/utils/populate_datastore.py +++ b/tests/system/utils/populate_datastore.py @@ -40,19 +40,19 @@ EDDARD + ("Character", "Jon Snow"), ) CHARACTERS = ( - {"name": u"Rickard", "family": u"Stark", "appearances": 0, "alive": False}, - {"name": u"Eddard", "family": u"Stark", "appearances": 9, "alive": False}, + {"name": "Rickard", "family": "Stark", "appearances": 0, "alive": False}, + {"name": "Eddard", "family": "Stark", "appearances": 9, "alive": False}, { - "name": u"Catelyn", - "family": [u"Stark", u"Tully"], + "name": "Catelyn", + "family": ["Stark", "Tully"], "appearances": 26, "alive": False, }, - {"name": u"Arya", "family": u"Stark", "appearances": 33, "alive": True}, - {"name": u"Sansa", "family": u"Stark", "appearances": 31, "alive": True}, - {"name": u"Robb", "family": u"Stark", "appearances": 22, "alive": False}, - {"name": u"Bran", "family": u"Stark", "appearances": 25, "alive": True}, - {"name": u"Jon Snow", "family": u"Stark", "appearances": 32, "alive": True}, + {"name": "Arya", "family": "Stark", "appearances": 33, "alive": True}, + {"name": "Sansa", "family": "Stark", "appearances": 31, "alive": True}, + {"name": "Robb", "family": "Stark", "appearances": 22, "alive": False}, + {"name": "Bran", "family": "Stark", "appearances": 25, "alive": True}, + {"name": "Jon Snow", "family": "Stark", "appearances": 32, "alive": True}, ) LARGE_CHARACTER_TOTAL_OBJECTS = 2500 LARGE_CHARACTER_NAMESPACE = "LargeCharacterEntity" diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index e6ed5508..fd1fc14c 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -95,20 +95,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] + "client_class,transport_name", + [ + (DatastoreAdminClient, "grpc"), + (DatastoreAdminAsyncClient, "grpc_asyncio"), + ], ) -def test_datastore_admin_client_from_service_account_info(client_class): +def test_datastore_admin_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( @@ -137,23 +141,31 @@ def test_datastore_admin_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient,] + "client_class,transport_name", + [ + (DatastoreAdminClient, "grpc"), + (DatastoreAdminAsyncClient, "grpc_asyncio"), + ], ) -def test_datastore_admin_client_from_service_account_file(client_class): +def test_datastore_admin_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with 
mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") def test_datastore_admin_client_get_transport_class(): @@ -501,7 +513,9 @@ def test_datastore_admin_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -644,10 +658,17 @@ def test_datastore_admin_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [datastore_admin.ExportEntitiesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ExportEntitiesRequest, + dict, + ], +) def test_export_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -673,7 +694,8 @@ def test_export_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -689,7 +711,8 @@ async def test_export_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ExportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -719,7 +742,9 @@ async def test_export_entities_async_from_dict(): def test_export_entities_flattened(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_entities), "__call__") as call: @@ -753,7 +778,9 @@ def test_export_entities_flattened(): def test_export_entities_flattened_error(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -826,10 +853,17 @@ async def test_export_entities_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore_admin.ImportEntitiesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ImportEntitiesRequest, + dict, + ], +) def test_import_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -855,7 +889,8 @@ def test_import_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -871,7 +906,8 @@ async def test_import_entities_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ImportEntitiesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -901,7 +937,9 @@ async def test_import_entities_async_from_dict(): def test_import_entities_flattened(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_entities), "__call__") as call: @@ -935,7 +973,9 @@ def test_import_entities_flattened(): def test_import_entities_flattened_error(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1008,10 +1048,17 @@ async def test_import_entities_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore_admin.CreateIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.CreateIndexRequest, + dict, + ], +) def test_create_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1037,7 +1084,8 @@ def test_create_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1053,7 +1101,8 @@ async def test_create_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.CreateIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1082,10 +1131,17 @@ async def test_create_index_async_from_dict(): await test_create_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.DeleteIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.DeleteIndexRequest, + dict, + ], +) def test_delete_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1111,7 +1167,8 @@ def test_delete_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1127,7 +1184,8 @@ async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.DeleteIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1156,10 +1214,17 @@ async def test_delete_index_async_from_dict(): await test_delete_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.GetIndexRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.GetIndexRequest, + dict, + ], +) def test_get_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1196,7 +1261,8 @@ def test_get_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1212,7 +1278,8 @@ async def test_get_index_async( transport: str = "grpc_asyncio", request_type=datastore_admin.GetIndexRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1252,10 +1319,17 @@ async def test_get_index_async_from_dict(): await test_get_index_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore_admin.ListIndexesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore_admin.ListIndexesRequest, + dict, + ], +) def test_list_indexes(request_type, transport: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1284,7 +1358,8 @@ def test_list_indexes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1300,7 +1375,8 @@ async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=datastore_admin.ListIndexesRequest ): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1334,7 +1410,8 @@ async def test_list_indexes_async_from_dict(): def test_list_indexes_pager(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1342,15 +1419,28 @@ def test_list_indexes_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1367,7 +1457,8 @@ def test_list_indexes_pager(transport_name: str = "grpc"): def test_list_indexes_pages(transport_name: str = "grpc"): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1375,15 +1466,28 @@ def test_list_indexes_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) @@ -1394,7 +1498,9 @@ def test_list_indexes_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_indexes_async_pager(): - client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1403,22 +1509,37 @@ async def test_list_indexes_async_pager(): # Set the response to a series of pages. call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) - async_pager = await client.list_indexes(request={},) + async_pager = await client.list_indexes( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1427,7 +1548,9 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): - client = DatastoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DatastoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1436,20 +1559,35 @@ async def test_list_indexes_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(), index.Index(),], + indexes=[ + index.Index(), + index.Index(), + index.Index(), + ], next_page_token="abc", ), - datastore_admin.ListIndexesResponse(indexes=[], next_page_token="def",), datastore_admin.ListIndexesResponse( - indexes=[index.Index(),], next_page_token="ghi", + indexes=[], + next_page_token="def", ), datastore_admin.ListIndexesResponse( - indexes=[index.Index(), index.Index(),], + indexes=[ + index.Index(), + ], + next_page_token="ghi", + ), + datastore_admin.ListIndexesResponse( + indexes=[ + index.Index(), + index.Index(), + ], ), RuntimeError, ) pages = [] - async for page_ in (await client.list_indexes(request={})).pages: + async for page_ in ( + await client.list_indexes(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1462,7 +1600,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1482,7 +1621,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = DatastoreAdminClient(client_options=options, transport=transport,) + client = DatastoreAdminClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1498,7 +1640,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreAdminClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1541,10 +1684,28 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreAdminClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatastoreAdminGrpcTransport,) + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreAdminGrpcTransport, + ) def test_datastore_admin_base_transport_error(): @@ -1588,6 +1749,14 @@ def test_datastore_admin_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_datastore_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -1599,7 +1768,8 @@ def test_datastore_admin_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreAdminTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1744,24 +1914,40 @@ def test_datastore_admin_grpc_transport_client_cert_source_for_mtls(transport_cl ) -def test_datastore_admin_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_admin_host_no_port(transport_name): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") -def test_datastore_admin_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_admin_host_with_port(transport_name): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == ("datastore.googleapis.com:8000") def test_datastore_admin_grpc_transport_channel(): @@ -1769,7 +1955,8 @@ def test_datastore_admin_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreAdminGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1781,7 +1968,8 @@ def test_datastore_admin_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.DatastoreAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1890,12 +2078,16 @@ def test_datastore_admin_transport_channel_mtls_with_adc(transport_class): def test_datastore_admin_grpc_lro_client(): client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1903,12 +2095,16 @@ def test_datastore_admin_grpc_lro_client(): def test_datastore_admin_grpc_lro_async_client(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1936,7 +2132,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatastoreAdminClient.common_folder_path(folder) assert expected == actual @@ -1954,7 +2152,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatastoreAdminClient.common_organization_path(organization) assert expected == actual @@ -1972,7 +2172,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatastoreAdminClient.common_project_path(project) assert expected == actual @@ -1992,7 +2194,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatastoreAdminClient.common_location_path(project, location) assert expected == actual @@ -2017,7 +2220,8 @@ def test_client_with_default_client_info(): transports.DatastoreAdminTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreAdminClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2026,7 +2230,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = DatastoreAdminClient.get_transport_class() transport = 
transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2034,7 +2239,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = DatastoreAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index fee5a408..4106b217 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -84,19 +84,25 @@ def test__get_default_mtls_endpoint(): assert DatastoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) -def test_datastore_client_from_service_account_info(client_class): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DatastoreClient, "grpc"), + (DatastoreAsyncClient, "grpc_asyncio"), + ], +) +def test_datastore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") @pytest.mark.parametrize( @@ -124,22 +130,32 @@ def test_datastore_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient,]) -def test_datastore_client_from_service_account_file(client_class): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DatastoreClient, "grpc"), + (DatastoreAsyncClient, "grpc_asyncio"), + ], +) +def test_datastore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") def test_datastore_client_get_transport_class(): @@ -477,7 +493,9 @@ def test_datastore_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -608,10 +626,17 @@ def test_datastore_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [datastore.LookupRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.LookupRequest, + dict, + ], +) def test_lookup(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -637,7 +662,8 @@ def test_lookup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -653,7 +679,8 @@ async def test_lookup_async( transport: str = "grpc_asyncio", request_type=datastore.LookupRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -683,7 +710,9 @@ async def test_lookup_async_from_dict(): def test_lookup_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -723,7 +752,9 @@ def test_lookup_flattened(): def test_lookup_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -744,7 +775,9 @@ def test_lookup_flattened_error(): @pytest.mark.asyncio async def test_lookup_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup), "__call__") as call: @@ -789,7 +822,9 @@ async def test_lookup_flattened_async(): @pytest.mark.asyncio async def test_lookup_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
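The recurring change in these hunks is black 22.x's magic trailing comma: a trailing comma inside brackets now forces one element per line, which is why every one-line `parametrize` and constructor call explodes vertically. A minimal standalone sketch of the parametrized-request pattern the generated tests use (`FakeRequest` is an illustrative stand-in, not a type from this diff):

    import pytest

    class FakeRequest:
        """Stand-in for a proto request type such as datastore.RunQueryRequest."""

    # The trailing comma after `dict,` pins black 22.x to this exploded layout.
    @pytest.mark.parametrize(
        "request_type",
        [
            FakeRequest,
            dict,
        ],
    )
    def test_accepts_request_type(request_type):
        # Generated tests build the request either from the proto class or a dict.
        request = request_type()
        assert request is not None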
@@ -808,10 +843,17 @@ async def test_lookup_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.RunQueryRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.RunQueryRequest, + dict, + ], +) def test_run_query(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -837,7 +879,8 @@ def test_run_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -853,7 +896,8 @@ async def test_run_query_async( transport: str = "grpc_asyncio", request_type=datastore.RunQueryRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -882,10 +926,17 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -@pytest.mark.parametrize("request_type", [datastore.BeginTransactionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.BeginTransactionRequest, + dict, + ], +) def test_begin_transaction(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -916,7 +967,8 @@ def test_begin_transaction_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -934,7 +986,8 @@ async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=datastore.BeginTransactionRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -947,7 +1000,9 @@ async def test_begin_transaction_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.BeginTransactionResponse(transaction=b"transaction_blob",) + datastore.BeginTransactionResponse( + transaction=b"transaction_blob", + ) ) response = await client.begin_transaction(request) @@ -967,7 +1022,9 @@ async def test_begin_transaction_async_from_dict(): def test_begin_transaction_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -977,7 +1034,9 @@ def test_begin_transaction_flattened(): call.return_value = datastore.BeginTransactionResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.begin_transaction(project_id="project_id_value",) + client.begin_transaction( + project_id="project_id_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -989,19 +1048,24 @@ def test_begin_transaction_flattened(): def test_begin_transaction_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.begin_transaction( - datastore.BeginTransactionRequest(), project_id="project_id_value", + datastore.BeginTransactionRequest(), + project_id="project_id_value", ) @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1015,7 +1079,9 @@ async def test_begin_transaction_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.begin_transaction(project_id="project_id_value",) + response = await client.begin_transaction( + project_id="project_id_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1028,20 +1094,30 @@ async def test_begin_transaction_flattened_async(): @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.begin_transaction( - datastore.BeginTransactionRequest(), project_id="project_id_value", + datastore.BeginTransactionRequest(), + project_id="project_id_value", ) -@pytest.mark.parametrize("request_type", [datastore.CommitRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.CommitRequest, + dict, + ], +) def test_commit(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1051,7 +1127,9 @@ def test_commit(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = datastore.CommitResponse(index_updates=1389,) + call.return_value = datastore.CommitResponse( + index_updates=1389, + ) response = client.commit(request) # Establish that the underlying gRPC stub method was called. @@ -1068,7 +1146,8 @@ def test_commit_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1084,7 +1163,8 @@ async def test_commit_async( transport: str = "grpc_asyncio", request_type=datastore.CommitRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1095,7 +1175,9 @@ async def test_commit_async( with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datastore.CommitResponse(index_updates=1389,) + datastore.CommitResponse( + index_updates=1389, + ) ) response = await client.commit(request) @@ -1115,7 +1197,9 @@ async def test_commit_async_from_dict(): def test_commit_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1165,7 +1249,9 @@ def test_commit_flattened(): def test_commit_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1191,7 +1277,9 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1246,7 +1334,9 @@ async def test_commit_flattened_async(): @pytest.mark.asyncio async def test_commit_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1270,10 +1360,17 @@ async def test_commit_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.RollbackRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.RollbackRequest, + dict, + ], +) def test_rollback(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1299,7 +1396,8 @@ def test_rollback_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1315,7 +1413,8 @@ async def test_rollback_async( transport: str = "grpc_asyncio", request_type=datastore.RollbackRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1345,7 +1444,9 @@ async def test_rollback_async_from_dict(): def test_rollback_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1354,7 +1455,8 @@ def test_rollback_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rollback( - project_id="project_id_value", transaction=b"transaction_blob", + project_id="project_id_value", + transaction=b"transaction_blob", ) # Establish that the underlying call was made with the expected @@ -1370,7 +1472,9 @@ def test_rollback_flattened(): def test_rollback_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1384,7 +1488,9 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -1397,7 +1503,8 @@ async def test_rollback_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.rollback( - project_id="project_id_value", transaction=b"transaction_blob", + project_id="project_id_value", + transaction=b"transaction_blob", ) # Establish that the underlying call was made with the expected @@ -1414,7 +1521,9 @@ async def test_rollback_flattened_async(): @pytest.mark.asyncio async def test_rollback_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
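The `*_flattened_error` tests above all exercise the same GAPIC rule: a request object and flattened keyword arguments are mutually exclusive. A stripped-down sketch of that guard, with a hypothetical client class rather than the library's actual implementation:

    class FakeClient:
        def rollback(self, request=None, *, project_id=None, transaction=None):
            # GAPIC clients reject mixing a request object with flattened fields.
            flattened = any(v is not None for v in (project_id, transaction))
            if request is not None and flattened:
                raise ValueError(
                    "If the `request` argument is set, then none of "
                    "the individual field arguments should be set."
                )
            return request or {"project_id": project_id, "transaction": transaction}

    client = FakeClient()
    try:
        client.rollback({"project_id": "p"}, project_id="p")
    except ValueError:
        pass  # expected, mirroring test_rollback_flattened_error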
@@ -1426,10 +1535,17 @@ async def test_rollback_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.AllocateIdsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.AllocateIdsRequest, + dict, + ], +) def test_allocate_ids(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1455,7 +1571,8 @@ def test_allocate_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1471,7 +1588,8 @@ async def test_allocate_ids_async( transport: str = "grpc_asyncio", request_type=datastore.AllocateIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1501,7 +1619,9 @@ async def test_allocate_ids_async_from_dict(): def test_allocate_ids_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1533,7 +1653,9 @@ def test_allocate_ids_flattened(): def test_allocate_ids_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1551,7 +1673,9 @@ def test_allocate_ids_flattened_error(): @pytest.mark.asyncio async def test_allocate_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.allocate_ids), "__call__") as call: @@ -1588,7 +1712,9 @@ async def test_allocate_ids_flattened_async(): @pytest.mark.asyncio async def test_allocate_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
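These tests never touch the network: they patch `__call__` on the *type* of the transport's stub, so invoking the stub dispatches straight to a mock. A minimal illustration of that technique with stand-in objects (the real tests patch `type(client.transport.allocate_ids)`):

    from unittest import mock

    class FakeStub:
        def __call__(self, request):
            raise RuntimeError("would hit the network")

    stub = FakeStub()
    # Special-method lookup goes through the class, so patching the type's
    # __call__ intercepts stub(request) itself.
    with mock.patch.object(type(stub), "__call__") as call:
        call.return_value = {"keys": []}
        response = stub({"project_id": "p"})
    call.assert_called_once()
    assert response == {"keys": []}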
@@ -1604,10 +1730,17 @@ async def test_allocate_ids_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [datastore.ReserveIdsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datastore.ReserveIdsRequest, + dict, + ], +) def test_reserve_ids(request_type, transport: str = "grpc"): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1633,7 +1766,8 @@ def test_reserve_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1649,7 +1783,8 @@ async def test_reserve_ids_async( transport: str = "grpc_asyncio", request_type=datastore.ReserveIdsRequest ): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1679,7 +1814,9 @@ async def test_reserve_ids_async_from_dict(): def test_reserve_ids_flattened(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1711,7 +1848,9 @@ def test_reserve_ids_flattened(): def test_reserve_ids_flattened_error(): - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1729,7 +1868,9 @@ def test_reserve_ids_flattened_error(): @pytest.mark.asyncio async def test_reserve_ids_flattened_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.reserve_ids), "__call__") as call: @@ -1766,7 +1907,9 @@ async def test_reserve_ids_flattened_async(): @pytest.mark.asyncio async def test_reserve_ids_flattened_error_async(): - client = DatastoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DatastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1789,7 +1932,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
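The `test_credentials_transport_error` hunks assert one constructor invariant: a pre-built transport already carries its own credentials, scopes, and endpoint, so combining it with any of those options is rejected. A compact sketch of that invariant using hypothetical classes, not the library's code:

    class FakeTransport:
        def __init__(self, credentials=None):
            self.credentials = credentials

    class FakeClient:
        def __init__(self, credentials=None, transport=None, client_options=None):
            # An explicit transport must be the sole source of configuration.
            if transport is not None and (credentials or client_options):
                raise ValueError(
                    "transport is mutually exclusive with credentials "
                    "and client_options"
                )
            self.transport = transport or FakeTransport(credentials)

    transport = FakeTransport(credentials="anon")
    try:
        FakeClient(credentials="anon", transport=transport)
    except ValueError:
        pass  # expected, as in test_credentials_transport_error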
@@ -1809,7 +1953,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = DatastoreClient(client_options=options, transport=transport,) + client = DatastoreClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1825,7 +1972,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatastoreClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1855,7 +2003,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1865,10 +2016,28 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DatastoreClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatastoreClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatastoreGrpcTransport,) + client = DatastoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastoreGrpcTransport, + ) def test_datastore_base_transport_error(): @@ -1908,6 +2077,14 @@ def test_datastore_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_datastore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -1919,7 +2096,8 @@ def test_datastore_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DatastoreTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1960,7 +2138,10 @@ def test_datastore_auth_adc(): @pytest.mark.parametrize( "transport_class", - [transports.DatastoreGrpcTransport, transports.DatastoreGrpcAsyncIOTransport,], + [ + transports.DatastoreGrpcTransport, + transports.DatastoreGrpcAsyncIOTransport, + ], ) def test_datastore_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use @@ -2058,24 +2239,40 @@ def test_datastore_grpc_transport_client_cert_source_for_mtls(transport_class): ) -def test_datastore_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_host_no_port(transport_name): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com" ), + transport=transport_name, ) - 
assert client.transport._host == "datastore.googleapis.com:443" + assert client.transport._host == ("datastore.googleapis.com:443") -def test_datastore_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_datastore_host_with_port(transport_name): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datastore.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "datastore.googleapis.com:8000" + assert client.transport._host == ("datastore.googleapis.com:8000") def test_datastore_grpc_transport_channel(): @@ -2083,7 +2280,8 @@ def test_datastore_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2095,7 +2293,8 @@ def test_datastore_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.DatastoreGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2216,7 +2415,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatastoreClient.common_folder_path(folder) assert expected == actual @@ -2234,7 +2435,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatastoreClient.common_organization_path(organization) assert expected == actual @@ -2252,7 +2455,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatastoreClient.common_project_path(project) assert expected == actual @@ -2272,7 +2477,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatastoreClient.common_location_path(project, location) assert expected == actual @@ -2297,7 +2503,8 @@ def test_client_with_default_client_info(): transports.DatastoreTransport, "_prep_wrapped_messages" ) as prep: client = DatastoreClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2306,7 +2513,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = DatastoreClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2314,7 +2522,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def 
test_transport_close_async(): client = DatastoreAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index e7f0b690..b72a68b5 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -48,7 +48,9 @@ def test_live_api(make_chan, mock_transport, mock_klass): mock_transport.assert_called_once_with(channel=mock.sentinel.channel) make_chan.assert_called_once_with( - mock.sentinel.credentials, DEFAULT_USER_AGENT, "datastore.googleapis.com:443", + mock.sentinel.credentials, + DEFAULT_USER_AGENT, + "datastore.googleapis.com:443", ) mock_klass.assert_called_once_with( diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index 67f28ffe..a03397d5 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -240,7 +240,11 @@ def test_api_ctor(): def _lookup_single_helper( - read_consistency=None, transaction=None, empty=True, retry=None, timeout=None, + read_consistency=None, + transaction=None, + empty=True, + retry=None, + timeout=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -295,7 +299,11 @@ def _lookup_single_helper( uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22lookup") request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.LookupRequest(), + retry=retry, + timeout=timeout, ) if retry is not None: @@ -336,7 +344,11 @@ def test_api_lookup_single_key_hit_w_timeout(): def _lookup_multiple_helper( - found=0, missing=0, deferred=0, retry=None, timeout=None, + found=0, + missing=0, + deferred=0, + retry=None, + timeout=None, ): from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -399,7 +411,11 @@ def _lookup_multiple_helper( uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22lookup") request = _verify_protobuf_call( - http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.LookupRequest(), + retry=retry, + timeout=timeout, ) assert list(request.keys) == [key_pb1._pb, key_pb2._pb] assert request.read_options == read_options._pb @@ -499,7 +515,11 @@ def _run_query_helper( uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22runQuery") request = _verify_protobuf_call( - http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.RunQueryRequest(), + retry=retry, + timeout=timeout, ) assert request.partition_id == partition_id._pb assert request.query == query_pb._pb @@ -615,7 +635,7 @@ def _commit_helper(transaction=None, retry=None, timeout=None): insert = mutation.upsert insert.key.CopyFrom(key_pb._pb) value_pb = _new_value_pb(insert, "foo") - value_pb.string_value = u"Foo" + value_pb.string_value = "Foo" http = _make_requests_session( [_make_response(content=rsp_pb._pb.SerializeToString())] @@ -647,7 +667,11 @@ def _commit_helper(transaction=None, retry=None, timeout=None): uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22commit") request = _verify_protobuf_call( - http, uri, rq_class(), 
retry=retry, timeout=timeout, + http, + uri, + rq_class(), + retry=retry, + timeout=timeout, ) assert list(request.mutations) == [mutation] assert request.mode == mode @@ -709,7 +733,11 @@ def _rollback_helper(retry=None, timeout=None): uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22rollback") request = _verify_protobuf_call( - http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.RollbackRequest(), + retry=retry, + timeout=timeout, ) assert request.transaction == transaction @@ -765,7 +793,11 @@ def _allocate_ids_helper(count=0, retry=None, timeout=None): uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22allocateIds") request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.AllocateIdsRequest(), + retry=retry, + timeout=timeout, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): @@ -822,7 +854,11 @@ def _reserve_ids_helper(count=0, retry=None, timeout=None): uri = _build_expected_url(https://github.com/client._base_url,%20project,%20%22reserveIds") request = _verify_protobuf_call( - http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout, + http, + uri, + datastore_pb2.AllocateIdsRequest(), + retry=retry, + timeout=timeout, ) assert len(request.keys) == len(before_key_pbs) for key_before, key_after in zip(before_key_pbs, request.keys): diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index da253deb..51cddb6a 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -130,7 +130,8 @@ def test_client_ctor_w_implicit_inputs(): other = "other" patch1 = mock.patch( - "google.cloud.datastore.client._determine_default_project", return_value=other, + "google.cloud.datastore.client._determine_default_project", + return_value=other, ) creds = _make_credentials() @@ -151,7 +152,9 @@ def test_client_ctor_w_implicit_inputs(): assert client.current_batch is None assert client.current_transaction is None - default.assert_called_once_with(scopes=Client.SCOPE,) + default.assert_called_once_with( + scopes=Client.SCOPE, + ) _determine_default_project.assert_called_once_with(None) @@ -258,7 +261,10 @@ def test_client_base_url_property_w_client_options(): creds = _make_credentials() client_options = {"api_endpoint": "endpoint"} - client = _make_client(credentials=creds, client_options=client_options,) + client = _make_client( + credentials=creds, + client_options=client_options, + ) assert client.base_url == "endpoint" client.base_url = alternate_url @@ -784,7 +790,7 @@ def test_client_put_multi_w_single_empty_entity(): def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): from google.cloud.datastore_v1.types import datastore as datastore_pb2 - entity = _Entity(foo=u"bar") + entity = _Entity(foo="bar") key = entity.key = _Key(_Key.kind, None) retry = mock.Mock() timeout = 100000 @@ -817,13 +823,13 @@ def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout(): assert len(prop_list) == 1 name, value_pb = prop_list[0] assert name == "foo" - assert value_pb.string_value == u"bar" + assert value_pb.string_value == "bar" def test_client_put_multi_existing_batch_w_completed_key(): creds = _make_credentials() client = _make_client(credentials=creds) - entity = _Entity(foo=u"bar") + entity = _Entity(foo="bar") key = entity.key = _Key() with 
_NoCommitBatch(client) as CURR_BATCH: @@ -837,7 +843,7 @@ def test_client_put_multi_existing_batch_w_completed_key(): assert len(prop_list) == 1 name, value_pb = prop_list[0] assert name == "foo" - assert value_pb.string_value == u"bar" + assert value_pb.string_value == "bar" def test_client_delete(): diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py index 4c1861a2..a8477f2d 100644 --- a/tests/unit/test_helpers.py +++ b/tests/unit/test_helpers.py @@ -131,7 +131,7 @@ def test_entity_from_protobuf_w_entity_with_meaning(): name = "hello" value_pb = _new_value_pb(entity_pb, name) value_pb.meaning = meaning = 9 - value_pb.string_value = val = u"something" + value_pb.string_value = val = "something" entity = entity_from_protobuf(entity_pb) assert entity.key is None @@ -249,7 +249,7 @@ def test_enity_to_protobf_w_simple_fields(): name1 = "foo" entity[name1] = value1 = 42 name2 = "bar" - entity[name2] = value2 = u"some-string" + entity[name2] = value2 = "some-string" entity_pb = entity_to_protobuf(entity) expected_pb = entity_pb2.Entity() @@ -299,7 +299,7 @@ def test_enity_to_protobf_w_inverts_to_protobuf(): val_pb1.exclude_from_indexes = True # Add a string property. val_pb2 = _new_value_pb(original_pb, "bar") - val_pb2.string_value = u"hello" + val_pb2.string_value = "hello" # Add a nested (entity) property. val_pb3 = _new_value_pb(original_pb, "entity-baz") @@ -386,7 +386,7 @@ def test_enity_to_protobf_w_dict_to_entity(): from google.cloud.datastore.helpers import entity_to_protobuf entity = Entity() - entity["a"] = {"b": u"c"} + entity["a"] = {"b": "c"} entity_pb = entity_to_protobuf(entity) expected_pb = entity_pb2.Entity( @@ -624,9 +624,9 @@ def test__pb_attr_value_w_bytes(): def test__pb_attr_value_w_unicode(): from google.cloud.datastore.helpers import _pb_attr_value - name, value = _pb_attr_value(u"str") + name, value = _pb_attr_value("str") assert name == "string_value" - assert value == u"str" + assert value == "str" def test__pb_attr_value_w_entity(): @@ -758,8 +758,8 @@ def test__get_value_from_value_pb_w_bytes(): def test__get_value_from_value_pb_w_unicode(): from google.cloud.datastore.helpers import _get_value_from_value_pb - value = _make_value_pb("string_value", u"str") - assert _get_value_from_value_pb(value._pb) == u"str" + value = _make_value_pb("string_value", "str") + assert _get_value_from_value_pb(value._pb) == "str" def test__get_value_from_value_pb_w_entity(): @@ -929,9 +929,9 @@ def test__set_protobuf_value_w_unicode(): from google.cloud.datastore.helpers import _set_protobuf_value pb = _make_empty_value_pb() - _set_protobuf_value(pb, u"str") + _set_protobuf_value(pb, "str") value = pb.string_value - assert value == u"str" + assert value == "str" def test__set_protobuf_value_w_entity_empty_wo_key(): @@ -952,7 +952,7 @@ def test__set_protobuf_value_w_entity_w_key(): from google.cloud.datastore.helpers import _set_protobuf_value name = "foo" - value = u"Foo" + value = "Foo" pb = _make_empty_value_pb() key = Key("KIND", 123, project="PROJECT") entity = Entity(key=key) @@ -971,7 +971,7 @@ def test__set_protobuf_value_w_array(): from google.cloud.datastore.helpers import _set_protobuf_value pb = _make_empty_value_pb() - values = [u"a", 0, 3.14] + values = ["a", 0, 3.14] _set_protobuf_value(pb, values) marshalled = pb.array_value.values assert len(marshalled) == len(values) @@ -1009,7 +1009,7 @@ def test__get_meaning_w_single(): value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 - value_pb.string_value = u"hi" + value_pb.string_value = "hi" 
result = _get_meaning(value_pb) assert meaning == result @@ -1036,8 +1036,8 @@ def test__get_meaning_w_array_value(): sub_value_pb2 = value_pb._pb.array_value.values.add() sub_value_pb1.meaning = sub_value_pb2.meaning = meaning - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert meaning == result @@ -1055,8 +1055,8 @@ def test__get_meaning_w_array_value_multiple_meanings(): sub_value_pb1.meaning = meaning1 sub_value_pb2.meaning = meaning2 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert result == [meaning1, meaning2] @@ -1072,8 +1072,8 @@ def test__get_meaning_w_array_value_meaning_partially_unset(): sub_value_pb2 = value_pb._pb.array_value.values.add() sub_value_pb1.meaning = meaning1 - sub_value_pb1.string_value = u"hi" - sub_value_pb2.string_value = u"bye" + sub_value_pb1.string_value = "hi" + sub_value_pb2.string_value = "bye" result = _get_meaning(value_pb, is_list=True) assert result == [meaning1, None] diff --git a/tests/unit/test_key.py b/tests/unit/test_key.py index 2d2a88e7..575601f0 100644 --- a/tests/unit/test_key.py +++ b/tests/unit/test_key.py @@ -72,7 +72,10 @@ def test_key_ctor_parent(): {"kind": _CHILD_KIND, "id": _CHILD_ID}, ] parent_key = _make_key( - _PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, namespace=_PARENT_NAMESPACE, + _PARENT_KIND, + _PARENT_ID, + project=_PARENT_PROJECT, + namespace=_PARENT_NAMESPACE, ) key = _make_key(_CHILD_KIND, _CHILD_ID, parent=parent_key) assert key.project == parent_key.project @@ -97,7 +100,11 @@ def test_key_ctor_parent_bad_namespace(): parent_key = _make_key("KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT) with pytest.raises(ValueError): _make_key( - "KIND2", 1234, namespace="BAR", parent=parent_key, PROJECT=_DEFAULT_PROJECT, + "KIND2", + 1234, + namespace="BAR", + parent=parent_key, + PROJECT=_DEFAULT_PROJECT, ) @@ -585,7 +592,7 @@ def test__cliean_app_w_dev_server(): def test__get_empty_w_unset(): from google.cloud.datastore.key import _get_empty - for empty_value in (u"", 0, 0.0, []): + for empty_value in ("", 0, 0.0, []): ret_val = _get_empty(empty_value, empty_value) assert ret_val is None @@ -593,7 +600,7 @@ def test__get_empty_w_unset(): def test__get_empty_w_actually_set(): from google.cloud.datastore.key import _get_empty - value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) + value_pairs = (("hello", ""), (10, 0), (3.14, 0.0), (["stuff", "here"], [])) for value, empty_value in value_pairs: ret_val = _get_empty(value, empty_value) assert ret_val is value @@ -602,7 +609,7 @@ def test__get_empty_w_actually_set(): def test__check_database_id_w_empty_value(): from google.cloud.datastore.key import _check_database_id - ret_val = _check_database_id(u"") + ret_val = _check_database_id("") # Really we are just happy there was no exception. 
assert ret_val is None @@ -611,7 +618,7 @@ def test__check_database_id_w_failure(): from google.cloud.datastore.key import _check_database_id with pytest.raises(ValueError): - _check_database_id(u"some-database-id") + _check_database_id("some-database-id") def test__add_id_or_name_add_id(): diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py index 648ae7e4..3e78a6a3 100644 --- a/tests/unit/test_transaction.py +++ b/tests/unit/test_transaction.py @@ -144,7 +144,9 @@ def test_transaction_begin_w_retry_w_timeout(): expected_request = _make_begin_request(project) ds_api.begin_transaction.assert_called_once_with( - request=expected_request, retry=retry, timeout=timeout, + request=expected_request, + retry=retry, + timeout=timeout, )
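The final hunk checks that `retry` and `timeout` are forwarded verbatim from the transaction wrapper to the GAPIC layer. A self-contained version of that assertion pattern, with a stand-in wrapper function rather than the real `Transaction.begin`:

    from unittest import mock

    def begin_transaction(ds_api, project, retry=None, timeout=None):
        # The wrapper should pass retry/timeout through untouched.
        return ds_api.begin_transaction(
            request={"project_id": project},
            retry=retry,
            timeout=timeout,
        )

    ds_api = mock.Mock()
    retry, timeout = mock.Mock(), 100000
    begin_transaction(ds_api, "my-project", retry=retry, timeout=timeout)
    ds_api.begin_transaction.assert_called_once_with(
        request={"project_id": "my-project"},
        retry=retry,
        timeout=timeout,
    )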