Skip to content
This repository was archived by the owner on Nov 12, 2025. It is now read-only.

Commit 798cd34

Browse files
authored
feat: add manual wrapper for v1beta2 read client (#117)
* feat: add manual wrapper for v1beta2 read client
* add missing v1beta2
* sort versions so v1 is last
* tests: unify v1 and v1beta2 system tests
* tests: use proto object for DataFormat checks
* blacken
* docs: add handwritten class to docs
1 parent d9691f1 commit 798cd34

File tree

9 files changed

+303
-83
lines changed

9 files changed

+303
-83
lines changed
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
BigQuery Storage v1beta2 API Library
2+
====================================
3+
4+
.. automodule:: google.cloud.bigquery_storage_v1beta2.client
5+
:members:
6+
:inherited-members:

docs/index.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ API Reference
2121
bigquery_storage_v1/library
2222
bigquery_storage_v1/services
2323
bigquery_storage_v1/types
24+
bigquery_storage_v1beta2/library
2425
bigquery_storage_v1beta2/services
2526
bigquery_storage_v1beta2/types
2627

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
# -*- coding: utf-8 -*-
2+
#
3+
# Copyright 2020 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
from __future__ import absolute_import
18+
19+
import pkg_resources
20+
21+
__version__ = pkg_resources.get_distribution(
22+
"google-cloud-bigquery-storage"
23+
).version # noqa
24+
25+
from google.cloud.bigquery_storage_v1beta2 import client
26+
from google.cloud.bigquery_storage_v1beta2 import types
27+
28+
29+
class BigQueryReadClient(client.BigQueryReadClient):
30+
__doc__ = client.BigQueryReadClient.__doc__
31+
32+
33+
__all__ = (
34+
# google.cloud.bigquery_storage_v1beta2
35+
"__version__",
36+
"types",
37+
# google.cloud.bigquery_storage_v1beta2.client
38+
"BigQueryReadClient",
39+
)
Lines changed: 137 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,137 @@
1+
# -*- coding: utf-8 -*-
2+
#
3+
# Copyright 2020 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
"""Parent client for calling the Cloud BigQuery Storage API.
18+
19+
This is the base from which all interactions with the API occur.
20+
"""
21+
22+
from __future__ import absolute_import
23+
24+
import google.api_core.gapic_v1.method
25+
26+
from google.cloud.bigquery_storage_v1 import reader
27+
from google.cloud.bigquery_storage_v1beta2.services import big_query_read
28+
29+
30+
_SCOPES = (
31+
"https://www.googleapis.com/auth/bigquery",
32+
"https://www.googleapis.com/auth/cloud-platform",
33+
)
34+
35+
36+
class BigQueryReadClient(big_query_read.BigQueryReadClient):
37+
"""Client for interacting with BigQuery Storage API.
38+
39+
The BigQuery storage API can be used to read data stored in BigQuery.
40+
"""
41+
42+
def read_rows(
43+
self,
44+
name,
45+
offset=0,
46+
retry=google.api_core.gapic_v1.method.DEFAULT,
47+
timeout=google.api_core.gapic_v1.method.DEFAULT,
48+
metadata=(),
49+
):
50+
"""
51+
Reads rows from the table in the format prescribed by the read
52+
session. Each response contains one or more table rows, up to a
53+
maximum of 10 MiB per response; read requests which attempt to read
54+
individual rows larger than this will fail.
55+
56+
Each request also returns a set of stream statistics reflecting the
57+
estimated total number of rows in the read stream. This number is
58+
computed based on the total table size and the number of active
59+
streams in the read session, and may change as other streams continue
60+
to read data.
61+
62+
Example:
63+
>>> from google.cloud import bigquery_storage
64+
>>>
65+
>>> client = bigquery_storage.BigQueryReadClient()
66+
>>>
67+
>>> # TODO: Initialize ``table``:
68+
>>> table = "projects/{}/datasets/{}/tables/{}".format(
69+
... 'project_id': 'your-data-project-id',
70+
... 'dataset_id': 'your_dataset_id',
71+
... 'table_id': 'your_table_id',
72+
... )
73+
>>>
74+
>>> # TODO: Initialize `parent`:
75+
>>> parent = 'projects/your-billing-project-id'
76+
>>>
77+
>>> requested_session = bigquery_storage.types.ReadSession(
78+
... table=table,
79+
... data_format=bigquery_storage.types.DataFormat.AVRO,
80+
... )
81+
>>> session = client.create_read_session(
82+
... parent=parent, read_session=requested_session
83+
... )
84+
>>>
85+
>>> stream = session.streams[0], # TODO: Also read any other streams.
86+
>>> read_rows_stream = client.read_rows(stream.name)
87+
>>>
88+
>>> for element in read_rows_stream.rows(session):
89+
... # process element
90+
... pass
91+
92+
Args:
93+
name (str):
94+
Required. Name of the stream to start
95+
reading from, of the form
96+
`projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`
97+
offset (Optional[int]):
98+
The starting offset from which to begin reading rows from
99+
in the stream. The offset requested must be less than the last
100+
row read from ReadRows. Requesting a larger offset is
101+
undefined.
102+
retry (Optional[google.api_core.retry.Retry]): A retry object used
103+
to retry requests. If ``None`` is specified, requests will not
104+
be retried.
105+
timeout (Optional[float]): The amount of time, in seconds, to wait
106+
for the request to complete. Note that if ``retry`` is
107+
specified, the timeout applies to each individual attempt.
108+
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
109+
that is provided to the method.
110+
111+
Returns:
112+
~google.cloud.bigquery_storage_v1.reader.ReadRowsStream:
113+
An iterable of
114+
:class:`~google.cloud.bigquery_storage_v1.types.ReadRowsResponse`.
115+
116+
Raises:
117+
google.api_core.exceptions.GoogleAPICallError: If the request
118+
failed for any reason.
119+
google.api_core.exceptions.RetryError: If the request failed due
120+
to a retryable error and retry attempts failed.
121+
ValueError: If the parameters are invalid.
122+
"""
123+
gapic_client = super(BigQueryReadClient, self)
124+
stream = gapic_client.read_rows(
125+
read_stream=name,
126+
offset=offset,
127+
retry=retry,
128+
timeout=timeout,
129+
metadata=metadata,
130+
)
131+
return reader.ReadRowsStream(
132+
stream,
133+
gapic_client,
134+
name,
135+
offset,
136+
{"retry": retry, "timeout": timeout, "metadata": metadata},
137+
)

synth.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222

2323
gapic = gcp.GAPICBazel()
2424
common = gcp.CommonTemplates()
25-
versions = ["v1"]
25+
versions = ["v1beta2", "v1"]
2626

2727
for version in versions:
2828
library = gapic.py_library(

tests/system/v1/conftest.py renamed to tests/system/conftest.py

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,11 +20,9 @@
2020

2121
import pytest
2222

23-
from google.cloud import bigquery_storage
24-
2523
_TABLE_FORMAT = "projects/{}/datasets/{}/tables/{}"
2624

27-
_ASSETS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../assets")
25+
_ASSETS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "assets")
2826

2927

3028
@pytest.fixture(scope="session")
@@ -52,11 +50,6 @@ def credentials(use_mtls):
5250
return service_account.Credentials.from_service_account_file(filename)
5351

5452

55-
@pytest.fixture(scope="session")
56-
def client(credentials):
57-
return bigquery_storage.BigQueryReadClient(credentials=credentials)
58-
59-
6053
@pytest.fixture()
6154
def table_reference():
6255
return _TABLE_FORMAT.format("bigquery-public-data", "usa_names", "usa_1910_2013")

tests/system/reader/conftest.py

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
# -*- coding: utf-8 -*-
2+
#
3+
# Copyright 2020 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
"""System tests for reading rows from tables."""
17+
18+
import pytest
19+
20+
from google.cloud import bigquery_storage
21+
from google.cloud import bigquery_storage_v1beta2
22+
23+
24+
@pytest.fixture(scope="session")
25+
def client_v1(credentials):
26+
return bigquery_storage.BigQueryReadClient(credentials=credentials)
27+
28+
29+
@pytest.fixture(scope="session")
30+
def client_v1beta2(credentials):
31+
return bigquery_storage_v1beta2.BigQueryReadClient(credentials=credentials)
32+
33+
34+
@pytest.fixture(scope="session", params=["v1", "v1beta2"])
35+
def client_and_types(request, client_v1, client_v1beta2):
36+
if request.param == "v1":
37+
return client_v1, bigquery_storage.types
38+
return client_v1beta2, bigquery_storage_v1beta2.types

0 commit comments

Comments
 (0)