
Commit ace6762

Adjust code to new BQ Storage 2.0
1 parent be1459f commit ace6762

16 files changed, +209 −195 lines

google/cloud/bigquery/_pandas_helpers.py

Lines changed: 3 additions & 8 deletions
@@ -22,11 +22,6 @@
 import six
 from six.moves import queue

-try:
-    from google.cloud import bigquery_storage_v1
-except ImportError:  # pragma: NO COVER
-    bigquery_storage_v1 = None
-
 try:
     import pandas
 except ImportError:  # pragma: NO COVER
@@ -613,7 +608,7 @@ def _download_table_bqstorage(

     # Passing a BQ Storage client in implies that the BigQuery Storage library
     # is available and can be imported.
-    from google.cloud.bigquery import storage
+    from google.cloud import bigquery_storage

     if "$" in table.table_id:
         raise ValueError(
@@ -624,8 +619,8 @@ def _download_table_bqstorage(

     requested_streams = 1 if preserve_order else 0

-    requested_session = storage.types.ReadSession(
-        table=table.to_bqstorage(), data_format=storage.types.DataFormat.ARROW
+    requested_session = bigquery_storage.types.ReadSession(
+        table=table.to_bqstorage(), data_format=bigquery_storage.types.DataFormat.ARROW
     )
     if selected_fields is not None:
         for field in selected_fields:
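
Note: the hunks above replace both the old module-level `bigquery_storage_v1` import and the interim `google.cloud.bigquery.storage` alias with the single 2.0 entry point. A minimal sketch of the new request construction, assuming google-cloud-bigquery-storage >= 2.0.0 is installed and `table` is a hypothetical bigquery Table instance:

from google.cloud import bigquery_storage

# Build an Arrow-format read session the way the updated helper does;
# `table` is an assumed google.cloud.bigquery.table.Table in scope.
requested_session = bigquery_storage.types.ReadSession(
    table=table.to_bqstorage(),
    data_format=bigquery_storage.types.DataFormat.ARROW,
)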

google/cloud/bigquery/client.py

Lines changed: 3 additions & 3 deletions
@@ -435,19 +435,19 @@ def _create_bqstorage_client(self):
         warning and return ``None``.

         Returns:
-            Optional[google.cloud.bigquery_storage_v1.BigQueryReadClient]:
+            Optional[google.cloud.bigquery_storage.BigQueryReadClient]:
                 A BigQuery Storage API client.
         """
         try:
-            from google.cloud import bigquery_storage_v1
+            from google.cloud import bigquery_storage
         except ImportError:
             warnings.warn(
                 "Cannot create BigQuery Storage client, the dependency "
                 "google-cloud-bigquery-storage is not installed."
             )
             return None

-        return bigquery_storage_v1.BigQueryReadClient(credentials=self._credentials)
+        return bigquery_storage.BigQueryReadClient(credentials=self._credentials)

     def create_dataset(
         self, dataset, exists_ok=False, retry=DEFAULT_RETRY, timeout=None
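
Note: callers who construct the read client themselves follow the same pattern as `_create_bqstorage_client`. A hedged sketch, not part of this commit, reusing the BigQuery client's credentials:

from google.cloud import bigquery, bigquery_storage

client = bigquery.Client()
# Reuse the main client's credentials, as _create_bqstorage_client does;
# guard the import with try/except ImportError if the extra may be absent.
bqstorage_client = bigquery_storage.BigQueryReadClient(
    credentials=client._credentials
)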

google/cloud/bigquery/dbapi/connection.py

Lines changed: 1 addition & 1 deletion
@@ -73,7 +73,7 @@ def close(self):

         if self._owns_bqstorage_client:
             # There is no close() on the BQ Storage client itself.
-            self._bqstorage_client.transport.channel.close()
+            self._bqstorage_client._transport.grpc_channel.close()

         for cursor_ in self._cursors_created:
             cursor_.close()
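
Note: the regenerated 2.0 clients expose the gRPC channel as the private `_transport.grpc_channel` rather than `transport.channel`, and there is still no public close() on the client. A sketch of the ownership pattern, where `do_work` is a hypothetical helper:

try:
    do_work(bqstorage_client)  # hypothetical: any reads using the client
finally:
    # Close the underlying channel explicitly to avoid leaking sockets.
    bqstorage_client._transport.grpc_channel.close()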

google/cloud/bigquery/dbapi/cursor.py

Lines changed: 3 additions & 3 deletions
@@ -267,13 +267,13 @@ def _bqstorage_fetch(self, bqstorage_client):
         """
         # Hitting this code path with a BQ Storage client instance implies that
         # bigquery.storage can indeed be imported here without errors.
-        from google.cloud.bigquery import storage
+        from google.cloud import bigquery_storage

         table_reference = self._query_job.destination

-        requested_session = storage.types.ReadSession(
+        requested_session = bigquery_storage.types.ReadSession(
             table=table_reference.to_bqstorage(),
-            data_format=storage.types.DataFormat.ARROW,
+            data_format=bigquery_storage.types.DataFormat.ARROW,
         )
         read_session = bqstorage_client.create_read_session(
             parent="projects/{}".format(table_reference.project),

google/cloud/bigquery/magics/magics.py

Lines changed: 3 additions & 5 deletions
@@ -637,7 +637,7 @@ def _make_bqstorage_client(use_bqstorage_api, credentials):
         return None

     try:
-        from google.cloud import bigquery_storage_v1
+        from google.cloud import bigquery_storage
     except ImportError as err:
         customized_error = ImportError(
             "The default BigQuery Storage API client cannot be used, install "
@@ -655,7 +655,7 @@ def _make_bqstorage_client(use_bqstorage_api, credentials):
         )
         six.raise_from(customized_error, err)

-    return bigquery_storage_v1.BigQueryReadClient(
+    return bigquery_storage.BigQueryReadClient(
         credentials=credentials,
         client_info=gapic_client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
     )
@@ -670,12 +670,10 @@ def _close_transports(client, bqstorage_client):
     Args:
         client (:class:`~google.cloud.bigquery.client.Client`):
         bqstorage_client
-            (Optional[:class:`~google.cloud.bigquery_storage_v1.BigQueryReadClient`]):
+            (Optional[:class:`~google.cloud.bigquery_storage.BigQueryReadClient`]):
             A client for the BigQuery Storage API.

     """
     client.close()
     if bqstorage_client is not None:
-        # import pudb; pu.db
-        # bqstorage_client.transport.channel.close()
         bqstorage_client._transport.grpc_channel.close()

google/cloud/bigquery/table.py

Lines changed: 1 addition & 1 deletion
@@ -1521,7 +1521,7 @@ def to_arrow(
                 progress_bar.close()
         finally:
             if owns_bqstorage_client:
-                bqstorage_client.transport.channel.close()
+                bqstorage_client._transport.grpc_channel.close()

         if record_batches:
             return pyarrow.Table.from_batches(record_batches)

noxfile.py

Lines changed: 4 additions & 10 deletions
@@ -49,16 +49,10 @@ def default(session):
         constraints_path,
     )

-    if session.python == "2.7":
-        # The [all] extra is not installable on Python 2.7.
-        session.install("-e", ".[pandas,pyarrow]", "-c", constraints_path)
-    elif session.python == "3.5":
-        session.install("-e", ".[all]", "-c", constraints_path)
-    else:
-        # fastparquet is not included in .[all] because, in general, it's
-        # redundant with pyarrow. We still want to run some unit tests with
-        # fastparquet serialization, though.
-        session.install("-e", ".[all,fastparquet]", "-c", constraints_path)
+    # fastparquet is not included in .[all] because, in general, it's
+    # redundant with pyarrow. We still want to run some unit tests with
+    # fastparquet serialization, though.
+    session.install("-e", ".[all,fastparquet]", "-c", constraints_path)

     session.install("ipython", "-c", constraints_path)
setup.py

Lines changed: 3 additions & 6 deletions
@@ -22,7 +22,7 @@

 name = "google-cloud-bigquery"
 description = "Google BigQuery API client library"
-version = "1.28.0"
+version = "2.0.0"
 # Should be one of:
 # 'Development Status :: 3 - Alpha'
 # 'Development Status :: 4 - Beta'
@@ -37,7 +37,7 @@
 ]
 extras = {
     "bqstorage": [
-        "google-cloud-bigquery-storage >= 1.0.0, <2.0.0dev",
+        "google-cloud-bigquery-storage >= 2.0.0, <3.0.0dev",
         # Due to an issue in pip's dependency resolver, the `grpc` extra is not
         # installed, even though `google-cloud-bigquery-storage` specifies it
         # as `google-api-core[grpc]`. We thus need to explicitly specify it here.
@@ -118,10 +118,7 @@
         "Intended Audience :: Developers",
         "License :: OSI Approved :: Apache Software License",
         "Programming Language :: Python",
-        "Programming Language :: Python :: 2",
-        "Programming Language :: Python :: 2.7",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.5",
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
@@ -133,7 +130,7 @@
     namespace_packages=namespaces,
     install_requires=dependencies,
     extras_require=extras,
-    python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
+    python_requires=">=3.6",
     include_package_data=True,
     zip_safe=False,
 )

tests/system.py

Lines changed: 21 additions & 11 deletions
@@ -34,9 +34,9 @@
 import pkg_resources

 try:
-    from google.cloud.bigquery import storage
+    from google.cloud import bigquery_storage
 except ImportError:  # pragma: NO COVER
-    storage = None
+    bigquery_storage = None

 try:
     import fastavro  # to parse BQ storage client results
@@ -1790,10 +1790,12 @@ def test_dbapi_fetchall(self):
         row_tuples = [r.values() for r in rows]
         self.assertEqual(row_tuples, [(1, 2), (3, 4), (5, 6)])

-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )
         cursor = dbapi.connect(Config.CLIENT, bqstorage_client).cursor()
@@ -1850,7 +1852,9 @@ def test_dbapi_dry_run_query(self):

         self.assertEqual(list(rows), [])

-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_dbapi_connection_does_not_leak_sockets(self):
         current_process = psutil.Process()
         conn_count_start = len(current_process.connections())
@@ -2278,15 +2282,17 @@ def test_query_results_to_dataframe(self):
             self.assertIsInstance(row[col], exp_datatypes[col])

     @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_query_results_to_dataframe_w_bqstorage(self):
         query = """
             SELECT id, author, time_ts, dead
             FROM `bigquery-public-data.hacker_news.comments`
             LIMIT 10
         """

-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )

@@ -2575,7 +2581,9 @@ def _fetch_dataframe(self, query):
         return Config.CLIENT.query(query).result().to_dataframe()

     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_nested_table_to_arrow(self):
         from google.cloud.bigquery.job import SourceFormat
         from google.cloud.bigquery.job import WriteDisposition
@@ -2610,7 +2618,7 @@ def test_nested_table_to_arrow(self):
         job_config.schema = schema
         # Load a table using a local JSON file from memory.
         Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result()
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )

@@ -2765,12 +2773,14 @@ def test_list_rows_page_size(self):
         self.assertEqual(page.num_items, num_last_page)

     @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_list_rows_max_results_w_bqstorage(self):
         table_ref = DatasetReference("bigquery-public-data", "utility_us").table(
             "country_code_iso"
         )
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )
tests/unit/test__pandas_helpers.py

Lines changed: 0 additions & 20 deletions
@@ -773,26 +773,6 @@ def test_dataframe_to_bq_schema_dict_sequence(module_under_test):
     assert returned_schema == expected_schema


-@pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-@pytest.mark.skipif(not six.PY2, reason="Requires Python 2.7")
-def test_dataframe_to_bq_schema_w_struct_raises_py27(module_under_test):
-    dataframe = pandas.DataFrame(
-        data=[{"struct_field": {"int_col": 1}}, {"struct_field": {"int_col": 2}}]
-    )
-    bq_schema = [
-        schema.SchemaField(
-            "struct_field",
-            field_type="STRUCT",
-            fields=[schema.SchemaField("int_col", field_type="INT64")],
-        ),
-    ]
-
-    with pytest.raises(ValueError) as excinfo:
-        module_under_test.dataframe_to_bq_schema(dataframe, bq_schema=bq_schema)
-
-    assert "struct (record) column types is not supported" in str(excinfo.value)
-
-
 @pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
 @pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
 def test_dataframe_to_arrow_with_multiindex(module_under_test):

tests/unit/test_client.py

Lines changed: 7 additions & 7 deletions
@@ -62,9 +62,9 @@
 from google.cloud.bigquery.dataset import DatasetReference

 try:
-    from google.cloud import bigquery_storage_v1
+    from google.cloud import bigquery_storage
 except (ImportError, AttributeError):  # pragma: NO COVER
-    bigquery_storage_v1 = None
+    bigquery_storage = None
 from test_utils.imports import maybe_fail_import
 from tests.unit.helpers import make_connection

@@ -794,17 +794,17 @@ def test_get_dataset(self):
         self.assertEqual(dataset.dataset_id, self.DS_ID)

     @unittest.skipIf(
-        bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
     )
     def test_create_bqstorage_client(self):
-        mock_client = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
+        mock_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
         mock_client_instance = object()
         mock_client.return_value = mock_client_instance
         creds = _make_credentials()
         client = self._make_one(project=self.PROJECT, credentials=creds)

         with mock.patch(
-            "google.cloud.bigquery_storage_v1.BigQueryReadClient", mock_client
+            "google.cloud.bigquery_storage.BigQueryReadClient", mock_client
         ):
             bqstorage_client = client._create_bqstorage_client()

@@ -817,8 +817,8 @@ def test_create_bqstorage_client_missing_dependency(self):

         def fail_bqstorage_import(name, globals, locals, fromlist, level):
             # NOTE: *very* simplified, assuming a straightforward absolute import
-            return "bigquery_storage_v1" in name or (
-                fromlist is not None and "bigquery_storage_v1" in fromlist
+            return "bigquery_storage" in name or (
+                fromlist is not None and "bigquery_storage" in fromlist
             )

         no_bqstorage = maybe_fail_import(predicate=fail_bqstorage_import)
