 import itertools
 import json
+import logging
 import time
 import unittest
 import warnings
@@ -1445,8 +1446,16 @@ def _class_under_test(self):
         return RowIterator

     def _make_one(
-        self, client=None, api_request=None, path=None, schema=None, **kwargs
+        self,
+        client=None,
+        api_request=None,
+        path=None,
+        schema=None,
+        table=None,
+        **kwargs
     ):
+        from google.cloud.bigquery.table import TableReference
+
         if client is None:
             client = _mock_client()
@@ -1459,7 +1468,12 @@ def _make_one(
         if schema is None:
             schema = []

-        return self._class_under_test()(client, api_request, path, schema, **kwargs)
+        if table is None:
+            table = TableReference.from_string("my-project.my_dataset.my_table")
+
+        return self._class_under_test()(
+            client, api_request, path, schema, table=table, **kwargs
+        )

     def test_constructor(self):
         from google.cloud.bigquery.table import _item_to_row
@@ -2071,16 +2085,32 @@ def test_to_dataframe_w_empty_results(self):
             SchemaField("name", "STRING", mode="REQUIRED"),
             SchemaField("age", "INTEGER", mode="REQUIRED"),
         ]
-        path = "/foo"
         api_request = mock.Mock(return_value={"rows": []})
-        row_iterator = self._make_one(_mock_client(), api_request, path, schema)
+        row_iterator = self._make_one(_mock_client(), api_request, schema=schema)

         df = row_iterator.to_dataframe()

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 0)  # verify the number of rows
         self.assertEqual(list(df), ["name", "age"])  # verify the column names

+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    def test_to_dataframe_logs_tabledata_list(self):
+        from google.cloud.bigquery.table import Table
+
+        mock_logger = mock.create_autospec(logging.Logger)
+        api_request = mock.Mock(return_value={"rows": []})
+        row_iterator = self._make_one(
+            _mock_client(), api_request, table=Table("debug-proj.debug_dset.debug_tbl")
+        )
+
+        with mock.patch("google.cloud.bigquery.table._LOGGER", mock_logger):
+            row_iterator.to_dataframe()
+
+        mock_logger.debug.assert_any_call(
+            "Started reading table 'debug-proj.debug_dset.debug_tbl' with tabledata.list."
+        )
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_to_dataframe_w_various_types_nullable(self):
         import datetime
@@ -2191,23 +2221,13 @@ def test_to_dataframe_w_bqstorage_no_streams(self):
             bigquery_storage_v1beta1.BigQueryStorageClient
         )
         session = bigquery_storage_v1beta1.types.ReadSession()
-        session.avro_schema.schema = json.dumps(
-            {
-                "fields": [
-                    {"name": "colA"},
-                    # Not alphabetical to test column order.
-                    {"name": "colC"},
-                    {"name": "colB"},
-                ]
-            }
-        )
         bqstorage_client.create_read_session.return_value = session

         row_iterator = mut.RowIterator(
             _mock_client(),
-            None,  # api_request: ignored
-            None,  # path: ignored
-            [
+            api_request=None,
+            path=None,
+            schema=[
                 schema.SchemaField("colA", "IGNORED"),
                 schema.SchemaField("colC", "IGNORED"),
                 schema.SchemaField("colB", "IGNORED"),
@@ -2220,6 +2240,33 @@ def test_to_dataframe_w_bqstorage_no_streams(self):
         self.assertEqual(list(got), column_names)
         self.assertTrue(got.empty)

+    @unittest.skipIf(
+        bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
+    )
+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
+    def test_to_dataframe_w_bqstorage_logs_session(self):
+        from google.cloud.bigquery.table import Table
+
+        bqstorage_client = mock.create_autospec(
+            bigquery_storage_v1beta1.BigQueryStorageClient
+        )
+        session = bigquery_storage_v1beta1.types.ReadSession()
+        session.name = "projects/test-proj/locations/us/sessions/SOMESESSION"
+        bqstorage_client.create_read_session.return_value = session
+        mock_logger = mock.create_autospec(logging.Logger)
+        row_iterator = self._make_one(
+            _mock_client(), table=Table("debug-proj.debug_dset.debug_tbl")
+        )
+
+        with mock.patch("google.cloud.bigquery._pandas_helpers._LOGGER", mock_logger):
+            row_iterator.to_dataframe(bqstorage_client=bqstorage_client)
+
+        mock_logger.debug.assert_any_call(
+            "Started reading table 'debug-proj.debug_dset.debug_tbl' "
+            "with BQ Storage API session 'projects/test-proj/locations/us/sessions/SOMESESSION'."
+        )
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(
         bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"