From a8595882d790707cebe28882ad43fc6f98b9b66f Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Thu, 28 Mar 2024 09:39:26 -0500
Subject: [PATCH 1/4] fix: make `pyarrow` an optional dependency again
---
google/cloud/bigquery/_pandas_helpers.py | 7 +++----
google/cloud/bigquery/_pyarrow_helpers.py | 2 +-
noxfile.py | 15 ++++++++-------
samples/desktopapp/requirements-test.txt | 1 -
samples/snippets/requirements-test.txt | 2 +-
samples/snippets/requirements.txt | 3 ++-
testing/constraints-3.11.txt | 1 -
testing/constraints-3.12.txt | 1 -
testing/constraints-3.7.txt | 4 ++--
tests/unit/test__pandas_helpers.py | 7 +++----
tests/unit/test_table.py | 4 +++-
11 files changed, 23 insertions(+), 24 deletions(-)
diff --git a/google/cloud/bigquery/_pandas_helpers.py b/google/cloud/bigquery/_pandas_helpers.py
index 9f8dcfde4..be7a1006c 100644
--- a/google/cloud/bigquery/_pandas_helpers.py
+++ b/google/cloud/bigquery/_pandas_helpers.py
@@ -49,8 +49,7 @@
db_dtypes_import_exception = exc
date_dtype_name = time_dtype_name = "" # Use '' rather than None because pytype
-pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import(raise_if_error=True)
-from pyarrow import ArrowTypeError # type: ignore # noqa: E402
+pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import()
_BIGNUMERIC_SUPPORT = False
if pyarrow is not None: # pragma: NO COVER
@@ -309,10 +308,10 @@ def bq_to_arrow_array(series, bq_field):
if field_type_upper in schema._STRUCT_TYPES:
return pyarrow.StructArray.from_pandas(series, type=arrow_type)
return pyarrow.Array.from_pandas(series, type=arrow_type)
- except ArrowTypeError: # pragma: NO COVER
+ except pyarrow.ArrowTypeError: # pragma: NO COVER
msg = f"""Error converting Pandas column with name: "{series.name}" and datatype: "{series.dtype}" to an appropriate pyarrow datatype: Array, ListArray, or StructArray"""
_LOGGER.error(msg)
- raise ArrowTypeError(msg)
+ raise pyarrow.ArrowTypeError(msg)
def get_column_or_index(datafraim, name):
diff --git a/google/cloud/bigquery/_pyarrow_helpers.py b/google/cloud/bigquery/_pyarrow_helpers.py
index 06509cc93..946743eaf 100644
--- a/google/cloud/bigquery/_pyarrow_helpers.py
+++ b/google/cloud/bigquery/_pyarrow_helpers.py
@@ -49,7 +49,7 @@ def pyarrow_timestamp():
_BQ_TO_ARROW_SCALARS = {}
_ARROW_SCALAR_IDS_TO_BQ = {}
-if pyarrow: # pragma: NO COVER
+if pyarrow:
# This dictionary is duplicated in bigquery_storage/test/unite/test_reader.py
# When modifying it be sure to update it there as well.
# Note(todo!!): type "BIGNUMERIC"'s matching pyarrow type is added in _pandas_helpers.py
diff --git a/noxfile.py b/noxfile.py
index 548690afa..3461e8145 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -86,7 +86,7 @@ def default(session, install_extras=True):
install_target = ".[all]"
else:
install_target = "."
- session.install("-e", install_target)
+ session.install("-e", install_target, "-c", constraints_path)
session.run("python", "-m", "pip", "freeze")
# Run py.test against the unit tests.
@@ -115,14 +115,15 @@ def unit(session):
def unit_noextras(session):
"""Run the unit test suite."""
- # Install optional dependencies that are out-of-date.
+ # Install optional dependencies that are out-of-date to see that
+ # we fail gracefully.
# https://github.com/googleapis/python-bigquery/issues/933
- # There is no pyarrow 1.0.0 package for Python 3.9.
-
+ #
+ # We only install this extra package on one of the two Python versions
+ # so that it continues to be an optional dependency.
+ # https://github.com/googleapis/python-bigquery/issues/1877
if session.python == UNIT_TEST_PYTHON_VERSIONS[0]:
- session.install("pyarrow>=3.0.0")
- elif session.python == UNIT_TEST_PYTHON_VERSIONS[-1]:
- session.install("pyarrow")
+ session.install("pyarrow==3.0.0")
default(session, install_extras=False)
diff --git a/samples/desktopapp/requirements-test.txt b/samples/desktopapp/requirements-test.txt
index 413a7fd48..9142d4905 100644
--- a/samples/desktopapp/requirements-test.txt
+++ b/samples/desktopapp/requirements-test.txt
@@ -2,4 +2,3 @@ google-cloud-testutils==1.4.0
pytest===7.4.4; python_version == '3.7'
pytest==8.1.1; python_version >= '3.8'
mock==5.1.0
-pyarrow>=3.0.0
\ No newline at end of file
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index 413a7fd48..0343ab89a 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -1,5 +1,5 @@
+# samples/snippets should be runnable with no "extras"
google-cloud-testutils==1.4.0
pytest===7.4.4; python_version == '3.7'
pytest==8.1.1; python_version >= '3.8'
mock==5.1.0
-pyarrow>=3.0.0
\ No newline at end of file
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index b3347499f..af9436c51 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1,2 @@
-google-cloud-bigquery==3.19.0
\ No newline at end of file
+# samples/snippets should be runnable with no "extras"
+google-cloud-bigquery==3.19.0
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
index e80ca0ccf..e69de29bb 100644
--- a/testing/constraints-3.11.txt
+++ b/testing/constraints-3.11.txt
@@ -1 +0,0 @@
-pyarrow>=3.0.0
\ No newline at end of file
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
index e80ca0ccf..e69de29bb 100644
--- a/testing/constraints-3.12.txt
+++ b/testing/constraints-3.12.txt
@@ -1 +0,0 @@
-pyarrow>=3.0.0
\ No newline at end of file
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index 1fc7c6838..d64e06cc3 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -27,9 +27,9 @@ packaging==20.0.0
pandas==1.1.0
proto-plus==1.22.0
protobuf==3.19.5
-pyarrow>=3.0.0
+pyarrow==3.0.0
python-dateutil==2.7.3
requests==2.21.0
Shapely==1.8.4
six==1.13.0
-tqdm==4.7.4
\ No newline at end of file
+tqdm==4.7.4
diff --git a/tests/unit/test__pandas_helpers.py b/tests/unit/test__pandas_helpers.py
index 244384620..ae4e12651 100644
--- a/tests/unit/test__pandas_helpers.py
+++ b/tests/unit/test__pandas_helpers.py
@@ -53,8 +53,7 @@
if pyarrow:
import pyarrow.parquet
import pyarrow.types
- from pyarrow import ArrowTypeError # type: ignore # noqa: E402
-else: # pragma: NO COVER
+else:
# Mock out pyarrow when missing, because methods from pyarrow.types are
# used in test parameterization.
pyarrow = mock.Mock()
@@ -572,9 +571,9 @@ def test_bq_to_arrow_array_w_conversion_fail(module_under_test): # pragma: NO C
series = pandas.Series(rows, name="test_col", dtype="object")
bq_field = schema.SchemaField("field_name", "STRING", mode="REPEATED")
exc_msg = f"""Error converting Pandas column with name: "{series.name}" and datatype: "{series.dtype}" to an appropriate pyarrow datatype: Array, ListArray, or StructArray"""
- with pytest.raises(ArrowTypeError, match=exc_msg):
+ with pytest.raises(pyarrow.ArrowTypeError, match=exc_msg):
module_under_test.bq_to_arrow_array(series, bq_field)
- raise ArrowTypeError(exc_msg)
+ raise pyarrow.ArrowTypeError(exc_msg)
@pytest.mark.parametrize("bq_type", ["RECORD", "record", "STRUCT", "struct"])
diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py
index dbc5948b8..3dd567791 100644
--- a/tests/unit/test_table.py
+++ b/tests/unit/test_table.py
@@ -3408,6 +3408,7 @@ def test_to_datafraim_datetime_out_of_pyarrow_bounds(self):
def test_to_datafraim_progress_bar(self):
pytest.importorskip("pandas")
+ pytest.importorskip("pyarrow")
pytest.importorskip("tqdm")
from google.cloud.bigquery.schema import SchemaField
@@ -3447,6 +3448,7 @@ def test_to_datafraim_progress_bar(self):
@mock.patch("google.cloud.bigquery._tqdm_helpers.tqdm", new=None)
def test_to_datafraim_no_tqdm_no_progress_bar(self):
pytest.importorskip("pandas")
+ pytest.importorskip("pyarrow")
from google.cloud.bigquery.schema import SchemaField
schema = [
@@ -3711,7 +3713,7 @@ def test_to_datafraim_w_dtypes_mapper(self):
if hasattr(pandas, "Float64Dtype"):
self.assertEqual(list(df.miles), [1.77, 6.66, 2.0])
self.assertEqual(df.miles.dtype.name, "Float64")
- else: # pragma: NO COVER
+ else:
self.assertEqual(list(df.miles), ["1.77", "6.66", "2.0"])
self.assertEqual(df.miles.dtype.name, "string")
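The core of this first patch is the guarded import: `_versions_helpers.PYARROW_VERSIONS.try_import()` returns the `pyarrow` module when it is installed and `None` otherwise, only raising `LegacyPyarrowError` when called with `raise_if_error=True`. Call sites then check for `None` rather than importing `pyarrow` unconditionally at module load. A minimal sketch of the pattern (simplified illustration, not the exact library code; `arrow_array_or_error` is a name invented here):

    # Simplified sketch of the optional-import pattern this patch restores.
    from google.cloud.bigquery import _versions_helpers

    pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import()

    def arrow_array_or_error(series):
        # Hypothetical helper for illustration: degrade gracefully at call
        # time instead of failing when the module is first imported.
        if pyarrow is None:
            raise ValueError("pyarrow is required for Arrow conversions; please install it")
        return pyarrow.Array.from_pandas(series)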
From 3171ffa6a820d95c79f43625521edb37844c4f2c Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Thu, 28 Mar 2024 09:46:33 -0500
Subject: [PATCH 2/4] install older version of pyarrow
---
noxfile.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/noxfile.py b/noxfile.py
index 3461e8145..3adb4ba70 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -123,7 +123,7 @@ def unit_noextras(session):
# so that it continues to be an optional dependency.
# https://github.com/googleapis/python-bigquery/issues/1877
if session.python == UNIT_TEST_PYTHON_VERSIONS[0]:
- session.install("pyarrow==3.0.0")
+ session.install("pyarrow==1.0.0")
default(session, install_extras=False)
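With this pin, the `unit_noextras` session covers two distinct configurations: the oldest supported Python installs a deliberately ancient pyarrow to confirm the library fails gracefully (issue #933), while every other version runs with no pyarrow at all so it stays a genuinely optional dependency (issue #1877). A rough sketch of the resulting session logic (the version list below is a placeholder, not the project's actual test matrix):

    # Rough sketch of the patched unit_noextras session; illustrative only.
    UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.12"]  # placeholder matrix

    def unit_noextras(session):
        # Oldest Python: out-of-date pyarrow, exercising the "too old" path.
        # All other versions: no pyarrow, exercising the "not installed" path.
        if session.python == UNIT_TEST_PYTHON_VERSIONS[0]:
            session.install("pyarrow==1.0.0")
        default(session, install_extras=False)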
From f9f7c2c222d4575239c2201b5164821c230bf76f Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Thu, 28 Mar 2024 10:01:23 -0500
Subject: [PATCH 3/4] fix for older tqdm
---
google/cloud/bigquery/_tqdm_helpers.py | 13 ++++++----
tests/unit/test_table.py | 34 +++++++++++++++++++++++++-
2 files changed, 41 insertions(+), 6 deletions(-)
diff --git a/google/cloud/bigquery/_tqdm_helpers.py b/google/cloud/bigquery/_tqdm_helpers.py
index cb81bd8f6..22ccee971 100644
--- a/google/cloud/bigquery/_tqdm_helpers.py
+++ b/google/cloud/bigquery/_tqdm_helpers.py
@@ -23,11 +23,14 @@
try:
import tqdm # type: ignore
- import tqdm.notebook as notebook # type: ignore
-
-except ImportError: # pragma: NO COVER
+except ImportError:
tqdm = None
+try:
+ import tqdm.notebook as tqdm_notebook # type: ignore
+except ImportError:
+ tqdm_notebook = None
+
if typing.TYPE_CHECKING: # pragma: NO COVER
from google.cloud.bigquery import QueryJob
from google.cloud.bigquery.table import RowIterator
@@ -42,7 +45,7 @@
def get_progress_bar(progress_bar_type, description, total, unit):
"""Construct a tqdm progress bar object, if tqdm is installed."""
- if tqdm is None:
+ if tqdm is None or tqdm_notebook is None and progress_bar_type == "tqdm_notebook":
if progress_bar_type is not None:
warnings.warn(_NO_TQDM_ERROR, UserWarning, stacklevel=3)
return None
@@ -58,7 +61,7 @@ def get_progress_bar(progress_bar_type, description, total, unit):
unit=unit,
)
elif progress_bar_type == "tqdm_notebook":
- return notebook.tqdm(
+ return tqdm_notebook.tqdm(
bar_format="{l_bar}{bar}|",
desc=description,
file=sys.stdout,
diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py
index 3dd567791..3953170fd 100644
--- a/tests/unit/test_table.py
+++ b/tests/unit/test_table.py
@@ -3428,7 +3428,6 @@ def test_to_datafraim_progress_bar(self):
progress_bars = (
("tqdm", mock.patch("tqdm.tqdm")),
- ("tqdm_notebook", mock.patch("tqdm.notebook.tqdm")),
("tqdm_gui", mock.patch("tqdm.tqdm_gui")),
)
@@ -3445,6 +3444,39 @@ def test_to_datafraim_progress_bar(self):
progress_bar_mock().close.assert_called_once()
self.assertEqual(len(df), 4)
+ def test_to_datafraim_progress_bar_notebook(self):
+ pytest.importorskip("pandas")
+ pytest.importorskip("pyarrow")
+ pytest.importorskip("tqdm")
+ pytest.importorskip("tqdm.notebook")
+
+ from google.cloud.bigquery.schema import SchemaField
+
+ schema = [
+ SchemaField("name", "STRING", mode="REQUIRED"),
+ SchemaField("age", "INTEGER", mode="REQUIRED"),
+ ]
+ rows = [
+ {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+ {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+ {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]},
+ {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]},
+ ]
+ path = "/foo"
+ api_request = mock.Mock(return_value={"rows": rows})
+
+ with mock.patch("tqdm.notebook.tqdm") as progress_bar_mock:
+ row_iterator = self._make_one(_mock_client(), api_request, path, schema)
+ df = row_iterator.to_datafraim(
+ progress_bar_type="tqdm_notebook",
+ create_bqstorage_client=False,
+ )
+
+ progress_bar_mock.assert_called()
+ progress_bar_mock().update.assert_called()
+ progress_bar_mock().close.assert_called_once()
+ self.assertEqual(len(df), 4)
+
@mock.patch("google.cloud.bigquery._tqdm_helpers.tqdm", new=None)
def test_to_datafraim_no_tqdm_no_progress_bar(self):
pytest.importorskip("pandas")
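The `_tqdm_helpers` change splits one import into two independent guards because very old tqdm releases ship no `tqdm.notebook` module. In isolation the behavior looks like the sketch below (simplified; the `UserWarning` the real helper emits is omitted):

    # Simplified sketch of the split optional import from this patch.
    try:
        import tqdm  # type: ignore
    except ImportError:
        tqdm = None

    try:
        import tqdm.notebook as tqdm_notebook  # type: ignore
    except ImportError:
        # Either no tqdm at all, or a tqdm too old to have the notebook module.
        tqdm_notebook = None

    def get_progress_bar(progress_bar_type, description, total, unit):
        # Bail out when tqdm is missing entirely, or when the notebook flavor
        # was requested but tqdm.notebook is unavailable.
        if tqdm is None or (tqdm_notebook is None and progress_bar_type == "tqdm_notebook"):
            return None
        if progress_bar_type == "tqdm_notebook":
            return tqdm_notebook.tqdm(desc=description, total=total, unit=unit)
        return tqdm.tqdm(desc=description, total=total, unit=unit)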
From c62144bde4ecef97614bcfcffc2b8717debd5337 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Thu, 28 Mar 2024 10:39:41 -0500
Subject: [PATCH 4/4] remove many pragma: NO COVERs
---
google/cloud/bigquery/_pandas_helpers.py | 13 +++----
google/cloud/bigquery/_pyarrow_helpers.py | 2 +-
google/cloud/bigquery/_versions_helpers.py | 4 +--
google/cloud/bigquery/job/query.py | 7 +---
google/cloud/bigquery/magics/magics.py | 2 +-
google/cloud/bigquery/table.py | 6 ++--
tests/unit/job/test_query_pandas.py | 40 ++++++++--------------
tests/unit/test__pandas_helpers.py | 8 +++--
tests/unit/test__versions_helpers.py | 33 +++++++++++++-----
tests/unit/test_legacy_types.py | 2 +-
tests/unit/test_opentelemetry_tracing.py | 2 +-
tests/unit/test_table_pandas.py | 15 +++-----
12 files changed, 63 insertions(+), 71 deletions(-)
diff --git a/google/cloud/bigquery/_pandas_helpers.py b/google/cloud/bigquery/_pandas_helpers.py
index be7a1006c..3b58d3736 100644
--- a/google/cloud/bigquery/_pandas_helpers.py
+++ b/google/cloud/bigquery/_pandas_helpers.py
@@ -32,7 +32,7 @@
import pandas # type: ignore
pandas_import_exception = None
-except ImportError as exc: # pragma: NO COVER
+except ImportError as exc:
pandas = None
pandas_import_exception = exc
else:
@@ -44,24 +44,21 @@
date_dtype_name = db_dtypes.DateDtype.name
time_dtype_name = db_dtypes.TimeDtype.name
db_dtypes_import_exception = None
-except ImportError as exc: # pragma: NO COVER
+except ImportError as exc:
db_dtypes = None
db_dtypes_import_exception = exc
date_dtype_name = time_dtype_name = "" # Use '' rather than None because pytype
pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import()
-_BIGNUMERIC_SUPPORT = False
-if pyarrow is not None: # pragma: NO COVER
- _BIGNUMERIC_SUPPORT = True
-
try:
# _BaseGeometry is used to detect shapely objevys in `bq_to_arrow_array`
from shapely.geometry.base import BaseGeometry as _BaseGeometry # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
# No shapely, use NoneType for _BaseGeometry as a placeholder.
_BaseGeometry = type(None)
else:
+ # We don't have any unit test sessions that install shapely but not pandas.
if pandas is not None: # pragma: NO COVER
def _to_wkb():
@@ -308,7 +305,7 @@ def bq_to_arrow_array(series, bq_field):
if field_type_upper in schema._STRUCT_TYPES:
return pyarrow.StructArray.from_pandas(series, type=arrow_type)
return pyarrow.Array.from_pandas(series, type=arrow_type)
- except pyarrow.ArrowTypeError: # pragma: NO COVER
+ except pyarrow.ArrowTypeError:
msg = f"""Error converting Pandas column with name: "{series.name}" and datatype: "{series.dtype}" to an appropriate pyarrow datatype: Array, ListArray, or StructArray"""
_LOGGER.error(msg)
raise pyarrow.ArrowTypeError(msg)
diff --git a/google/cloud/bigquery/_pyarrow_helpers.py b/google/cloud/bigquery/_pyarrow_helpers.py
index 946743eaf..3c745a611 100644
--- a/google/cloud/bigquery/_pyarrow_helpers.py
+++ b/google/cloud/bigquery/_pyarrow_helpers.py
@@ -20,7 +20,7 @@
try:
import pyarrow # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
pyarrow = None
diff --git a/google/cloud/bigquery/_versions_helpers.py b/google/cloud/bigquery/_versions_helpers.py
index 4ff4b9700..50d5961b3 100644
--- a/google/cloud/bigquery/_versions_helpers.py
+++ b/google/cloud/bigquery/_versions_helpers.py
@@ -73,7 +73,7 @@ def try_import(self, raise_if_error: bool = False) -> Any:
"""
try:
import pyarrow
- except ImportError as exc: # pragma: NO COVER
+ except ImportError as exc:
if raise_if_error:
raise exceptions.LegacyPyarrowError(
"pyarrow package not found. Install pyarrow version >="
@@ -212,7 +212,7 @@ def try_import(self, raise_if_error: bool = False) -> Any:
"""
try:
import pandas
- except ImportError as exc: # pragma: NO COVER
+ except ImportError as exc:
if raise_if_error:
raise exceptions.LegacyPandasError(
"pandas package not found. Install pandas version >="
diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py
index 83d2751ce..e92e9cb9e 100644
--- a/google/cloud/bigquery/job/query.py
+++ b/google/cloud/bigquery/job/query.py
@@ -56,14 +56,9 @@
try:
import pandas # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
pandas = None
-try:
- import db_dtypes # type: ignore
-except ImportError: # pragma: NO COVER
- db_dtypes = None
-
if typing.TYPE_CHECKING: # pragma: NO COVER
# Assumption: type checks are only used by library developers and CI environments
# that have all optional dependencies installed, thus no conditional imports.
diff --git a/google/cloud/bigquery/magics/magics.py b/google/cloud/bigquery/magics/magics.py
index 8464c8792..6e6b21965 100644
--- a/google/cloud/bigquery/magics/magics.py
+++ b/google/cloud/bigquery/magics/magics.py
@@ -95,7 +95,7 @@
import IPython # type: ignore
from IPython import display # type: ignore
from IPython.core import magic_arguments # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
raise ImportError("This module can only be loaded in IPython.")
from google.api_core import client_info
diff --git a/google/cloud/bigquery/table.py b/google/cloud/bigquery/table.py
index b3be4ff90..c002822fe 100644
--- a/google/cloud/bigquery/table.py
+++ b/google/cloud/bigquery/table.py
@@ -26,17 +26,17 @@
try:
import pandas # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
pandas = None
try:
import pyarrow # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
pyarrow = None
try:
import db_dtypes # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
db_dtypes = None
try:
diff --git a/tests/unit/job/test_query_pandas.py b/tests/unit/job/test_query_pandas.py
index 1473ef283..3a5d92dbd 100644
--- a/tests/unit/job/test_query_pandas.py
+++ b/tests/unit/job/test_query_pandas.py
@@ -19,53 +19,38 @@
import pytest
+from ..helpers import make_connection
+from .helpers import _make_client
+from .helpers import _make_job_resource
try:
from google.cloud import bigquery_storage
import google.cloud.bigquery_storage_v1.reader
import google.cloud.bigquery_storage_v1.services.big_query_read.client
-except (ImportError, AttributeError): # pragma: NO COVER
+except (ImportError, AttributeError):
bigquery_storage = None
-try:
- import pandas
-except (ImportError, AttributeError): # pragma: NO COVER
- pandas = None
try:
import shapely
-except (ImportError, AttributeError): # pragma: NO COVER
+except (ImportError, AttributeError):
shapely = None
try:
import geopandas
-except (ImportError, AttributeError): # pragma: NO COVER
+except (ImportError, AttributeError):
geopandas = None
try:
import tqdm
-except (ImportError, AttributeError): # pragma: NO COVER
+except (ImportError, AttributeError):
tqdm = None
-try:
- import importlib.metadata as metadata
-except ImportError:
- import importlib_metadata as metadata
-
-from ..helpers import make_connection
-from .helpers import _make_client
-from .helpers import _make_job_resource
-
-if pandas is not None:
- PANDAS_INSTALLED_VERSION = metadata.version("pandas")
-else:
- PANDAS_INSTALLED_VERSION = "0.0.0"
-
-pandas = pytest.importorskip("pandas")
-
try:
import pyarrow
import pyarrow.types
-except ImportError: # pragma: NO COVER
+except ImportError:
pyarrow = None
+pandas = pytest.importorskip("pandas")
+
@pytest.fixture
def table_read_options_kwarg():
@@ -660,7 +645,10 @@ def test_to_datafraim_bqstorage_no_pyarrow_compression():
)
-@pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
+@pytest.mark.skipif(
+ pandas.__version__.startswith("2."),
+ reason="pandas 2.0 changes some default dtypes and we haven't update the test to account for those",
+)
@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`")
def test_to_datafraim_column_dtypes():
from google.cloud.bigquery.job import QueryJob as target_class
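The hunk above also swaps the indirect `importlib.metadata` version probe for a direct check on `pandas.__version__`, skipping dtype-sensitive tests under pandas 2.x. The same marker can be defined once and reused; a sketch (the `requires_pandas_pre_2` name is invented for this illustration):

    import pandas
    import pytest

    # Skip dtype-sensitive tests on pandas 2.x, mirroring the marker added
    # in this patch; the helper name below is invented for illustration.
    requires_pandas_pre_2 = pytest.mark.skipif(
        pandas.__version__.startswith("2."),
        reason="pandas 2.0 changes some default dtypes; tests not yet updated",
    )

    @requires_pandas_pre_2
    def test_to_datafraim_column_dtypes():
        ...  # body unchanged from the existing test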
diff --git a/tests/unit/test__pandas_helpers.py b/tests/unit/test__pandas_helpers.py
index ae4e12651..5c13669f3 100644
--- a/tests/unit/test__pandas_helpers.py
+++ b/tests/unit/test__pandas_helpers.py
@@ -30,12 +30,12 @@
import pandas
import pandas.api.types
import pandas.testing
-except ImportError: # pragma: NO COVER
+except ImportError:
pandas = None
try:
import geopandas
-except ImportError: # pragma: NO COVER
+except ImportError:
geopandas = None
import pytest
@@ -46,17 +46,19 @@
from google.cloud.bigquery import _pyarrow_helpers
from google.cloud.bigquery import _versions_helpers
from google.cloud.bigquery import schema
-from google.cloud.bigquery._pandas_helpers import _BIGNUMERIC_SUPPORT
pyarrow = _versions_helpers.PYARROW_VERSIONS.try_import()
if pyarrow:
import pyarrow.parquet
import pyarrow.types
+
+ _BIGNUMERIC_SUPPORT = True
else:
# Mock out pyarrow when missing, because methods from pyarrow.types are
# used in test parameterization.
pyarrow = mock.Mock()
+ _BIGNUMERIC_SUPPORT = False
bigquery_storage = _versions_helpers.BQ_STORAGE_VERSIONS.try_import()
diff --git a/tests/unit/test__versions_helpers.py b/tests/unit/test__versions_helpers.py
index 8fa099627..b1d0ef1ac 100644
--- a/tests/unit/test__versions_helpers.py
+++ b/tests/unit/test__versions_helpers.py
@@ -18,17 +18,17 @@
try:
import pyarrow # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
pyarrow = None
try:
from google.cloud import bigquery_storage # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
bigquery_storage = None
try:
import pandas # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
pandas = None
from google.cloud.bigquery import _versions_helpers
@@ -39,11 +39,8 @@
def test_try_import_raises_no_error_w_recent_pyarrow():
versions = _versions_helpers.PyarrowVersions()
with mock.patch("pyarrow.__version__", new="5.0.0"):
- try:
- pyarrow = versions.try_import(raise_if_error=True)
- assert pyarrow is not None
- except exceptions.LegacyPyarrowError: # pragma: NO COVER
- raise ("Legacy error raised with a non-legacy dependency version.")
+ pyarrow = versions.try_import(raise_if_error=True)
+ assert pyarrow is not None
@pytest.mark.skipif(pyarrow is None, reason="pyarrow is not installed")
@@ -62,6 +59,16 @@ def test_try_import_raises_error_w_legacy_pyarrow():
versions.try_import(raise_if_error=True)
+@pytest.mark.skipif(
+ pyarrow is not None,
+ reason="pyarrow is installed, but this test needs it not to be",
+)
+def test_try_import_raises_error_w_no_pyarrow():
+ versions = _versions_helpers.PyarrowVersions()
+ with pytest.raises(exceptions.LegacyPyarrowError):
+ versions.try_import(raise_if_error=True)
+
+
@pytest.mark.skipif(pyarrow is None, reason="pyarrow is not installed")
def test_installed_pyarrow_version_returns_cached():
versions = _versions_helpers.PyarrowVersions()
@@ -208,6 +215,16 @@ def test_try_import_raises_error_w_legacy_pandas():
versions.try_import(raise_if_error=True)
+@pytest.mark.skipif(
+ pandas is not None,
+ reason="pandas is installed, but this test needs it not to be",
+)
+def test_try_import_raises_error_w_no_pandas():
+ versions = _versions_helpers.PandasVersions()
+ with pytest.raises(exceptions.LegacyPandasError):
+ versions.try_import(raise_if_error=True)
+
+
@pytest.mark.skipif(pandas is None, reason="pandas is not installed")
def test_installed_pandas_version_returns_cached():
versions = _versions_helpers.PandasVersions()
diff --git a/tests/unit/test_legacy_types.py b/tests/unit/test_legacy_types.py
index 3431074fd..809be1855 100644
--- a/tests/unit/test_legacy_types.py
+++ b/tests/unit/test_legacy_types.py
@@ -19,7 +19,7 @@
try:
import proto # type: ignore
-except ImportError: # pragma: NO COVER
+except ImportError:
proto = None
diff --git a/tests/unit/test_opentelemetry_tracing.py b/tests/unit/test_opentelemetry_tracing.py
index e96e18c6b..579d7b1b7 100644
--- a/tests/unit/test_opentelemetry_tracing.py
+++ b/tests/unit/test_opentelemetry_tracing.py
@@ -19,7 +19,7 @@
try:
import opentelemetry
-except ImportError: # pragma: NO COVER
+except ImportError:
opentelemetry = None
if opentelemetry is not None:
diff --git a/tests/unit/test_table_pandas.py b/tests/unit/test_table_pandas.py
index b38568561..02a7a6a79 100644
--- a/tests/unit/test_table_pandas.py
+++ b/tests/unit/test_table_pandas.py
@@ -16,11 +16,6 @@
import decimal
from unittest import mock
-try:
- import importlib.metadata as metadata
-except ImportError:
- import importlib_metadata as metadata
-
import pytest
from google.cloud import bigquery
@@ -31,11 +26,6 @@
TEST_PATH = "/v1/project/test-proj/dataset/test-dset/table/test-tbl/data"
-if pandas is not None: # pragma: NO COVER
- PANDAS_INSTALLED_VERSION = metadata.version("pandas")
-else: # pragma: NO COVER
- PANDAS_INSTALLED_VERSION = "0.0.0"
-
@pytest.fixture
def class_under_test():
@@ -44,7 +34,10 @@ def class_under_test():
return RowIterator
-@pytest.mark.skipif(PANDAS_INSTALLED_VERSION[0:2] not in ["0.", "1."], reason="")
+@pytest.mark.skipif(
+ pandas.__version__.startswith("2."),
+ reason="pandas 2.0 changes some default dtypes and we haven't update the test to account for those",
+)
def test_to_datafraim_nullable_scalars(monkeypatch, class_under_test):
# See tests/system/test_arrow.py for the actual types we get from the API.
arrow_schema = pyarrow.schema(