[go: up one dir, main page]

Skip to content

Commit

Permalink
fix: updates a number of optional dependencies (#1864)
Browse files Browse the repository at this point in the history
This fix updates a number of optional dependencies.
We switch to a different module-import-skipping mechanism (`pytest.importorskip` in place of `unittest.skipIf`).

This first major commit gets the ball rolling; a few additional commits will follow to cover the remaining files.

Fixes #<issue_number_goes_here> 🦕
  • Loading branch information
chalmerlowe committed Mar 27, 2024
1 parent 08b1e6f commit c2496a1
Show file tree
Hide file tree
Showing 9 changed files with 311 additions and 405 deletions.
2 changes: 1 addition & 1 deletion google/cloud/bigquery/_tqdm_helpers.py
Expand Up @@ -67,7 +67,7 @@ def get_progress_bar(progress_bar_type, description, total, unit):
)
elif progress_bar_type == "tqdm_gui":
return tqdm.tqdm_gui(desc=description, total=total, unit=unit)
except (KeyError, TypeError):
except (KeyError, TypeError): # pragma: NO COVER
# Protect ourselves from any tqdm errors. In case of
# unexpected tqdm behavior, just fall back to showing
# no progress bar.
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/bigquery/client.py
Expand Up @@ -593,7 +593,7 @@ def _ensure_bqstorage_client(
)
return None

if bqstorage_client is None:
if bqstorage_client is None: # pragma: NO COVER
bqstorage_client = bigquery_storage.BigQueryReadClient(
credentials=self._credentials,
client_options=client_options,
Expand Down
5 changes: 3 additions & 2 deletions setup.py
Expand Up @@ -45,8 +45,9 @@
]
pyarrow_dependency = "pyarrow >= 3.0.0"
extras = {
# Keep the no-op bqstorage extra for backward compatibility.
# See: https://github.com/googleapis/python-bigquery/issues/757
# bqstorage had a period where it was a required dependency, and has been
# moved back to optional due to bloat. See
# https://github.com/googleapis/python-bigquery/issues/1196 for more background.
"bqstorage": [
"google-cloud-bigquery-storage >= 2.6.0, <3.0.0dev",
# Due to an issue in pip's dependency resolver, the `grpc` extra is not
Expand Down
28 changes: 7 additions & 21 deletions tests/system/test_client.py
Expand Up @@ -54,16 +54,6 @@

from . import helpers

try:
from google.cloud import bigquery_storage
except ImportError: # pragma: NO COVER
bigquery_storage = None

try:
import pyarrow
import pyarrow.types
except ImportError: # pragma: NO COVER
pyarrow = None

JOB_TIMEOUT = 120 # 2 minutes
DATA_PATH = pathlib.Path(__file__).parent.parent / "data"
Expand Down Expand Up @@ -1772,11 +1762,10 @@ def test_dbapi_fetchall_from_script(self):
row_tuples = [r.values() for r in rows]
self.assertEqual(row_tuples, [(5, "foo"), (6, "bar"), (7, "baz")])

@unittest.skipIf(
bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
)
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pyarrow")

bqstorage_client = bigquery_storage.BigQueryReadClient(
credentials=Config.CLIENT._credentials
)
Expand Down Expand Up @@ -1834,10 +1823,8 @@ def test_dbapi_dry_run_query(self):

self.assertEqual(list(rows), [])

@unittest.skipIf(
bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
)
def test_dbapi_connection_does_not_leak_sockets(self):
pytest.importorskip("google.cloud.bigquery_storage")
current_process = psutil.Process()
conn_count_start = len(current_process.connections())

Expand Down Expand Up @@ -2382,11 +2369,10 @@ def test_create_table_rows_fetch_nested_schema(self):
self.assertEqual(found[7], e_favtime)
self.assertEqual(found[8], decimal.Decimal(expected["FavoriteNumber"]))

@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
@unittest.skipIf(
bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
)
def test_nested_table_to_arrow(self):
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
pyarrow = pytest.importorskip("pyarrow")
pyarrow.types = pytest.importorskip("pyarrow.types")
from google.cloud.bigquery.job import SourceFormat
from google.cloud.bigquery.job import WriteDisposition

Expand Down

0 comments on commit c2496a1

Please sign in to comment.