
Commit 8360487

tests: remove warning spew (#197)
Fixes: #196
1 parent: 8fe7254

5 files changed: 112 additions, 33 deletions

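Every file in this commit applies the same fix: tests that deliberately exercise warning-producing code paths now record those warnings with `warnings.catch_warnings(record=True)` and assert on them, instead of letting them spill into the pytest output. A minimal sketch of the pattern, using only the standard library (the `legacy_call` function is illustrative, not part of this commit):

    import warnings

    def legacy_call():
        # Stand-in for any library call that warns by design.
        warnings.warn("this call is deprecated", DeprecationWarning)

    with warnings.catch_warnings(record=True) as warned:
        # "always" ensures the warning is not dropped by a
        # once-per-location filter, so the test is deterministic.
        warnings.simplefilter("always")
        legacy_call()

    assert len(warned) == 1
    assert "deprecated" in str(warned[0])

As a bonus, the assertions turn former noise into coverage: if a warning ever disappears or changes its wording, a test now fails.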

tests/unit/test__pandas_helpers.py

Lines changed: 18 additions & 3 deletions

@@ -20,6 +20,7 @@
 import warnings

 import mock
+import six

 try:
     import pandas
@@ -299,7 +300,10 @@ def test_bq_to_arrow_data_type_w_struct(module_under_test, bq_type):
         )
     )
     assert pyarrow.types.is_struct(actual)
-    assert actual.num_children == len(fields)
+    try:
+        assert actual.num_fields == len(fields)
+    except AttributeError:  # py27
+        assert actual.num_children == len(fields)
     assert actual.equals(expected)


@@ -344,7 +348,10 @@ def test_bq_to_arrow_data_type_w_array_struct(module_under_test, bq_type):
     )
     assert pyarrow.types.is_list(actual)
     assert pyarrow.types.is_struct(actual.value_type)
-    assert actual.value_type.num_children == len(fields)
+    try:
+        assert actual.value_type.num_fields == len(fields)
+    except AttributeError:  # py27
+        assert actual.value_type.num_children == len(fields)
     assert actual.value_type.equals(expected_value_type)


@@ -542,9 +549,17 @@ def test_bq_to_arrow_schema_w_unknown_type(module_under_test):
         # instead.
         schema.SchemaField("field3", "UNKNOWN_TYPE"),
     )
-    actual = module_under_test.bq_to_arrow_schema(fields)
+    with warnings.catch_warnings(record=True) as warned:
+        actual = module_under_test.bq_to_arrow_schema(fields)
     assert actual is None

+    if six.PY3:
+        assert len(warned) == 1
+        warning = warned[0]
+        assert "field3" in str(warning)
+    else:
+        assert len(warned) == 0
+

 @pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
 def test_get_column_or_index_not_found(module_under_test):
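The `try`/`except AttributeError` blocks above are a compatibility shim: newer pyarrow releases expose the number of struct children as `num_fields`, while the older releases that still run on Python 2.7 only have `num_children`. A standalone sketch of the same probe (assumes a pyarrow installation):

    import pyarrow

    struct_type = pyarrow.struct([("x", pyarrow.int64()), ("y", pyarrow.string())])

    try:
        # Newer pyarrow spells the child count ``num_fields``.
        count = struct_type.num_fields
    except AttributeError:
        # Older pyarrow (the py27-era releases) used ``num_children``.
        count = struct_type.num_children

    assert count == 2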

tests/unit/test_client.py

Lines changed: 17 additions & 8 deletions

@@ -221,7 +221,8 @@ def test__call_api_applying_custom_retry_on_timeout(self):
         from concurrent.futures import TimeoutError
         from google.cloud.bigquery.retry import DEFAULT_RETRY

-        client = self._make_one(project=self.PROJECT)
+        creds = _make_credentials()
+        client = self._make_one(project=self.PROJECT, credentials=creds)

         api_request_patcher = mock.patch.object(
             client._connection, "api_request", side_effect=[TimeoutError, "result"],
@@ -674,7 +675,8 @@ def test_create_bqstorage_client(self):
         mock_client.assert_called_once_with(credentials=creds)

     def test_create_bqstorage_client_missing_dependency(self):
-        client = self._make_one(project=self.PROJECT)
+        creds = _make_credentials()
+        client = self._make_one(project=self.PROJECT, credentials=creds)

         def fail_bqstorage_import(name, globals, locals, fromlist, level):
             # NOTE: *very* simplified, assuming a straightforward absolute import
@@ -7680,17 +7682,24 @@ def test_load_table_from_dataframe_wo_pyarrow_custom_compression(self):
         )

         with load_patch, get_table_patch, pyarrow_patch, to_parquet_patch as to_parquet_spy:
-            client.load_table_from_dataframe(
-                dataframe,
-                self.TABLE_REF,
-                location=self.LOCATION,
-                parquet_compression="gzip",
-            )
+            with warnings.catch_warnings(record=True) as warned:
+                client.load_table_from_dataframe(
+                    dataframe,
+                    self.TABLE_REF,
+                    location=self.LOCATION,
+                    parquet_compression="gzip",
+                )

         call_args = to_parquet_spy.call_args
         assert call_args is not None
         assert call_args.kwargs.get("compression") == "gzip"

+        assert len(warned) == 2
+        warning = warned[0]
+        assert "Loading dataframe data without pyarrow" in str(warning)
+        warning = warned[1]
+        assert "Please install the pyarrow package" in str(warning)
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_load_table_from_dataframe_w_nulls(self):
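The two constructor changes in this file share a cause: building a `Client` without credentials sends google-auth looking for Application Default Credentials, and that lookup is what warned during the test run. Supplying mock credentials keeps the constructor self-contained. A sketch of the idea outside the test harness; `_make_credentials()` in the test module presumably wraps something like the `create_autospec` call here, and `"my-project"` is a placeholder:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import bigquery

    # Explicit (mock) credentials stop the client from falling back to
    # Application Default Credentials, which warns or errors when none
    # are configured in the environment.
    creds = mock.create_autospec(ga_credentials.Credentials, instance=True)
    client = bigquery.Client(project="my-project", credentials=creds)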

tests/unit/test_job.py

Lines changed: 33 additions & 13 deletions

@@ -17,6 +17,7 @@
 import json
 import textwrap
 import unittest
+import warnings

 import freezegun
 import mock
@@ -1834,26 +1835,34 @@ def test_time_partitioning_hit(self):
             "expirationMs": str(year_ms),
             "requirePartitionFilter": False,
         }
-        expected = TimePartitioning(
-            type_=TimePartitioningType.DAY,
-            field=field,
-            expiration_ms=year_ms,
-            require_partition_filter=False,
-        )
+        with warnings.catch_warnings(record=True) as warned:
+            expected = TimePartitioning(
+                type_=TimePartitioningType.DAY,
+                field=field,
+                expiration_ms=year_ms,
+                require_partition_filter=False,
+            )
         self.assertEqual(config.time_partitioning, expected)

+        assert len(warned) == 1
+        warning = warned[0]
+        assert "TimePartitioning.require_partition_filter" in str(warning)
+
     def test_time_partitioning_setter(self):
         from google.cloud.bigquery.table import TimePartitioning
         from google.cloud.bigquery.table import TimePartitioningType

         field = "creation_date"
         year_ms = 86400 * 1000 * 365
-        time_partitioning = TimePartitioning(
-            type_=TimePartitioningType.DAY,
-            field=field,
-            expiration_ms=year_ms,
-            require_partition_filter=False,
-        )
+
+        with warnings.catch_warnings(record=True) as warned:
+            time_partitioning = TimePartitioning(
+                type_=TimePartitioningType.DAY,
+                field=field,
+                expiration_ms=year_ms,
+                require_partition_filter=False,
+            )
+
         config = self._get_target_class()()
         config.time_partitioning = time_partitioning
         expected = {
@@ -1864,6 +1873,10 @@ def test_time_partitioning_setter(self):
         }
         self.assertEqual(config._properties["load"]["timePartitioning"], expected)

+        assert len(warned) == 1
+        warning = warned[0]
+        assert "TimePartitioning.require_partition_filter" in str(warning)
+
     def test_time_partitioning_setter_w_none(self):
         from google.cloud.bigquery.table import TimePartitioningType

@@ -5595,7 +5608,10 @@ def test_to_dataframe_column_date_dtypes_wo_pyarrow(self):
         job = self._make_one(self.JOB_ID, self.QUERY, client)

         with mock.patch("google.cloud.bigquery.table.pyarrow", None):
-            df = job.to_dataframe(date_as_object=False, create_bqstorage_client=False)
+            with warnings.catch_warnings(record=True) as warned:
+                df = job.to_dataframe(
+                    date_as_object=False, create_bqstorage_client=False
+                )

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 1)  # verify the number of rows
@@ -5604,6 +5620,10 @@ def test_to_dataframe_column_date_dtypes_wo_pyarrow(self):

         self.assertEqual(df.date.dtype.name, "object")

+        assert len(warned) == 1
+        warning = warned[0]
+        assert "without pyarrow" in str(warning)
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(tqdm is None, "Requires `tqdm`")
     @mock.patch("tqdm.tqdm")
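A detail the assertions above rely on: `catch_warnings(record=True)` yields `warnings.WarningMessage` records rather than bare warning instances, and `str()` of such a record includes the message text, which is why substring checks like `assert "without pyarrow" in str(warning)` work. In isolation:

    import warnings

    with warnings.catch_warnings(record=True) as warned:
        warnings.simplefilter("always")
        warnings.warn("to_dataframe without pyarrow is slow", UserWarning)

    record = warned[0]                     # a warnings.WarningMessage
    assert record.category is UserWarning
    assert "without pyarrow" in str(record.message)  # the warning itself
    assert "without pyarrow" in str(record)          # the whole record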

tests/unit/test_magics.py

Lines changed: 3 additions & 3 deletions

@@ -399,7 +399,7 @@ def test_bigquery_magic_without_optional_arguments(monkeypatch):

     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
-    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)

     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
@@ -560,7 +560,7 @@ def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch):

     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
-    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)

     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
@@ -624,7 +624,7 @@ def test_bigquery_magic_with_rest_client_requested(monkeypatch):

     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
-    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)

     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
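Patching `"credentials"` looks equivalent, but `monkeypatch.setattr` first reads the current value so it can restore it afterwards, and reading the `credentials` property can trigger a real `google.auth.default()` lookup, complete with its warnings. Patching the private `_credentials` attribute the property reads from sidesteps that. The backing-attribute name is taken from the diff; the fixture below and its name are mine, a sketch rather than code from this commit:

    import mock
    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud.bigquery import magics

    @pytest.fixture
    def context_credentials(monkeypatch):
        """Install mock credentials on the shared magics context."""
        creds = mock.create_autospec(ga_credentials.Credentials, instance=True)
        # Setting the backing attribute avoids invoking the ``credentials``
        # property; monkeypatch undoes the change after the test.
        monkeypatch.setattr(magics.context, "_credentials", creds)
        return creds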

tests/unit/test_table.py

Lines changed: 41 additions & 6 deletions

@@ -1778,7 +1778,8 @@ def test_to_arrow_w_unknown_type(self):
         api_request = mock.Mock(return_value={"rows": rows})
         row_iterator = self._make_one(_mock_client(), api_request, path, schema)

-        tbl = row_iterator.to_arrow(create_bqstorage_client=False)
+        with warnings.catch_warnings(record=True) as warned:
+            tbl = row_iterator.to_arrow(create_bqstorage_client=False)

         self.assertIsInstance(tbl, pyarrow.Table)
         self.assertEqual(tbl.num_rows, 2)
@@ -1799,6 +1800,10 @@ def test_to_arrow_w_unknown_type(self):
         self.assertEqual(ages, [33, 29])
         self.assertEqual(sports, ["volleyball", "basketball"])

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("sport" in str(warning))
+
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_arrow_w_empty_table(self):
         from google.cloud.bigquery.schema import SchemaField
@@ -2370,13 +2375,18 @@ def test_to_dataframe_progress_bar_wo_pyarrow(
         for progress_bar_type, progress_bar_mock in progress_bars:
             row_iterator = self._make_one(_mock_client(), api_request, path, schema)
             with mock.patch("google.cloud.bigquery.table.pyarrow", None):
-                df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type)
+                with warnings.catch_warnings(record=True) as warned:
+                    df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type)

             progress_bar_mock.assert_called()
             progress_bar_mock().update.assert_called()
             progress_bar_mock().close.assert_called_once()
             self.assertEqual(len(df), 4)

+            self.assertEqual(len(warned), 1)
+            warning = warned[0]
+            self.assertTrue("without pyarrow" in str(warning))
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @mock.patch("google.cloud.bigquery.table.tqdm", new=None)
     def test_to_dataframe_no_tqdm_no_progress_bar(self):
@@ -2499,12 +2509,17 @@ def test_to_dataframe_w_empty_results_wo_pyarrow(self):
         api_request = mock.Mock(return_value={"rows": []})
         row_iterator = self._make_one(_mock_client(), api_request, schema=schema)

-        df = row_iterator.to_dataframe()
+        with warnings.catch_warnings(record=True) as warned:
+            df = row_iterator.to_dataframe()

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 0)  # verify the number of rows
         self.assertEqual(list(df), ["name", "age"])  # verify the column names

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("without pyarrow" in str(warning))
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_to_dataframe_w_no_results_wo_pyarrow(self):
         from google.cloud.bigquery.schema import SchemaField
@@ -2522,12 +2537,17 @@ def empty_iterable(dtypes=None):

         row_iterator.to_dataframe_iterable = empty_iterable

-        df = row_iterator.to_dataframe()
+        with warnings.catch_warnings(record=True) as warned:
+            df = row_iterator.to_dataframe()

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 0)  # verify the number of rows
         self.assertEqual(list(df), ["name", "age"])  # verify the column names

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("without pyarrow" in str(warning))
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_to_dataframe_w_various_types_nullable(self):
         import datetime
@@ -2787,11 +2807,19 @@ def test_to_dataframe_w_bqstorage_v1beta1_no_streams(self):
             table=mut.TableReference.from_string("proj.dset.tbl"),
         )

-        got = row_iterator.to_dataframe(bqstorage_client)
+        with warnings.catch_warnings(record=True) as warned:
+            got = row_iterator.to_dataframe(bqstorage_client)
+
         column_names = ["colA", "colC", "colB"]
         self.assertEqual(list(got), column_names)
         self.assertTrue(got.empty)

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue(
+            "Support for BigQuery Storage v1beta1 clients is deprecated" in str(warning)
+        )
+
     @unittest.skipIf(
         bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
     )
@@ -3493,7 +3521,10 @@ def test_to_dataframe_concat_categorical_dtype_wo_pyarrow(self):

         row_iterator = self._make_one(_mock_client(), api_request, path, schema)

-        with mock.patch("google.cloud.bigquery.table.pyarrow", None):
+        mock_pyarrow = mock.patch("google.cloud.bigquery.table.pyarrow", None)
+        catch_warnings = warnings.catch_warnings(record=True)
+
+        with mock_pyarrow, catch_warnings as warned:
             got = row_iterator.to_dataframe(
                 dtypes={
                     "col_category": pandas.core.dtypes.dtypes.CategoricalDtype(
@@ -3522,6 +3553,10 @@ def test_to_dataframe_concat_categorical_dtype_wo_pyarrow(self):
             ["low", "medium", "low", "medium", "high", "low"],
         )

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("without pyarrow" in str(warning))
+

 class TestPartitionRange(unittest.TestCase):
     def _get_target_class(self):
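One stylistic change in the last hunk is worth a note: the two context managers are bound to names before the `with` statement. Stacking them on one `with` line keeps both scopes identical without an extra indentation level, and pre-naming them keeps the line short, since Python 2.7 (still supported by this codebase) has no way to parenthesize a multi-line `with`. The shape in isolation, with a no-op body standing in for the real call:

    import warnings

    import mock

    mock_pyarrow = mock.patch("google.cloud.bigquery.table.pyarrow", None)
    catch_warnings = warnings.catch_warnings(record=True)

    # Managers enter left to right and exit in reverse order, exactly as
    # two nested ``with`` blocks would.
    with mock_pyarrow, catch_warnings as warned:
        pass  # call the code that should warn when pyarrow is missing

    assert warned == []  # nothing warned, because nothing ran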
