import datetime
import logging
import time
+import types
import unittest
import warnings

@@ -1862,6 +1863,15 @@ def test__validate_bqstorage_returns_false_when_completely_cached(self):
            )
        )

+    def test__validate_bqstorage_returns_false_if_max_results_set(self):
+        iterator = self._make_one(
+            max_results=10, first_page_response=None  # not cached
+        )
+        result = iterator._validate_bqstorage(
+            bqstorage_client=None, create_bqstorage_client=True
+        )
+        self.assertFalse(result)
+
    def test__validate_bqstorage_returns_false_if_missing_dependency(self):
        iterator = self._make_one(first_page_response=None)  # not cached

@@ -2105,7 +2115,7 @@ def test_to_arrow_w_empty_table(self):
    @unittest.skipIf(
        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
    )
-    def test_to_arrow_max_results_w_create_bqstorage_warning(self):
+    def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self):
        from google.cloud.bigquery.schema import SchemaField

        schema = [
@@ -2119,6 +2129,7 @@ def test_to_arrow_max_results_w_create_bqstorage_warning(self):
        path = "/foo"
        api_request = mock.Mock(return_value={"rows": rows})
        mock_client = _mock_client()
+        mock_bqstorage_client = mock.sentinel.bq_storage_client

        row_iterator = self._make_one(
            client=mock_client,
@@ -2129,7 +2140,7 @@ def test_to_arrow_max_results_w_create_bqstorage_warning(self):
        )

        with warnings.catch_warnings(record=True) as warned:
-            row_iterator.to_arrow(create_bqstorage_client=True)
+            row_iterator.to_arrow(bqstorage_client=mock_bqstorage_client)

        matches = [
            warning
@@ -2139,6 +2150,49 @@ def test_to_arrow_max_results_w_create_bqstorage_warning(self):
            and "REST" in str(warning)
        ]
        self.assertEqual(len(matches), 1, msg="User warning was not emitted.")
+        self.assertIn(
+            __file__, str(matches[0]), msg="Warning emitted with incorrect stacklevel"
+        )
+        mock_client._ensure_bqstorage_client.assert_not_called()
+
+    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
+    def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self):
+        from google.cloud.bigquery.schema import SchemaField
+
+        schema = [
+            SchemaField("name", "STRING", mode="REQUIRED"),
+            SchemaField("age", "INTEGER", mode="REQUIRED"),
+        ]
+        rows = [
+            {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+            {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+        ]
+        path = "/foo"
+        api_request = mock.Mock(return_value={"rows": rows})
+        mock_client = _mock_client()
+
+        row_iterator = self._make_one(
+            client=mock_client,
+            api_request=api_request,
+            path=path,
+            schema=schema,
+            max_results=42,
+        )
+
+        with warnings.catch_warnings(record=True) as warned:
+            row_iterator.to_arrow(create_bqstorage_client=True)
+
+        matches = [
+            warning
+            for warning in warned
+            if warning.category is UserWarning
+            and "cannot use bqstorage_client" in str(warning).lower()
+            and "REST" in str(warning)
+        ]
+        self.assertFalse(matches)
        mock_client._ensure_bqstorage_client.assert_not_called()

    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
@@ -2372,7 +2426,6 @@ def test_to_arrow_w_pyarrow_none(self):
    @unittest.skipIf(pandas is None, "Requires `pandas`")
    def test_to_dataframe_iterable(self):
        from google.cloud.bigquery.schema import SchemaField
-        import types

        schema = [
            SchemaField("name", "STRING", mode="REQUIRED"),
@@ -2415,7 +2468,6 @@ def test_to_dataframe_iterable(self):
    @unittest.skipIf(pandas is None, "Requires `pandas`")
    def test_to_dataframe_iterable_with_dtypes(self):
        from google.cloud.bigquery.schema import SchemaField
-        import types

        schema = [
            SchemaField("name", "STRING", mode="REQUIRED"),
@@ -2527,6 +2579,61 @@ def test_to_dataframe_iterable_w_bqstorage(self):
        # Don't close the client if it was passed in.
        bqstorage_client._transport.grpc_channel.close.assert_not_called()

+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
+    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
+    def test_to_dataframe_iterable_w_bqstorage_max_results_warning(self):
+        from google.cloud.bigquery import schema
+        from google.cloud.bigquery import table as mut
+
+        bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
+
+        iterator_schema = [
+            schema.SchemaField("name", "STRING", mode="REQUIRED"),
+            schema.SchemaField("age", "INTEGER", mode="REQUIRED"),
+        ]
+        path = "/foo"
+        api_request = mock.Mock(
+            side_effect=[
+                {
+                    "rows": [{"f": [{"v": "Bengt"}, {"v": "32"}]}],
+                    "pageToken": "NEXTPAGE",
+                },
+                {"rows": [{"f": [{"v": "Sven"}, {"v": "33"}]}]},
+            ]
+        )
+        row_iterator = mut.RowIterator(
+            _mock_client(),
+            api_request,
+            path,
+            iterator_schema,
+            table=mut.TableReference.from_string("proj.dset.tbl"),
+            selected_fields=iterator_schema,
+            max_results=25,
+        )
+
+        with warnings.catch_warnings(record=True) as warned:
+            dfs = row_iterator.to_dataframe_iterable(bqstorage_client=bqstorage_client)
+
+        # Was a warning emitted?
+        matches = [
+            warning
+            for warning in warned
+            if warning.category is UserWarning
+            and "cannot use bqstorage_client" in str(warning).lower()
+            and "REST" in str(warning)
+        ]
+        assert len(matches) == 1, "User warning was not emitted."
+        assert __file__ in str(matches[0]), "Warning emitted with incorrect stacklevel"
+
+        # Basic check of what we got as a result.
+        dataframes = list(dfs)
+        assert len(dataframes) == 2
+        assert isinstance(dataframes[0], pandas.DataFrame)
+        assert isinstance(dataframes[1], pandas.DataFrame)
+
    @mock.patch("google.cloud.bigquery.table.pandas", new=None)
    def test_to_dataframe_iterable_error_if_pandas_is_none(self):
        from google.cloud.bigquery.schema import SchemaField
@@ -2926,7 +3033,7 @@ def test_to_dataframe_max_results_w_bqstorage_warning(self):
        self.assertEqual(len(matches), 1, msg="User warning was not emitted.")

    @unittest.skipIf(pandas is None, "Requires `pandas`")
-    def test_to_dataframe_max_results_w_create_bqstorage_warning(self):
+    def test_to_dataframe_max_results_w_explicit_bqstorage_client_warning(self):
        from google.cloud.bigquery.schema import SchemaField

        schema = [
@@ -2940,6 +3047,7 @@ def test_to_dataframe_max_results_w_create_bqstorage_warning(self):
        path = "/foo"
        api_request = mock.Mock(return_value={"rows": rows})
        mock_client = _mock_client()
+        mock_bqstorage_client = mock.sentinel.bq_storage_client

        row_iterator = self._make_one(
            client=mock_client,
@@ -2950,7 +3058,7 @@ def test_to_dataframe_max_results_w_create_bqstorage_warning(self):
        )

        with warnings.catch_warnings(record=True) as warned:
-            row_iterator.to_dataframe(create_bqstorage_client=True)
+            row_iterator.to_dataframe(bqstorage_client=mock_bqstorage_client)

        matches = [
            warning
@@ -2960,6 +3068,46 @@ def test_to_dataframe_max_results_w_create_bqstorage_warning(self):
            and "REST" in str(warning)
        ]
        self.assertEqual(len(matches), 1, msg="User warning was not emitted.")
+        self.assertIn(
+            __file__, str(matches[0]), msg="Warning emitted with incorrect stacklevel"
+        )
+        mock_client._ensure_bqstorage_client.assert_not_called()
+
+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self):
+        from google.cloud.bigquery.schema import SchemaField
+
+        schema = [
+            SchemaField("name", "STRING", mode="REQUIRED"),
+            SchemaField("age", "INTEGER", mode="REQUIRED"),
+        ]
+        rows = [
+            {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+            {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+        ]
+        path = "/foo"
+        api_request = mock.Mock(return_value={"rows": rows})
+        mock_client = _mock_client()
+
+        row_iterator = self._make_one(
+            client=mock_client,
+            api_request=api_request,
+            path=path,
+            schema=schema,
+            max_results=42,
+        )
+
+        with warnings.catch_warnings(record=True) as warned:
+            row_iterator.to_dataframe(create_bqstorage_client=True)
+
+        matches = [
+            warning
+            for warning in warned
+            if warning.category is UserWarning
+            and "cannot use bqstorage_client" in str(warning).lower()
+            and "REST" in str(warning)
+        ]
+        self.assertFalse(matches)
        mock_client._ensure_bqstorage_client.assert_not_called()

    @unittest.skipIf(pandas is None, "Requires `pandas`")