 if typing.TYPE_CHECKING:  # pragma: NO COVER
     import pandas

-# Required dependencies, but treat as optional so that _test_google_api_imports
-# can provide a better error message.
-try:
-    from google.api_core import exceptions as google_exceptions
-    from google.cloud import bigquery
-except ImportError:  # pragma: NO COVER
-    bigquery = None
-    google_exceptions = None
-
 from pandas_gbq.exceptions import (
     AccessDenied,
     GenericGBQException,
-    PerformanceWarning,
 )
-from pandas_gbq import features
 from pandas_gbq.features import FEATURES
 import pandas_gbq.schema
 import pandas_gbq.timestamp
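With the module-level try/except gone, google.cloud.bigquery and google.api_core.exceptions are imported inside the functions that use them, after _test_google_api_imports() has had a chance to raise a readable error. A minimal sketch of that deferred-import pattern, outside pandas-gbq (the function name is illustrative):

    def make_client(project_id):
        # Importing inside the function keeps module import cheap and lets a
        # missing optional dependency be reported with a clearer message.
        from google.cloud import bigquery

        return bigquery.Client(project=project_id)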
@@ -48,32 +37,32 @@
 def _test_google_api_imports():
     try:
         import pkg_resources  # noqa
-    except ImportError as ex:
+    except ImportError as ex:  # pragma: NO COVER
         raise ImportError("pandas-gbq requires setuptools") from ex

     try:
         import db_dtypes  # noqa
-    except ImportError as ex:
+    except ImportError as ex:  # pragma: NO COVER
         raise ImportError("pandas-gbq requires db-dtypes") from ex

     try:
         import pydata_google_auth  # noqa
-    except ImportError as ex:
+    except ImportError as ex:  # pragma: NO COVER
         raise ImportError("pandas-gbq requires pydata-google-auth") from ex

     try:
         from google_auth_oauthlib.flow import InstalledAppFlow  # noqa
-    except ImportError as ex:
+    except ImportError as ex:  # pragma: NO COVER
         raise ImportError("pandas-gbq requires google-auth-oauthlib") from ex

     try:
         import google.auth  # noqa
-    except ImportError as ex:
+    except ImportError as ex:  # pragma: NO COVER
         raise ImportError("pandas-gbq requires google-auth") from ex

     try:
         from google.cloud import bigquery  # noqa
-    except ImportError as ex:
+    except ImportError as ex:  # pragma: NO COVER
         raise ImportError("pandas-gbq requires google-cloud-bigquery") from ex

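The only change in this hunk is the `# pragma: NO COVER` marker on each fallback: test environments install every dependency, so these except branches never run and would otherwise count as uncovered lines. The same guard pattern, condensed into a sketch (the helper name `_require` is illustrative, not pandas-gbq API):

    import importlib

    def _require(module_name, pip_name):
        # Import a required dependency or fail with an actionable message.
        try:
            return importlib.import_module(module_name)
        except ImportError as ex:  # pragma: NO COVER
            raise ImportError(f"pandas-gbq requires {pip_name}") from ex

    db_dtypes = _require("db_dtypes", "db-dtypes")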
@@ -372,23 +361,17 @@ def sizeof_fmt(num, suffix="B"):

     def get_client(self):
         import google.api_core.client_info
+        from google.cloud import bigquery
         import pandas

         client_info = google.api_core.client_info.ClientInfo(
             user_agent="pandas-{}".format(pandas.__version__)
         )
-
-        # In addition to new enough version of google-api-core, a new enough
-        # version of google-cloud-bigquery is required to populate the
-        # client_info.
-        if FEATURES.bigquery_has_client_info:
-            return bigquery.Client(
-                project=self.project_id,
-                credentials=self.credentials,
-                client_info=client_info,
-            )
-
-        return bigquery.Client(project=self.project_id, credentials=self.credentials)
+        return bigquery.Client(
+            project=self.project_id,
+            credentials=self.credentials,
+            client_info=client_info,
+        )

     @staticmethod
     def process_http_error(ex):
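get_client() can now construct the client with client_info unconditionally, since the supported google-cloud-bigquery versions all accept it. Roughly the same call shown standalone, assuming application-default credentials are available (a sketch, not the pandas-gbq code path):

    import google.api_core.client_info
    import google.auth
    import pandas
    from google.cloud import bigquery

    credentials, project_id = google.auth.default()
    client = bigquery.Client(
        project=project_id,
        credentials=credentials,
        # Reports the pandas version in the User-Agent header of API requests.
        client_info=google.api_core.client_info.ClientInfo(
            user_agent="pandas-{}".format(pandas.__version__)
        ),
    )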
@@ -404,6 +387,8 @@ def download_table(
         progress_bar_type: Optional[str] = None,
         dtypes: Optional[Dict[str, Union[str, Any]]] = None,
     ) -> "pandas.DataFrame":
+        from google.cloud import bigquery
+
         self._start_timer()

         try:
@@ -424,6 +409,7 @@ def download_table(
     def run_query(self, query, max_results=None, progress_bar_type=None, **kwargs):
         from concurrent.futures import TimeoutError
         from google.auth.exceptions import RefreshError
+        from google.cloud import bigquery

         job_config = {
             "query": {
@@ -529,27 +515,11 @@ def _download_results(
         if user_dtypes is None:
             user_dtypes = {}

-        if self.use_bqstorage_api and not FEATURES.bigquery_has_bqstorage:
-            warnings.warn(
-                (
-                    "use_bqstorage_api was set, but have google-cloud-bigquery "
-                    "version {}. Requires google-cloud-bigquery version "
-                    "{} or later."
-                ).format(
-                    FEATURES.bigquery_installed_version,
-                    features.BIGQUERY_BQSTORAGE_VERSION,
-                ),
-                PerformanceWarning,
-                stacklevel=4,
-            )
-
         create_bqstorage_client = self.use_bqstorage_api
         if max_results is not None:
             create_bqstorage_client = False

         to_dataframe_kwargs = {}
-        if FEATURES.bigquery_has_bqstorage:
-            to_dataframe_kwargs["create_bqstorage_client"] = create_bqstorage_client
         if FEATURES.bigquery_needs_date_as_object:
             to_dataframe_kwargs["date_as_object"] = True

@@ -560,6 +530,7 @@ def _download_results(
             df = rows_iter.to_dataframe(
                 dtypes=conversion_dtypes,
                 progress_bar_type=progress_bar_type,
+                create_bqstorage_client=create_bqstorage_client,
                 **to_dataframe_kwargs,
             )
         except self.http_error as ex:
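Because every supported google-cloud-bigquery version accepts create_bqstorage_client, the keyword is now passed directly to to_dataframe() instead of being gated behind FEATURES.bigquery_has_bqstorage, and the related PerformanceWarning path is gone. A hedged sketch of the resulting call, assuming default credentials; the query is a placeholder:

    from google.cloud import bigquery

    client = bigquery.Client()
    rows_iter = client.query("SELECT 1 AS x").result()
    df = rows_iter.to_dataframe(
        progress_bar_type=None,
        # False skips creating a BigQuery Storage API client, which pandas-gbq
        # does when max_results caps the download at a small result set.
        create_bqstorage_client=False,
    )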
@@ -1051,6 +1022,9 @@ def to_gbq(

     _test_google_api_imports()

+    from google.api_core import exceptions as google_exceptions
+    from google.cloud import bigquery
+
     if verbose is not None and FEATURES.pandas_has_deprecated_verbose:
         warnings.warn(
             "verbose is deprecated and will be removed in "