@@ -279,16 +279,14 @@ def test_load_table_from_dataframe_w_required(bigquery_client, dataset_id):
279
279
280
280
def test_load_table_from_dataframe_w_explicit_schema (bigquery_client , dataset_id ):
281
281
# Schema with all scalar types.
282
- # TODO: Uploading DATETIME columns currently fails, thus that field type
283
- # is temporarily removed from the test.
284
282
# See:
285
283
# https://github.com/googleapis/python-bigquery/issues/61
286
284
# https://issuetracker.google.com/issues/151765076
287
285
scalars_schema = (
288
286
bigquery .SchemaField ("bool_col" , "BOOLEAN" ),
289
287
bigquery .SchemaField ("bytes_col" , "BYTES" ),
290
288
bigquery .SchemaField ("date_col" , "DATE" ),
291
- # bigquery.SchemaField("dt_col", "DATETIME"),
289
+ bigquery .SchemaField ("dt_col" , "DATETIME" ),
292
290
bigquery .SchemaField ("float_col" , "FLOAT" ),
293
291
bigquery .SchemaField ("geo_col" , "GEOGRAPHY" ),
294
292
bigquery .SchemaField ("int_col" , "INTEGER" ),
@@ -313,14 +311,14 @@ def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id
313
311
("bool_col" , [True , None , False ]),
314
312
("bytes_col" , [b"abc" , None , b"def" ]),
315
313
("date_col" , [datetime .date (1 , 1 , 1 ), None , datetime .date (9999 , 12 , 31 )]),
316
- # (
317
- # "dt_col",
318
- # [
319
- # datetime.datetime(1, 1, 1, 0, 0, 0),
320
- # None,
321
- # datetime.datetime(9999, 12, 31, 23, 59, 59, 999999),
322
- # ],
323
- # ),
314
+ (
315
+ "dt_col" ,
316
+ [
317
+ datetime .datetime (1 , 1 , 1 , 0 , 0 , 0 ),
318
+ None ,
319
+ datetime .datetime (9999 , 12 , 31 , 23 , 59 , 59 , 999999 ),
320
+ ],
321
+ ),
324
322
("float_col" , [float ("-inf" ), float ("nan" ), float ("inf" )]),
325
323
(
326
324
"geo_col" ,
@@ -800,6 +798,50 @@ def test_list_rows_max_results_w_bqstorage(bigquery_client):
800
798
assert len (dataframe .index ) == 100
801
799
802
800
801
def test_upload_time_and_datetime_56(bigquery_client, dataset_id):
    """Regression test for uploading TIME and DATETIME dataframe columns.

    Loads a dataframe containing one naive and one tz-aware datetime plus a
    TIME column (with a None), twice:

    1. With an inferred schema — the asserts show datetimes come back
       tz-aware in UTC (the -7h-offset value reads back as 15:00 UTC).
    2. With an explicit DATETIME/TIME schema — the asserts show the same
       wall-clock values round-trip as naive datetimes.

    NOTE(review): name suffix ``_56`` presumably refers to issue #56 in
    this repo — confirm against the tracker.
    """
    # Imported here (not at module top) so the rest of this chunk stays
    # self-contained; hoisted above the data setup instead of sitting
    # between the two load phases.
    from google.cloud.bigquery import job, schema

    df = pandas.DataFrame(
        dict(
            dt=[
                datetime.datetime(2020, 1, 8, 8, 0, 0),
                datetime.datetime(
                    2020,
                    1,
                    8,
                    8,
                    0,
                    0,
                    tzinfo=datetime.timezone(datetime.timedelta(hours=-7)),
                ),
            ],
            t=[datetime.time(0, 0, 10, 100001), None],
        )
    )

    # Phase 1: inferred schema. Values read back tz-aware in UTC; the
    # microsecond precision of the TIME value is preserved.
    table = f"{dataset_id}.test_upload_time_and_datetime"
    bigquery_client.load_table_from_dataframe(df, table).result()
    data = list(map(list, bigquery_client.list_rows(table)))
    assert data == [
        [
            datetime.datetime(2020, 1, 8, 8, 0, tzinfo=datetime.timezone.utc),
            datetime.time(0, 0, 10, 100001),
        ],
        [datetime.datetime(2020, 1, 8, 15, 0, tzinfo=datetime.timezone.utc), None],
    ]

    # Phase 2: explicit DATETIME schema. The same dataframe round-trips
    # as naive datetimes (no tzinfo on readback).
    table = f"{dataset_id}.test_upload_time_and_datetime_dt"
    config = job.LoadJobConfig(
        schema=[schema.SchemaField("dt", "DATETIME"), schema.SchemaField("t", "TIME")]
    )

    bigquery_client.load_table_from_dataframe(df, table, job_config=config).result()
    data = list(map(list, bigquery_client.list_rows(table)))
    assert data == [
        [datetime.datetime(2020, 1, 8, 8, 0), datetime.time(0, 0, 10, 100001)],
        [datetime.datetime(2020, 1, 8, 15, 0), None],
    ]
+
803
845
def test_to_dataframe_geography_as_objects (bigquery_client , dataset_id ):
804
846
wkt = pytest .importorskip ("shapely.wkt" )
805
847
bigquery_client .query (
0 commit comments