
Commit aa4876e

Author: Jim Fulton
test: Add test of datetime and time pandas load (#895)
1 parent 16f65e6 commit aa4876e

File tree

1 file changed: +53 -11 lines

tests/system/test_pandas.py

Lines changed: 53 additions & 11 deletions
@@ -279,16 +279,14 @@ def test_load_table_from_dataframe_w_required(bigquery_client, dataset_id):
 
 def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id):
     # Schema with all scalar types.
-    # TODO: Uploading DATETIME columns currently fails, thus that field type
-    # is temporarily removed from the test.
     # See:
     # https://github.com/googleapis/python-bigquery/issues/61
     # https://issuetracker.google.com/issues/151765076
     scalars_schema = (
         bigquery.SchemaField("bool_col", "BOOLEAN"),
         bigquery.SchemaField("bytes_col", "BYTES"),
         bigquery.SchemaField("date_col", "DATE"),
-        # bigquery.SchemaField("dt_col", "DATETIME"),
+        bigquery.SchemaField("dt_col", "DATETIME"),
         bigquery.SchemaField("float_col", "FLOAT"),
         bigquery.SchemaField("geo_col", "GEOGRAPHY"),
         bigquery.SchemaField("int_col", "INTEGER"),
@@ -313,14 +311,14 @@ def test_load_table_from_dataframe_w_explicit_schema(bigquery_client, dataset_id
         ("bool_col", [True, None, False]),
         ("bytes_col", [b"abc", None, b"def"]),
         ("date_col", [datetime.date(1, 1, 1), None, datetime.date(9999, 12, 31)]),
-        # (
-        #     "dt_col",
-        #     [
-        #         datetime.datetime(1, 1, 1, 0, 0, 0),
-        #         None,
-        #         datetime.datetime(9999, 12, 31, 23, 59, 59, 999999),
-        #     ],
-        # ),
+        (
+            "dt_col",
+            [
+                datetime.datetime(1, 1, 1, 0, 0, 0),
+                None,
+                datetime.datetime(9999, 12, 31, 23, 59, 59, 999999),
+            ],
+        ),
         ("float_col", [float("-inf"), float("nan"), float("inf")]),
         (
             "geo_col",
@@ -800,6 +798,50 @@ def test_list_rows_max_results_w_bqstorage(bigquery_client):
     assert len(dataframe.index) == 100
 
 
+def test_upload_time_and_datetime_56(bigquery_client, dataset_id):
+    df = pandas.DataFrame(
+        dict(
+            dt=[
+                datetime.datetime(2020, 1, 8, 8, 0, 0),
+                datetime.datetime(
+                    2020,
+                    1,
+                    8,
+                    8,
+                    0,
+                    0,
+                    tzinfo=datetime.timezone(datetime.timedelta(hours=-7)),
+                ),
+            ],
+            t=[datetime.time(0, 0, 10, 100001), None],
+        )
+    )
+    table = f"{dataset_id}.test_upload_time_and_datetime"
+    bigquery_client.load_table_from_dataframe(df, table).result()
+    data = list(map(list, bigquery_client.list_rows(table)))
+    assert data == [
+        [
+            datetime.datetime(2020, 1, 8, 8, 0, tzinfo=datetime.timezone.utc),
+            datetime.time(0, 0, 10, 100001),
+        ],
+        [datetime.datetime(2020, 1, 8, 15, 0, tzinfo=datetime.timezone.utc), None],
+    ]
+
+    from google.cloud.bigquery import job, schema
+
+    table = f"{dataset_id}.test_upload_time_and_datetime_dt"
+    config = job.LoadJobConfig(
+        schema=[schema.SchemaField("dt", "DATETIME"), schema.SchemaField("t", "TIME")]
+    )
+
+    bigquery_client.load_table_from_dataframe(df, table, job_config=config).result()
+    data = list(map(list, bigquery_client.list_rows(table)))
+    assert data == [
+        [datetime.datetime(2020, 1, 8, 8, 0), datetime.time(0, 0, 10, 100001)],
+        [datetime.datetime(2020, 1, 8, 15, 0), None],
+    ]
+
+
 def test_to_dataframe_geography_as_objects(bigquery_client, dataset_id):
     wkt = pytest.importorskip("shapely.wkt")
     bigquery_client.query(
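
Outside the test harness, the behavior exercised by the new test reduces to roughly the following sketch. It is not taken from the commit: the client construction, dataset name, and table name are placeholders, and it assumes default application credentials and an existing dataset.

import datetime

import pandas
from google.cloud import bigquery
from google.cloud.bigquery import job, schema

client = bigquery.Client()  # assumes a default project and credentials
table_id = "my_dataset.upload_time_and_datetime"  # hypothetical destination

df = pandas.DataFrame(
    dict(
        dt=[
            datetime.datetime(2020, 1, 8, 8, 0, 0),  # naive datetime
            datetime.datetime(
                2020, 1, 8, 8, 0, 0,
                tzinfo=datetime.timezone(datetime.timedelta(hours=-7)),  # 15:00 UTC
            ),
        ],
        t=[datetime.time(0, 0, 10, 100001), None],
    )
)

# As in the second half of the new test, an explicit DATETIME/TIME schema makes
# the naive value round-trip unchanged and converts the timezone-aware value to
# its UTC wall-clock time; the test's first load (no job config) instead comes
# back as timezone-aware UTC TIMESTAMP values.
config = job.LoadJobConfig(
    schema=[schema.SchemaField("dt", "DATETIME"), schema.SchemaField("t", "TIME")]
)
client.load_table_from_dataframe(df, table_id, job_config=config).result()

print([list(row) for row in client.list_rows(table_id)])
# As asserted in the test:
# [[datetime.datetime(2020, 1, 8, 8, 0), datetime.time(0, 0, 10, 100001)],
#  [datetime.datetime(2020, 1, 8, 15, 0), None]]
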
