From 2a39602a3249b24ee6337b36ad85b6b933d6a0e3 Mon Sep 17 00:00:00 2001
From: Eva Ogbe
Date: Wed, 12 Jul 2017 14:36:13 -0700
Subject: [PATCH] Add bigquery jobid to table

---
 bigquery/google/cloud/bigquery/table.py | 15 ++++++++++++---
 bigquery/tests/unit/test_table.py       | 16 ++++++++++++++++
 2 files changed, 28 insertions(+), 3 deletions(-)

diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py
index 37dc1159cc8e..d28c31227424 100644
--- a/bigquery/google/cloud/bigquery/table.py
+++ b/bigquery/google/cloud/bigquery/table.py
@@ -842,7 +842,8 @@ def upload_from_file(self,
                          quote_character=None,
                          skip_leading_rows=None,
                          write_disposition=None,
-                         client=None):
+                         client=None,
+                         job_name=None):
         """Upload the contents of this table from a file-like object.
 
         The content type of the upload will either be
@@ -915,6 +916,10 @@ def upload_from_file(self,
         :param client: Optional. The client to use. If not passed, falls back
                        to the ``client`` stored on the current dataset.
 
+        :type job_name: str
+        :param job_name: Optional. The id of the job. Generated if not
+                         explicitly passed in.
+
         :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
         :returns: the job instance used to load the data (e.g., for
                   querying status). Note that the job is already started:
@@ -977,7 +982,7 @@ def upload_from_file(self,
                                 encoding, field_delimiter,
                                 ignore_unknown_values, max_bad_records,
                                 quote_character, skip_leading_rows,
-                                write_disposition)
+                                write_disposition, job_name)
 
         upload = Upload(file_obj, content_type, total_bytes,
                         auto_transfer=False)
@@ -1033,7 +1038,8 @@ def _configure_job_metadata(metadata,  # pylint: disable=too-many-arguments
                             max_bad_records,
                             quote_character,
                             skip_leading_rows,
-                            write_disposition):
+                            write_disposition,
+                            job_name):
     """Helper for :meth:`Table.upload_from_file`."""
 
     load_config = metadata['configuration']['load']
@@ -1067,6 +1073,9 @@ def _configure_job_metadata(metadata,  # pylint: disable=too-many-arguments
     if write_disposition is not None:
         load_config['writeDisposition'] = write_disposition
 
+    if job_name is not None:
+        load_config['jobReference'] = {'jobId': job_name}
+
 
 def _parse_schema_resource(info):
     """Parse a resource fragment into a schema field.
diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py
index b27736fb896e..f535e8799628 100644
--- a/bigquery/tests/unit/test_table.py
+++ b/bigquery/tests/unit/test_table.py
@@ -1844,6 +1844,22 @@ class _UploadConfig(object):
         self.assertEqual(req['body'], BODY)
         # pylint: enable=too-many-statements
 
+    def test_upload_from_file_w_jobid(self):
+        import json
+        from google.cloud._helpers import _to_bytes
+
+        requested, PATH, BODY = self._upload_from_file_helper(job_name='foo')
+        parse_chunk = _email_chunk_parser()
+        req = requested[0]
+        ctype, boundary = [x.strip()
+                           for x in req['headers']['content-type'].split(';')]
+        divider = b'--' + _to_bytes(boundary[len('boundary="'):-1])
+        chunks = req['body'].split(divider)[1:-1]  # discard prolog / epilog
+        text_msg = parse_chunk(chunks[0].strip())
+        metadata = json.loads(text_msg._payload)
+        load_config = metadata['configuration']['load']
+        self.assertEqual(load_config['jobReference'], {'jobId': 'foo'})
+
 
 class Test_parse_schema_resource(unittest.TestCase, _SchemaBase):
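
Beyond the unit test, it may help to see the new argument from the caller's side. The sketch below is illustrative only: it assumes the 2017-era `google-cloud-bigquery` surface this patch targets (`client.dataset()` / `dataset.table()` / `Table.upload_from_file`), and the dataset, table, file, and job names are all hypothetical.

```python
# Illustrative sketch only -- assumes the 2017-era google-cloud-bigquery
# API that this patch targets; all names below are hypothetical.
from google.cloud import bigquery
from google.cloud.bigquery import SchemaField

client = bigquery.Client()
table = client.dataset('my_dataset').table('people')
table.schema = [
    SchemaField('name', 'STRING', mode='REQUIRED'),
    SchemaField('age', 'INTEGER', mode='REQUIRED'),
]

with open('people.csv', 'rb') as csv_file:
    # job_name is the argument this patch adds: it supplies the load
    # job's jobId instead of letting one be generated, so the job can
    # be found again (or retried idempotently) under a known id.
    job = table.upload_from_file(
        csv_file,
        source_format='CSV',
        skip_leading_rows=1,
        job_name='load-people-20170712',
    )

# Per the docstring, the returned job is already started; poll it
# (e.g. job.reload() and job.state) to track completion.
```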
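As a quick trace of the helper change itself: the new branch in `_configure_job_metadata` writes the id under the load configuration, which is what `test_upload_from_file_w_jobid` asserts against the metadata part of the multipart upload. The snippet below re-runs that logic standalone, with a simplified stand-in for the metadata skeleton `upload_from_file` builds.

```python
# Simplified stand-in for the metadata dict that upload_from_file
# builds; only the parts touched by the job_name change are shown.
metadata = {'configuration': {'load': {'sourceFormat': 'CSV'}}}

job_name = 'foo'
load_config = metadata['configuration']['load']

# Mirrors the new branch in _configure_job_metadata.
if job_name is not None:
    load_config['jobReference'] = {'jobId': job_name}

# Same shape the unit test asserts on.
assert load_config['jobReference'] == {'jobId': 'foo'}
```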