Skip to content

Commit

Permalink
Add bigquery jobid to table (googleapis#3605)
Browse files Browse the repository at this point in the history
  • Loading branch information
evaogbe authored and landrito committed Aug 22, 2017
1 parent e01a5df commit be83bce
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 3 deletions.
15 changes: 12 additions & 3 deletions bigquery/google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -842,7 +842,8 @@ def upload_from_file(self,
quote_character=None,
skip_leading_rows=None,
write_disposition=None,
client=None):
client=None,
job_name=None):
"""Upload the contents of this table from a file-like object.
The content type of the upload will either be
Expand Down Expand Up @@ -915,6 +916,10 @@ def upload_from_file(self,
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current dataset.
:type job_name: str
:param job_name: Optional. The id of the job. Generated if not
explicitly passed in.
:rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
:returns: the job instance used to load the data (e.g., for
querying status). Note that the job is already started:
Expand Down Expand Up @@ -977,7 +982,7 @@ def upload_from_file(self,
encoding, field_delimiter,
ignore_unknown_values, max_bad_records,
quote_character, skip_leading_rows,
write_disposition)
write_disposition, job_name)

upload = Upload(file_obj, content_type, total_bytes,
auto_transfer=False)
Expand Down Expand Up @@ -1033,7 +1038,8 @@ def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments
max_bad_records,
quote_character,
skip_leading_rows,
write_disposition):
write_disposition,
job_name):
"""Helper for :meth:`Table.upload_from_file`."""
load_config = metadata['configuration']['load']

Expand Down Expand Up @@ -1067,6 +1073,9 @@ def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments
if write_disposition is not None:
load_config['writeDisposition'] = write_disposition

if job_name is not None:
load_config['jobReference'] = {'jobId': job_name}


def _parse_schema_resource(info):
"""Parse a resource fragment into a schema field.
Expand Down
16 changes: 16 additions & 0 deletions bigquery/tests/unit/test_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -1844,6 +1844,22 @@ class _UploadConfig(object):
self.assertEqual(req['body'], BODY)
# pylint: enable=too-many-statements

def test_upload_from_file_w_jobid(self):
    """Uploading with an explicit ``job_name`` puts it in the job reference.

    Drives :meth:`Table.upload_from_file` through the shared helper with
    ``job_name='foo'``, then digs the JSON metadata part out of the captured
    multipart upload request and checks the load configuration carries
    ``{'jobId': 'foo'}``.
    """
    import json
    from google.cloud._helpers import _to_bytes

    # Perform the upload and capture the HTTP requests it issued.
    requests, _, _ = self._upload_from_file_helper(job_name='foo')
    parse_chunk = _email_chunk_parser()
    request = requests[0]

    # The content-type header looks like:
    #   multipart/related; boundary="=====..."
    # Split it into the media type and the boundary parameter.
    header_parts = request['headers']['content-type'].split(';')
    ctype, boundary = [part.strip() for part in header_parts]

    # Strip the 'boundary="' prefix and trailing '"' to get the raw
    # boundary token, then build the divider used between MIME parts.
    raw_boundary = boundary[len('boundary="'):-1]
    divider = b'--' + _to_bytes(raw_boundary)

    # The first and last splits are the multipart prolog / epilog;
    # the first real chunk is the JSON metadata part.
    chunks = request['body'].split(divider)[1:-1]
    text_msg = parse_chunk(chunks[0].strip())
    metadata = json.loads(text_msg._payload)

    load_config = metadata['configuration']['load']
    self.assertEqual(load_config['jobReference'], {'jobId': 'foo'})


class Test_parse_schema_resource(unittest.TestCase, _SchemaBase):

Expand Down

0 comments on commit be83bce

Please sign in to comment.