BigQuery: Updates Table constructor to use TableReference as parameter
alixhami committed Sep 18, 2017
1 parent fd4890a commit 69e7eea
Showing 5 changed files with 27 additions and 18 deletions.
3 changes: 2 additions & 1 deletion bigquery/google/cloud/bigquery/dataset.py
@@ -598,7 +598,8 @@ def table(self, name, schema=()):
:rtype: :class:`google.cloud.bigquery.table.Table`
:returns: a new ``Table`` instance
"""
- return Table(name, dataset=self, schema=schema)
+ table_ref = TableReference(self, name)
+ return Table(table_ref, schema=schema, client=self._client)


def _item_to_table(iterator, resource):
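With this change, Dataset.table() no longer passes the table name straight into Table; it first wraps the dataset and name in a TableReference. A minimal sketch of the resulting call pattern, assuming a dataset object and client already exist (names below are illustrative, not from the diff):

    from google.cloud.bigquery.table import Table, TableReference

    table_ref = TableReference(dataset, 'my_table')      # dataset object plus table ID
    table = Table(table_ref, schema=(), client=client)   # Table now takes the reference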
13 changes: 9 additions & 4 deletions bigquery/google/cloud/bigquery/job.py
@@ -26,6 +26,7 @@
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import Table
+ from google.cloud.bigquery.table import TableReference
from google.cloud.bigquery.table import _build_schema_resource
from google.cloud.bigquery.table import _parse_schema_resource
from google.cloud.bigquery._helpers import ArrayQueryParameter
@@ -837,7 +838,8 @@ def from_api_repr(cls, resource, client):
job_id, config = cls._get_resource_config(resource)
dest_config = config['destinationTable']
dataset = Dataset(dest_config['datasetId'], client)
- destination = Table(dest_config['tableId'], dataset)
+ table_ref = TableReference(dataset, dest_config['tableId'])
+ destination = Table(table_ref, client=client)
source_urls = config.get('sourceUris', ())
job = cls(job_id, destination, source_urls, client=client)
job._set_properties(resource)
@@ -952,7 +954,8 @@ def from_api_repr(cls, resource, client):
job_id, config = cls._get_resource_config(resource)
dest_config = config['destinationTable']
dataset = Dataset(dest_config['datasetId'], client)
- destination = Table(dest_config['tableId'], dataset)
+ table_ref = TableReference(dataset, dest_config['tableId'])
+ destination = Table(table_ref, client=client)
sources = []
source_configs = config.get('sourceTables')
if source_configs is None:
@@ -963,7 +966,8 @@ def from_api_repr(cls, resource, client):
source_configs = [single]
for source_config in source_configs:
dataset = Dataset(source_config['datasetId'], client)
- sources.append(Table(source_config['tableId'], dataset))
+ table_ref = TableReference(dataset, source_config['tableId'])
+ sources.append(Table(table_ref, client=client))
job = cls(job_id, destination, sources, client=client)
job._set_properties(resource)
return job
@@ -1109,7 +1113,8 @@ def from_api_repr(cls, resource, client):
job_id, config = cls._get_resource_config(resource)
source_config = config['sourceTable']
dataset = Dataset(source_config['datasetId'], client)
- source = Table(source_config['tableId'], dataset)
+ table_ref = TableReference(dataset, source_config['tableId'])
+ source = Table(table_ref, client=client)
destination_uris = config['destinationUris']
job = cls(job_id, source, destination_uris, client=client)
job._set_properties(resource)
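Each job's from_api_repr() now builds a TableReference from the IDs in the API resource before constructing the Table. A sketch of that resolution step, using an assumed resource fragment (the project, dataset, and table IDs are illustrative, and the client object is assumed to exist):

    from google.cloud.bigquery.dataset import Dataset
    from google.cloud.bigquery.table import Table, TableReference

    # Hypothetical 'destinationTable' fragment from a job resource.
    dest_config = {'projectId': 'my-project',
                   'datasetId': 'my_dataset',
                   'tableId': 'my_table'}

    dataset = Dataset(dest_config['datasetId'], client)
    table_ref = TableReference(dataset, dest_config['tableId'])
    destination = Table(table_ref, client=client)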
6 changes: 3 additions & 3 deletions bigquery/google/cloud/bigquery/table.py
@@ -115,7 +115,7 @@ def project(self):
:rtype: str
:returns: the project (derived from the dataset).
"""
- return self._dataset.project_id
+ return self._dataset.project

@property
def dataset_id(self):
@@ -538,7 +538,7 @@ def _build_resource(self):
"""Generate a resource for ``create`` or ``update``."""
resource = {
'tableReference': {
- 'projectId': self._dataset.project_id,
+ 'projectId': self._dataset.project,
'datasetId': self._dataset.dataset_id,
'tableId': self.table_id},
}
@@ -584,7 +584,7 @@ def create(self, client=None):
"""
client = self._require_client(client)
path = '/projects/%s/datasets/%s/tables' % (
- self._dataset.project_id, self._dataset.dataset_id)
+ self._dataset.project, self._dataset.dataset_id)
api_response = client._connection.api_request(
method='POST', path=path, data=self._build_resource())
self._set_properties(api_response)
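The table.py changes only rename the attribute read from the dataset: project_id becomes project. The resource built by _build_resource() for create/update keeps the same shape, roughly as below (values illustrative):

    resource = {
        'tableReference': {
            'projectId': dataset.project,      # previously dataset.project_id
            'datasetId': dataset.dataset_id,
            'tableId': table.table_id,
        },
    }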
14 changes: 9 additions & 5 deletions bigquery/tests/unit/test_job.py
@@ -2169,6 +2169,7 @@ def test_begin_w_bound_client(self):

def test_begin_w_alternate_client(self):
from google.cloud.bigquery.dataset import Dataset
+ from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.dataset import Table

PATH = '/projects/%s/jobs' % (self.PROJECT,)
@@ -2203,8 +2204,10 @@ def test_begin_w_alternate_client(self):
client2 = _Client(project=self.PROJECT, connection=conn2)
job = self._make_one(self.JOB_NAME, self.QUERY, client1)

+ dataset_ref = DatasetReference(self.PROJECT, DS_ID)
dataset = Dataset(DS_ID, client1)
- table = Table(TABLE, dataset)
+ table_ref = dataset_ref.table(TABLE)
+ table = Table(table_ref, client=client1)

job.allow_large_results = True
job.create_disposition = 'CREATE_NEVER'
@@ -2460,8 +2463,8 @@ def test_exists_hit_w_alternate_client(self):
self.assertEqual(req['query_params'], {'fields': 'id'})

def test_reload_w_bound_client(self):
- from google.cloud.bigquery.dataset import Dataset
- from google.cloud.bigquery.dataset import Table
+ from google.cloud.bigquery.dataset import DatasetReference
+ from google.cloud.bigquery.table import Table

PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)
DS_ID = 'DATASET'
@@ -2471,8 +2474,9 @@ def test_reload_w_bound_client(self):
client = _Client(project=self.PROJECT, connection=conn)
job = self._make_one(self.JOB_NAME, None, client)

- dataset = Dataset(DS_ID, client)
- table = Table(DEST_TABLE, dataset)
+ dataset_ref = DatasetReference(self.PROJECT, DS_ID)
+ table_ref = dataset_ref.table(DEST_TABLE)
+ table = Table(table_ref, client=client)
job.destination = table

job.reload()
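The updated tests build the reference through DatasetReference.table() rather than passing a Dataset into Table. The pattern they now exercise, with illustrative names and an assumed client object:

    from google.cloud.bigquery.dataset import DatasetReference
    from google.cloud.bigquery.table import Table

    dataset_ref = DatasetReference('my-project', 'my_dataset')
    table_ref = dataset_ref.table('my_table')    # returns a TableReference
    table = Table(table_ref, client=client)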
9 changes: 4 additions & 5 deletions bigquery/tests/unit/test_table.py
@@ -21,7 +21,6 @@
from six.moves import http_client
import pytest

- from google.cloud.bigquery.table import TableReference
from google.cloud.bigquery.dataset import DatasetReference


@@ -436,8 +435,6 @@ def test_view_use_legacy_sql_setter(self):
def test_from_api_repr_missing_identity(self):
self._setUpConstants()
client = _Client(self.PROJECT)
- dataset = DatasetReference(self.PROJECT, self.DS_ID)
- table_ref = dataset.table(self.TABLE_NAME)
RESOURCE = {}
klass = self._get_target_class()
with self.assertRaises(KeyError):
@@ -1280,7 +1277,8 @@ def test_fetch_data_w_alternate_client(self):
age = SchemaField('age', 'INTEGER', mode='REQUIRED')
voter = SchemaField('voter', 'BOOLEAN', mode='NULLABLE')
score = SchemaField('score', 'FLOAT', mode='NULLABLE')
- table = self._make_one(table_ref, schema=[full_name, age, voter, score],
+ table = self._make_one(table_ref,
+                        schema=[full_name, age, voter, score],
client=client1)

iterator = table.fetch_data(
@@ -1452,7 +1450,8 @@ def test_row_from_mapping_w_invalid_schema(self):
age = SchemaField('age', 'INTEGER', mode='REQUIRED')
colors = SchemaField('colors', 'DATETIME', mode='REPEATED')
bogus = SchemaField('joined', 'STRING', mode='BOGUS')
- table = self._make_one(table_ref, schema=[full_name, age, colors, bogus],
+ table = self._make_one(table_ref,
+                        schema=[full_name, age, colors, bogus],
client=client)

with self.assertRaises(ValueError) as exc:
