From 37a6b23458df8090421f6e7283000c15bb85bd69 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Tue, 19 Sep 2017 09:59:05 -0700 Subject: [PATCH] BigQuery: Updates Table constructor to use TableReference as parameter (#3997) * wip update Table constructor * BigQuery: Updates Table constructor to use TableReference as parameter * fixes circular import error with Python 2.7 --- bigquery/google/cloud/bigquery/client.py | 2 + bigquery/google/cloud/bigquery/dataset.py | 6 +- bigquery/google/cloud/bigquery/job.py | 13 +- bigquery/google/cloud/bigquery/table.py | 28 +- bigquery/tests/unit/test_job.py | 14 +- bigquery/tests/unit/test_table.py | 426 ++++++++++++---------- 6 files changed, 282 insertions(+), 207 deletions(-) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index f17f43deaf5c..1b9e9a522a15 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -14,6 +14,8 @@ """Client for interacting with the Google BigQuery API.""" +from __future__ import absolute_import + from google.api.core import page_iterator from google.cloud.client import ClientWithProject from google.cloud.bigquery._http import Connection diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py index dcb52d20eacb..29dc3af19458 100644 --- a/bigquery/google/cloud/bigquery/dataset.py +++ b/bigquery/google/cloud/bigquery/dataset.py @@ -13,6 +13,9 @@ # limitations under the License. 
"""Define API Datasets.""" + +from __future__ import absolute_import + import six from google.api.core import page_iterator @@ -598,7 +601,8 @@ def table(self, name, schema=()): :rtype: :class:`google.cloud.bigquery.table.Table` :returns: a new ``Table`` instance """ - return Table(name, dataset=self, schema=schema) + table_ref = TableReference(self, name) + return Table(table_ref, schema=schema, client=self._client) def _item_to_table(iterator, resource): diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 76a7d476cf6b..5807fcd25e0b 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -26,6 +26,7 @@ from google.cloud.bigquery.dataset import Dataset from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table +from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.table import _build_schema_resource from google.cloud.bigquery.table import _parse_schema_resource from google.cloud.bigquery._helpers import ArrayQueryParameter @@ -837,7 +838,8 @@ def from_api_repr(cls, resource, client): job_id, config = cls._get_resource_config(resource) dest_config = config['destinationTable'] dataset = Dataset(dest_config['datasetId'], client) - destination = Table(dest_config['tableId'], dataset) + table_ref = TableReference(dataset, dest_config['tableId']) + destination = Table(table_ref, client=client) source_urls = config.get('sourceUris', ()) job = cls(job_id, destination, source_urls, client=client) job._set_properties(resource) @@ -952,7 +954,8 @@ def from_api_repr(cls, resource, client): job_id, config = cls._get_resource_config(resource) dest_config = config['destinationTable'] dataset = Dataset(dest_config['datasetId'], client) - destination = Table(dest_config['tableId'], dataset) + table_ref = TableReference(dataset, dest_config['tableId']) + destination = Table(table_ref, client=client) sources = [] source_configs = 
config.get('sourceTables') if source_configs is None: @@ -963,7 +966,8 @@ def from_api_repr(cls, resource, client): source_configs = [single] for source_config in source_configs: dataset = Dataset(source_config['datasetId'], client) - sources.append(Table(source_config['tableId'], dataset)) + table_ref = TableReference(dataset, source_config['tableId']) + sources.append(Table(table_ref, client=client)) job = cls(job_id, destination, sources, client=client) job._set_properties(resource) return job @@ -1109,7 +1113,8 @@ def from_api_repr(cls, resource, client): job_id, config = cls._get_resource_config(resource) source_config = config['sourceTable'] dataset = Dataset(source_config['datasetId'], client) - source = Table(source_config['tableId'], dataset) + table_ref = TableReference(dataset, source_config['tableId']) + source = Table(table_ref, client=client) destination_uris = config['destinationUris'] job = cls(job_id, source, destination_uris, client=client) job._set_properties(resource) diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index f9c07b1e8ee6..fe1a9d3b4ec5 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -14,6 +14,8 @@ """Define API Datasets.""" +from __future__ import absolute_import + import datetime import os @@ -90,11 +92,8 @@ class Table(object): See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables - :type table_id: str - :param table_id: the ID of the table - - :type dataset: :class:`google.cloud.bigquery.dataset.Dataset` - :param dataset: The dataset which contains the table. 
+ :type table_ref: :class:`google.cloud.bigquery.table.TableReference` + :param table_ref: a pointer to a table :type schema: list of :class:`SchemaField` :param schema: The table's schema @@ -102,12 +101,13 @@ class Table(object): _schema = None - def __init__(self, table_id, dataset, schema=()): - self._table_id = table_id - self._dataset = dataset + def __init__(self, table_ref, schema=(), client=None): + self._table_id = table_ref.table_id + self._dataset = table_ref.dataset self._properties = {} # Let the @property do validation. self.schema = schema + self._client = client @property def project(self): @@ -477,7 +477,7 @@ def list_partitions(self, client=None): return [row[0] for row in query.rows] @classmethod - def from_api_repr(cls, resource, dataset): + def from_api_repr(cls, resource, client): """Factory: construct a table given its API representation :type resource: dict @@ -489,12 +489,18 @@ def from_api_repr(cls, resource, dataset): :rtype: :class:`google.cloud.bigquery.table.Table` :returns: Table parsed from ``resource``. """ + from google.cloud.bigquery import dataset + if ('tableReference' not in resource or 'tableId' not in resource['tableReference']): raise KeyError('Resource lacks required identity information:' '["tableReference"]["tableId"]') + project_id = resource['tableReference']['projectId'] table_id = resource['tableReference']['tableId'] - table = cls(table_id, dataset=dataset) + dataset_id = resource['tableReference']['datasetId'] + dataset_ref = dataset.DatasetReference(project_id, dataset_id) + + table = cls(dataset_ref.table(table_id), client=client) table._set_properties(resource) return table @@ -510,7 +516,7 @@ def _require_client(self, client): :returns: The client passed in or the currently bound client. 
""" if client is None: - client = self._dataset._client + client = self._client return client def _set_properties(self, api_response): diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 7c9a84f4503a..11f4dec9870c 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -2169,6 +2169,7 @@ def test_begin_w_bound_client(self): def test_begin_w_alternate_client(self): from google.cloud.bigquery.dataset import Dataset + from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.dataset import Table PATH = '/projects/%s/jobs' % (self.PROJECT,) @@ -2203,8 +2204,10 @@ def test_begin_w_alternate_client(self): client2 = _Client(project=self.PROJECT, connection=conn2) job = self._make_one(self.JOB_NAME, self.QUERY, client1) + dataset_ref = DatasetReference(self.PROJECT, DS_ID) dataset = Dataset(DS_ID, client1) - table = Table(TABLE, dataset) + table_ref = dataset_ref.table(TABLE) + table = Table(table_ref, client=client1) job.allow_large_results = True job.create_disposition = 'CREATE_NEVER' @@ -2460,8 +2463,8 @@ def test_exists_hit_w_alternate_client(self): self.assertEqual(req['query_params'], {'fields': 'id'}) def test_reload_w_bound_client(self): - from google.cloud.bigquery.dataset import Dataset - from google.cloud.bigquery.dataset import Table + from google.cloud.bigquery.dataset import DatasetReference + from google.cloud.bigquery.table import Table PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME) DS_ID = 'DATASET' @@ -2471,8 +2474,9 @@ def test_reload_w_bound_client(self): client = _Client(project=self.PROJECT, connection=conn) job = self._make_one(self.JOB_NAME, None, client) - dataset = Dataset(DS_ID, client) - table = Table(DEST_TABLE, dataset) + dataset_ref = DatasetReference(self.PROJECT, DS_ID) + table_ref = dataset_ref.table(DEST_TABLE) + table = Table(table_ref, client=client) job.destination = table job.reload() diff --git 
a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index f076c6d39938..7cc7bffe7080 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -21,6 +21,8 @@ from six.moves import http_client import pytest +from google.cloud.bigquery.dataset import DatasetReference + class _SchemaBase(object): @@ -166,8 +168,10 @@ def _verifyResourceProperties(self, table, resource): def test_ctor(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) + self.assertEqual(table.table_id, self.TABLE_NAME) self.assertIs(table._dataset, dataset) self.assertEqual(table.project, self.PROJECT) @@ -198,17 +202,20 @@ def test_ctor_w_schema(self): from google.cloud.bigquery.table import SchemaField client = _Client(self.PROJECT) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) + self.assertEqual(table.schema, [full_name, age]) def test_num_bytes_getter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) # Check with no value set. 
self.assertIsNone(table.num_bytes) @@ -229,8 +236,9 @@ def test_num_bytes_getter(self): def test_num_rows_getter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) # Check with no value set. self.assertIsNone(table.num_rows) @@ -251,8 +259,9 @@ def test_num_rows_getter(self): def test_schema_setter_non_list(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(TypeError): table.schema = object() @@ -260,8 +269,9 @@ def test_schema_setter_invalid_field(self): from google.cloud.bigquery.table import SchemaField client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') with self.assertRaises(ValueError): table.schema = [full_name, object()] @@ -270,8 +280,9 @@ def test_schema_setter(self): from google.cloud.bigquery.table import SchemaField client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') table.schema = [full_name, age] @@ -289,8 +300,9 @@ def test_props_set_by_server(self): URL = 'http://example.com/projects/%s/datasets/%s/tables/%s' 
% ( self.PROJECT, self.DS_ID, self.TABLE_NAME) client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table._properties['creationTime'] = _millis(CREATED) table._properties['etag'] = 'ETAG' table._properties['lastModifiedTime'] = _millis(MODIFIED) @@ -311,22 +323,25 @@ def test_props_set_by_server(self): def test_description_setter_bad_value(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.description = 12345 def test_description_setter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.description = 'DESCRIPTION' self.assertEqual(table.description, 'DESCRIPTION') def test_expires_setter_bad_value(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.expires = object() @@ -336,72 +351,82 @@ def test_expires_setter(self): WHEN = datetime.datetime(2015, 7, 28, 16, 39, tzinfo=UTC) client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, 
client=client) table.expires = WHEN self.assertEqual(table.expires, WHEN) def test_friendly_name_setter_bad_value(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.friendly_name = 12345 def test_friendly_name_setter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.friendly_name = 'FRIENDLY' self.assertEqual(table.friendly_name, 'FRIENDLY') def test_location_setter_bad_value(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.location = 12345 def test_location_setter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.location = 'LOCATION' self.assertEqual(table.location, 'LOCATION') def test_view_query_setter_bad_value(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.view_query = 12345 def test_view_query_setter(self): client = _Client(self.PROJECT) - 
dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.view_query = 'select * from foo' self.assertEqual(table.view_query, 'select * from foo') def test_view_query_deleter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.view_query = 'select * from foo' del table.view_query self.assertIsNone(table.view_query) def test_view_use_legacy_sql_setter_bad_value(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.view_use_legacy_sql = 12345 def test_view_use_legacy_sql_setter(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.view_use_legacy_sql = False table.view_query = 'select * from foo' self.assertEqual(table.view_use_legacy_sql, False) @@ -410,16 +435,14 @@ def test_view_use_legacy_sql_setter(self): def test_from_api_repr_missing_identity(self): self._setUpConstants() client = _Client(self.PROJECT) - dataset = _Dataset(client) RESOURCE = {} klass = self._get_target_class() with self.assertRaises(KeyError): - klass.from_api_repr(RESOURCE, dataset) + klass.from_api_repr(RESOURCE, client) def test_from_api_repr_bare(self): self._setUpConstants() client = _Client(self.PROJECT) - dataset = 
_Dataset(client) RESOURCE = { 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_NAME), 'tableReference': { @@ -430,18 +453,17 @@ def test_from_api_repr_bare(self): 'type': 'TABLE', } klass = self._get_target_class() - table = klass.from_api_repr(RESOURCE, dataset) + table = klass.from_api_repr(RESOURCE, client) self.assertEqual(table.table_id, self.TABLE_NAME) - self.assertIs(table._dataset, dataset) + self.assertIs(table._client, client) self._verifyResourceProperties(table, RESOURCE) def test_from_api_repr_w_properties(self): client = _Client(self.PROJECT) - dataset = _Dataset(client) RESOURCE = self._makeResource() klass = self._get_target_class() - table = klass.from_api_repr(RESOURCE, dataset) - self.assertIs(table._dataset._client, client) + table = klass.from_api_repr(RESOURCE, client) + self.assertIs(table._client, client) self._verifyResourceProperties(table, RESOURCE) def test_create_new_day_partitioned_table(self): @@ -449,8 +471,9 @@ def test_create_new_day_partitioned_table(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.partitioning_type = 'DAY' table.create() @@ -475,11 +498,12 @@ def test_create_w_bound_client(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) 
table.create() @@ -506,11 +530,12 @@ def test_create_w_partition_no_expire(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertIsNone(table.partitioning_type) table.partitioning_type = "DAY" @@ -541,11 +566,12 @@ def test_create_w_partition_and_expire(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertIsNone(table.partition_expiration) table.partition_expiration = 100 self.assertEqual(table.partitioning_type, "DAY") @@ -575,11 +601,12 @@ def test_partition_type_setter_bad_type(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) with 
self.assertRaises(ValueError): table.partitioning_type = 123 @@ -589,11 +616,12 @@ def test_partition_type_setter_unknown_value(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) with self.assertRaises(ValueError): table.partitioning_type = "HASH" @@ -603,11 +631,12 @@ def test_partition_type_setter_w_known_value(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertIsNone(table.partitioning_type) table.partitioning_type = 'DAY' self.assertEqual(table.partitioning_type, 'DAY') @@ -618,11 +647,12 @@ def test_partition_type_setter_w_none(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, 
schema=[full_name, age], + client=client) table._properties['timePartitioning'] = {'type': 'DAY'} table.partitioning_type = None self.assertIsNone(table.partitioning_type) @@ -634,11 +664,12 @@ def test_partition_experation_bad_type(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) with self.assertRaises(ValueError): table.partition_expiration = "NEVER" @@ -648,11 +679,12 @@ def test_partition_expiration_w_integer(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertIsNone(table.partition_expiration) table.partition_expiration = 100 self.assertEqual(table.partitioning_type, "DAY") @@ -664,11 +696,12 @@ def test_partition_expiration_w_none(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = 
self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertIsNone(table.partition_expiration) table._properties['timePartitioning'] = { 'type': 'DAY', @@ -684,11 +717,12 @@ def test_partition_expiration_w_none_no_partition_set(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertIsNone(table.partition_expiration) table.partition_expiration = None self.assertIsNone(table.partitioning_type) @@ -700,11 +734,12 @@ def test_list_partitions(self): conn = _Connection() client = _Client(project=self.PROJECT, connection=conn) client._query_results = [(20160804, None), (20160805, None)] - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) self.assertEqual(table.list_partitions(), [20160804, 20160805]) def test_create_w_alternate_client(self): @@ -729,8 +764,9 @@ def test_create_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client=client1) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = 
DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client1) table.friendly_name = TITLE table.description = DESCRIPTION table.view_query = QUERY @@ -766,11 +802,12 @@ def test_create_w_missing_output_properties(self): self.WHEN = None conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) table.create() @@ -795,8 +832,9 @@ def test_exists_miss_w_bound_client(self): self.PROJECT, self.DS_ID, self.TABLE_NAME) conn = _Connection() client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) self.assertFalse(table.exists()) @@ -813,8 +851,9 @@ def test_exists_hit_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection({}) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client1) self.assertTrue(table.exists(client=client2)) @@ -831,8 +870,9 @@ def test_reload_w_bound_client(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = 
self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.reload() @@ -850,8 +890,9 @@ def test_reload_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client1) table.reload(client=client2) @@ -866,8 +907,9 @@ def test_patch_w_invalid_expiration(self): RESOURCE = self._makeResource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError): table.patch(expires='BOGUS') @@ -882,8 +924,9 @@ def test_patch_w_bound_client(self): RESOURCE['friendlyName'] = TITLE conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.patch(description=DESCRIPTION, friendly_name=TITLE, @@ -922,8 +965,9 @@ def test_patch_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) 
+ table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client1) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='NULLABLE') @@ -958,8 +1002,9 @@ def test_patch_w_schema_None(self): RESOURCE['friendlyName'] = TITLE conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.patch(schema=None) @@ -983,11 +1028,12 @@ def test_update_w_bound_client(self): RESOURCE['friendlyName'] = TITLE conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age]) + table = self._make_one(table_ref, schema=[full_name, age], + client=client) table.description = DESCRIPTION table.friendly_name = TITLE @@ -1033,8 +1079,9 @@ def test_update_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client1) table.default_table_expiration_ms = DEF_TABLE_EXP table.location = LOCATION table.expires = self.EXP_TIME @@ -1065,8 +1112,9 @@ def test_delete_w_bound_client(self): self.PROJECT, self.DS_ID, self.TABLE_NAME) conn = _Connection({}) client = 
_Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) table.delete() @@ -1082,8 +1130,9 @@ def test_delete_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection({}) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client1) table.delete(client=client2) @@ -1097,8 +1146,9 @@ def test_fetch_data_wo_schema(self): from google.cloud.bigquery.table import _TABLE_HAS_NO_SCHEMA client = _Client(project=self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError) as exc: table.fetch_data() @@ -1154,12 +1204,13 @@ def _bigquery_timestamp_float_repr(ts_float): conn = _Connection(DATA) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='NULLABLE') joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, joined]) + table = self._make_one(table_ref, schema=[full_name, age, joined], + client=client) iterator = table.fetch_data() page = six.next(iterator.pages) @@ -1220,13 +1271,15 @@ def 
test_fetch_data_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(DATA) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') voter = SchemaField('voter', 'BOOLEAN', mode='NULLABLE') score = SchemaField('score', 'FLOAT', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, voter, score]) + table = self._make_one(table_ref, + schema=[full_name, age, voter, score], + client=client1) iterator = table.fetch_data( client=client2, max_results=MAX, page_token=TOKEN) @@ -1277,14 +1330,15 @@ def test_fetch_data_w_repeated_fields(self): } conn = _Connection(DATA) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) color = SchemaField('color', 'STRING', mode='REPEATED') index = SchemaField('index', 'INTEGER', 'REPEATED') score = SchemaField('score', 'FLOAT', 'REPEATED') struct = SchemaField('struct', 'RECORD', mode='REPEATED', fields=[index, score]) - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[color, struct]) + table = self._make_one(table_ref, schema=[color, struct], + client=client) iterator = table.fetch_data() page = six.next(iterator.pages) @@ -1332,15 +1386,16 @@ def test_fetch_data_w_record_schema(self): } conn = _Connection(DATA) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') area_code = SchemaField('area_code', 'STRING', 'REQUIRED') local_number = 
SchemaField('local_number', 'STRING', 'REQUIRED') rank = SchemaField('rank', 'INTEGER', 'REQUIRED') phone = SchemaField('phone', 'RECORD', mode='NULLABLE', fields=[area_code, local_number, rank]) - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, phone]) + table = self._make_one(table_ref, schema=[full_name, phone], + client=client) iterator = table.fetch_data() page = six.next(iterator.pages) @@ -1371,8 +1426,9 @@ def test_row_from_mapping_wo_schema(self): from google.cloud.bigquery.table import _TABLE_HAS_NO_SCHEMA MAPPING = {'full_name': 'Phred Phlyntstone', 'age': 32} client = _Client(project=self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) with self.assertRaises(ValueError) as exc: table.row_from_mapping(MAPPING) @@ -1388,13 +1444,15 @@ def test_row_from_mapping_w_invalid_schema(self): 'bogus': 'WHATEVER', } client = _Client(project=self.PROJECT) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') colors = SchemaField('colors', 'DATETIME', mode='REPEATED') bogus = SchemaField('joined', 'STRING', mode='BOGUS') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, colors, bogus]) + table = self._make_one(table_ref, + schema=[full_name, age, colors, bogus], + client=client) with self.assertRaises(ValueError) as exc: table.row_from_mapping(MAPPING) @@ -1410,13 +1468,15 @@ def test_row_from_mapping_w_schema(self): 'extra': 'IGNORED', } client = _Client(project=self.PROJECT) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = 
SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') colors = SchemaField('colors', 'DATETIME', mode='REPEATED') joined = SchemaField('joined', 'STRING', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, colors, joined]) + table = self._make_one(table_ref, + schema=[full_name, age, colors, joined], + client=client) self.assertEqual( table.row_from_mapping(MAPPING), @@ -1426,8 +1486,9 @@ def test_insert_data_wo_schema(self): from google.cloud.bigquery.table import _TABLE_HAS_NO_SCHEMA client = _Client(project=self.PROJECT) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) ROWS = [ ('Phred Phlyntstone', 32), ('Bharney Rhubble', 33), @@ -1454,12 +1515,13 @@ def test_insert_data_w_bound_client(self): self.PROJECT, self.DS_ID, self.TABLE_NAME) conn = _Connection({}) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, joined]) + table = self._make_one(table_ref, schema=[full_name, age, joined], + client=client) ROWS = [ ('Phred Phlyntstone', 32, _datetime_to_rfc3339(WHEN)), ('Bharney Rhubble', 33, WHEN + datetime.timedelta(seconds=1)), @@ -1507,12 +1569,13 @@ def test_insert_data_w_alternate_client(self): client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESPONSE) client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = _Dataset(client1) + dataset = 
DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') age = SchemaField('age', 'INTEGER', mode='REQUIRED') voter = SchemaField('voter', 'BOOLEAN', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, voter]) + table = self._make_one(table_ref, schema=[full_name, age, voter], + client=client1) ROWS = [ ('Phred Phlyntstone', 32, True), ('Bharney Rhubble', 33, False), @@ -1564,14 +1627,15 @@ def test_insert_data_w_repeated_fields(self): self.PROJECT, self.DS_ID, self.TABLE_NAME) conn = _Connection({}) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('color', 'STRING', mode='REPEATED') index = SchemaField('index', 'INTEGER', 'REPEATED') score = SchemaField('score', 'FLOAT', 'REPEATED') struct = SchemaField('struct', 'RECORD', mode='REPEATED', fields=[index, score]) - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, struct]) + table = self._make_one(table_ref, schema=[full_name, struct], + client=client) ROWS = [ (['red', 'green'], [{'index': [1, 2], 'score': [3.1415, 1.414]}]), ] @@ -1600,15 +1664,16 @@ def test_insert_data_w_record_schema(self): self.PROJECT, self.DS_ID, self.TABLE_NAME) conn = _Connection({}) client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') area_code = SchemaField('area_code', 'STRING', 'REQUIRED') local_number = SchemaField('local_number', 'STRING', 'REQUIRED') rank = SchemaField('rank', 'INTEGER', 'REQUIRED') phone = SchemaField('phone', 'RECORD', mode='NULLABLE', fields=[area_code, local_number, rank]) - table = 
self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, phone]) + table = self._make_one(table_ref, schema=[full_name, phone], + client=client) ROWS = [ ('Phred Phlyntstone', {'area_code': '800', 'local_number': '555-1212', @@ -1639,7 +1704,9 @@ def _row_data(row): def test__get_transport(self): client = mock.Mock(spec=[u'_credentials', '_http']) client._http = mock.sentinel.http - table = self._make_one(self.TABLE_NAME, None) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) transport = table._get_transport(client) @@ -1667,8 +1734,9 @@ def _initiate_resumable_upload_helper(self, num_retries=None): connection = _Connection() client = _Client(self.PROJECT, connection=connection) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) # Create mocks to be checked for doing transport. resumable_url = 'http://test.invalid?upload_id=hey-you' @@ -1736,8 +1804,9 @@ def _do_multipart_upload_success_helper( connection = _Connection() client = _Client(self.PROJECT, connection=connection) - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset) + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref, client=client) # Create mocks to be checked for doing transport. 
fake_transport = self._mock_transport(http_client.OK, {}) @@ -1808,8 +1877,9 @@ def _make_table(transport=None): client._http = transport client.project = 'project_id' - dataset = dataset.Dataset('test_dataset', client) - table = table.Table('test_table', dataset) + dataset_ref = dataset.DatasetReference('project_id', 'test_dataset') + table_ref = dataset_ref.table('test_table') + table = table.Table(table_ref, client=client) return table @@ -1867,7 +1937,7 @@ def test_upload_from_file_resumable(self): table.upload_from_file(file_obj, source_format='CSV') do_upload.assert_called_once_with( - table._dataset._client, + table._client, file_obj, self.EXPECTED_CONFIGURATION, google.cloud.bigquery.table._DEFAULT_NUM_RETRIES) @@ -1897,7 +1967,7 @@ def test_upload_file_resumable_metadata(self): 'load': { 'sourceFormat': config_args['source_format'], 'destinationTable': { - 'projectId': table._dataset._client.project, + 'projectId': table._dataset.project, 'datasetId': table.dataset_id, 'tableId': table.table_id, }, @@ -1926,7 +1996,7 @@ def test_upload_file_resumable_metadata(self): file_obj, **config_args) do_upload.assert_called_once_with( - table._dataset._client, + table._client, file_obj, expected_config, mock.ANY) @@ -1945,7 +2015,7 @@ def test_upload_from_file_multipart(self): file_obj, source_format='CSV', size=file_obj_size) do_upload.assert_called_once_with( - table._dataset._client, + table._client, file_obj, self.EXPECTED_CONFIGURATION, file_obj_size, @@ -1963,7 +2033,7 @@ def test_upload_from_file_with_retries(self): file_obj, source_format='CSV', num_retries=num_retries) do_upload.assert_called_once_with( - table._dataset._client, + table._client, file_obj, self.EXPECTED_CONFIGURATION, num_retries) @@ -2046,7 +2116,7 @@ def test__do_resumable_upload(self): table = self._make_table(transport) result = table._do_resumable_upload( - table._dataset._client, + table._client, file_obj, self.EXPECTED_CONFIGURATION, None) @@ -2069,7 +2139,7 @@ def 
test__do_multipart_upload(self): file_obj_len = len(file_obj.getvalue()) table._do_multipart_upload( - table._dataset._client, + table._client, file_obj, self.EXPECTED_CONFIGURATION, file_obj_len, @@ -2100,7 +2170,7 @@ def test__do_multipart_upload_wrong_size(self): with pytest.raises(ValueError): table._do_multipart_upload( - table._dataset._client, + table._client, file_obj, {}, file_obj_len+1, @@ -2308,22 +2378,6 @@ def run(self): self.rows = self.client._query_results -class _Dataset(object): - - def __init__(self, client, dataset_id=TestTable.DS_ID): - self._client = client - self.dataset_id = dataset_id - - @property - def path(self): - return '/projects/%s/datasets/%s' % ( - self._client.project, self.dataset_id) - - @property - def project(self): - return self._client.project - - class _Connection(object): API_BASE_URL = 'http://example.com'