From 99bc58849a85240b576bd5a97e278a7e3e0ccb6d Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Thu, 20 Aug 2015 16:16:02 -0400
Subject: [PATCH] Replace validating property boilerplate w/ custom descriptors.

Addresses point #3 of:
https://github.com/GoogleCloudPlatform/gcloud-python/pull/1072#issuecomment-132682862
---
 gcloud/bigquery/job.py      | 733 +++++++++---------------------------
 gcloud/bigquery/test_job.py | 385 +++++--------------
 2 files changed, 284 insertions(+), 834 deletions(-)

diff --git a/gcloud/bigquery/job.py b/gcloud/bigquery/job.py
index 734345463760..32e32c452e27 100644
--- a/gcloud/bigquery/job.py
+++ b/gcloud/bigquery/job.py
@@ -23,6 +23,121 @@
 from gcloud.bigquery.table import _parse_schema_resource
+
+class _ConfigurationProperty(object):
+    """Base property implementation.
+
+    Values will be stored on a `_configuration` helper attribute of the
+    property's job instance.
+
+    :type name: string
+    :param name: name of the property
+    """
+
+    def __init__(self, name):
+        self.name = name
+        self._backing_name = '_%s' % (self.name,)
+
+    def __get__(self, instance, owner):
+        """Descriptor protocol: accessor"""
+        if instance is None:
+            return self
+        return getattr(instance._configuration, self._backing_name)
+
+    def _validate(self, value):
+        """Subclasses override to impose validation policy."""
+        pass
+
+    def __set__(self, instance, value):
+        """Descriptor protocol: mutator"""
+        self._validate(value)
+        setattr(instance._configuration, self._backing_name, value)
+
+    def __delete__(self, instance):
+        """Descriptor protocol: deleter"""
+        delattr(instance._configuration, self._backing_name)
+
+
+class _TypedProperty(_ConfigurationProperty):
+    """Property implementation: validates based on value type.
+
+    :type name: string
+    :param name: name of the property
+
+    :type property_type: type or sequence of types
+    :param property_type: type to be validated
+    """
+    def __init__(self, name, property_type):
+        super(_TypedProperty, self).__init__(name)
+        self.property_type = property_type
+
+    def _validate(self, value):
+        if not isinstance(value, self.property_type):
+            raise ValueError('Required type: %s' % (self.property_type,))
+
+
+class _EnumProperty(_ConfigurationProperty):
+    """Pseudo-enumeration class.
+
+    Subclasses must define ``ALLOWED`` as a class-level constant: it must
+    be a sequence of strings.
+
+    :type name: string
+    :param name: name of the property
+    """
+    def _validate(self, value):
+        """Check that ``value`` is one of the allowed values.
+
+        :raises: ValueError if value is not allowed.
+        """
+        if value not in self.ALLOWED:
+            raise ValueError('Pass one of: %s' % ', '.join(self.ALLOWED))
+
+
+class Compression(_EnumProperty):
+    """Pseudo-enum for ``compression`` properties."""
+    GZIP = 'GZIP'
+    NONE = 'NONE'
+    ALLOWED = (GZIP, NONE)
+
+
+class CreateDisposition(_EnumProperty):
+    """Pseudo-enum for ``create_disposition`` properties."""
+    CREATE_IF_NEEDED = 'CREATE_IF_NEEDED'
+    CREATE_NEVER = 'CREATE_NEVER'
+    ALLOWED = (CREATE_IF_NEEDED, CREATE_NEVER)
+
+
+class DestinationFormat(_EnumProperty):
+    """Pseudo-enum for ``destination_format`` properties."""
+    CSV = 'CSV'
+    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
+    AVRO = 'AVRO'
+    ALLOWED = (CSV, NEWLINE_DELIMITED_JSON, AVRO)
+
+
+class Encoding(_EnumProperty):
+    """Pseudo-enum for ``encoding`` properties."""
+    UTF_8 = 'UTF-8'
+    ISO_8559_1 = 'ISO-8559-1'
+    ALLOWED = (UTF_8, ISO_8559_1)
+
+
+class SourceFormat(_EnumProperty):
+    """Pseudo-enum for ``source_format`` properties."""
+    CSV = 'CSV'
+    DATASTORE_BACKUP = 'DATASTORE_BACKUP'
+    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
+    ALLOWED = (CSV, DATASTORE_BACKUP, NEWLINE_DELIMITED_JSON)
+
+
+class WriteDisposition(_EnumProperty):
+    """Pseudo-enum for ``write_disposition`` properties."""
+    WRITE_APPEND = 'WRITE_APPEND'
+    WRITE_TRUNCATE = 'WRITE_TRUNCATE'
+    WRITE_EMPTY = 'WRITE_EMPTY'
+    ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY)
+
+
 class _BaseJob(object):
     """Base class for asynchronous jobs.
 
@@ -271,70 +386,6 @@ def cancel(self, client=None):
         self._set_properties(api_response)
 
-
-class _Enum(object):
-    """Psedo-enumeration class.
-
-    Subclasses must define ``ALLOWED`` as a class-level constant: it must
-    be a sequence of strings.
-    """
-    @classmethod
-    def validate(cls, value):
-        """Check that ``value`` is one of the allowed values.
-
-        :raises: ValueError if value is not allowed.
-        """
-        if value not in cls.ALLOWED:
-            raise ValueError('Pass one of: %s' ', '.join(cls.ALLOWED))
-
-
-class CreateDisposition(_Enum):
-    """Pseudo-enum for allowed values for ``create_disposition`` properties.
-
-    See:
-    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.createDisposition
-    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition
-    """
-    CREATE_IF_NEEDED = 'CREATE_IF_NEEDED'
-    CREATE_NEVER = 'CREATE_NEVER'
-    ALLOWED = (CREATE_IF_NEEDED, CREATE_NEVER)
-
-
-class Encoding(_Enum):
-    """Pseudo-enum for allowed values for ``encoding`` properties.
-
-    See:
-    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.encoding
-    """
-    UTF_8 = 'UTF-8'
-    ISO_8559_1 = 'ISO-8559-1'
-    ALLOWED = (UTF_8, ISO_8559_1)
-
-
-class SourceFormat(_Enum):
-    """Pseudo-enum for allowed values for ``source_format`` properties.
-
-    See:
-    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceFormat
-    """
-    CSV = 'CSV'
-    DATASTORE_BACKUP = 'DATASTORE_BACKUP'
-    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
-    ALLOWED = (CSV, DATASTORE_BACKUP, NEWLINE_DELIMITED_JSON)
-
-
-class WriteDisposition(_Enum):
-    """Pseudo-enum for allowed values for ``write_disposition`` properties.
-
-    See:
-    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.writeDisposition
-    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition
-    """
-    WRITE_APPEND = 'WRITE_APPEND'
-    WRITE_TRUNCATE = 'WRITE_TRUNCATE'
-    WRITE_EMPTY = 'WRITE_EMPTY'
-    ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY)
-
-
 class _LoadConfiguration(object):
     """User-settable configuration options for load jobs."""
     # None -> use server default.
@@ -444,323 +495,60 @@ def output_rows(self):
         if statistics is not None:
             return int(statistics['load']['outputRows'])
 
-    @property
-    def allow_jagged_rows(self):
-        """Allow rows with missing trailing commas for optional fields.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowJaggedRows
-
-        :rtype: boolean, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._allow_jagged_rows
-
-    @allow_jagged_rows.setter
-    def allow_jagged_rows(self, value):
-        """Update allow_jagged_rows.
-
-        :type value: boolean
-        :param value: new allow_jagged_rows
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, bool):
-            raise ValueError("Pass a boolean")
-        self._configuration._allow_jagged_rows = value
-
-    @allow_jagged_rows.deleter
-    def allow_jagged_rows(self):
-        """Delete allow_jagged_rows."""
-        del self._configuration._allow_jagged_rows
-
-    @property
-    def allow_quoted_newlines(self):
-        """Allow rows with quoted newlines.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowQuotedNewlines
-
-        :rtype: boolean, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._allow_quoted_newlines
-
-    @allow_quoted_newlines.setter
-    def allow_quoted_newlines(self, value):
-        """Update allow_quoted_newlines.
-
-        :type value: boolean
-        :param value: new allow_quoted_newlines
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, bool):
-            raise ValueError("Pass a boolean")
-        self._configuration._allow_quoted_newlines = value
-
-    @allow_quoted_newlines.deleter
-    def allow_quoted_newlines(self):
-        """Delete allow_quoted_newlines."""
-        del self._configuration._allow_quoted_newlines
-
-    @property
-    def create_disposition(self):
-        """Define how the back-end handles a missing destination table.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.createDisposition
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._create_disposition
-
-    @create_disposition.setter
-    def create_disposition(self, value):
-        """Update create_disposition.
-
-        :type value: string
-        :param value: allowed values for :class:`CreateDisposition`.
-        """
-        CreateDisposition.validate(value)  # raises ValueError if invalid
-        self._configuration._create_disposition = value
-
-    @create_disposition.deleter
-    def create_disposition(self):
-        """Delete create_disposition."""
-        del self._configuration._create_disposition
-
-    @property
-    def encoding(self):
-        """Encoding for source data.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.encoding
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._encoding
-
-    @encoding.setter
-    def encoding(self, value):
-        """Update encoding.
-
-        :type value: string
-        :param value: allowed values for :class:`Encoding`.
-        """
-        Encoding.validate(value)  # raises ValueError if invalid
-        self._configuration._encoding = value
-
-    @encoding.deleter
-    def encoding(self):
-        """Delete encoding."""
-        del self._configuration._encoding
-
-    @property
-    def field_delimiter(self):
-        """Allow rows with missing trailing commas for optional fields.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.fieldDelimiter
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._field_delimiter
-
-    @field_delimiter.setter
-    def field_delimiter(self, value):
-        """Update field_delimiter.
-
-        :type value: string
-        :param value: new field delimiter
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, six.string_types):
-            raise ValueError("Pass a string")
-        self._configuration._field_delimiter = value
-
-    @field_delimiter.deleter
-    def field_delimiter(self):
-        """Delete field_delimiter."""
-        del self._configuration._field_delimiter
-
-    @property
-    def ignore_unknown_values(self):
-        """Ignore rows with extra columns beyond those specified by the schema.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.ignoreUnknownValues
-
-        :rtype: boolean, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._ignore_unknown_values
-
-    @ignore_unknown_values.setter
-    def ignore_unknown_values(self, value):
-        """Update ignore_unknown_values.
-
-        :type value: boolean
-        :param value: new ignore_unknown_values
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, bool):
-            raise ValueError("Pass a boolean")
-        self._configuration._ignore_unknown_values = value
-
-    @ignore_unknown_values.deleter
-    def ignore_unknown_values(self):
-        """Delete ignore_unknown_values."""
-        del self._configuration._ignore_unknown_values
-
-    @property
-    def max_bad_records(self):
-        """Max number of bad records to be ignored.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.maxBadRecords
-
-        :rtype: integer, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._max_bad_records
-
-    @max_bad_records.setter
-    def max_bad_records(self, value):
-        """Update max_bad_records.
-
-        :type value: integer
-        :param value: new max_bad_records
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, six.integer_types):
-            raise ValueError("Pass an integer")
-        self._configuration._max_bad_records = value
-
-    @max_bad_records.deleter
-    def max_bad_records(self):
-        """Delete max_bad_records."""
-        del self._configuration._max_bad_records
-
-    @property
-    def quote_character(self):
-        """Character used to quote values.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.quote
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._quote_character
-
-    @quote_character.setter
-    def quote_character(self, value):
-        """Update quote_character.
-
-        :type value: string
-        :param value: new quote_character
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, six.string_types):
-            raise ValueError("Pass a string")
-        self._configuration._quote_character = value
-
-    @quote_character.deleter
-    def quote_character(self):
-        """Delete quote_character."""
-        del self._configuration._quote_character
-
-    @property
-    def skip_leading_rows(self):
-        """Count of leading rows to be skipped.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.skipLeadingRows
-
-        :rtype: integer, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._skip_leading_rows
-
-    @skip_leading_rows.setter
-    def skip_leading_rows(self, value):
-        """Update skip_leading_rows.
-
-        :type value: integer
-        :param value: new skip_leading_rows
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, six.integer_types):
-            raise ValueError("Pass a boolean")
-        self._configuration._skip_leading_rows = value
-
-    @skip_leading_rows.deleter
-    def skip_leading_rows(self):
-        """Delete skip_leading_rows."""
-        del self._configuration._skip_leading_rows
-
-    @property
-    def source_format(self):
-        """Format of source data files.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceFormat
+    allow_jagged_rows = _TypedProperty('allow_jagged_rows', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowJaggedRows
+    """
 
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._source_format
+    allow_quoted_newlines = _TypedProperty('allow_quoted_newlines', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowQuotedNewlines
+    """
 
-    @source_format.setter
-    def source_format(self, value):
-        """Update source_format.
+    create_disposition = CreateDisposition('create_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.createDisposition
+    """
 
-        :type value: string
-        :param value: valid values for :class:`SourceFormat`.
-        """
-        SourceFormat.validate(value)  # raises ValueError if invalid
-        self._configuration._source_format = value
+    encoding = Encoding('encoding')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.encoding
+    """
 
-    @source_format.deleter
-    def source_format(self):
-        """Delete source_format."""
-        del self._configuration._source_format
+    field_delimiter = _TypedProperty('field_delimiter', six.string_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.fieldDelimiter
+    """
 
-    @property
-    def write_disposition(self):
-        """Allow rows with missing trailing commas for optional fields.
+    ignore_unknown_values = _TypedProperty('ignore_unknown_values', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.ignoreUnknownValues
+    """
 
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.writeDisposition
+    max_bad_records = _TypedProperty('max_bad_records', six.integer_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.maxBadRecords
+    """
 
-        :rtype: boolean, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._write_disposition
+    quote_character = _TypedProperty('quote_character', six.string_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.quote
+    """
 
-    @write_disposition.setter
-    def write_disposition(self, value):
-        """Update write_disposition.
+    skip_leading_rows = _TypedProperty('skip_leading_rows', six.integer_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.skipLeadingRows
+    """
 
-        :type value: string
-        :param value: valid values for :class:`WriteDisposition`.
-        """
-        WriteDisposition.validate(value)  # raises ValueError if invalid
-        self._configuration._write_disposition = value
+    source_format = SourceFormat('source_format')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceFormat
+    """
 
-    @write_disposition.deleter
-    def write_disposition(self):
-        """Delete write_disposition."""
-        del self._configuration._write_disposition
+    write_disposition = WriteDisposition('write_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.writeDisposition
+    """
 
     def _populate_config_resource(self, configuration):
         """Helper for _build_resource: copy config properties to resource"""
@@ -849,59 +637,15 @@ def __init__(self, name, destination, sources, client):
         self.sources = sources
         self._configuration = _CopyConfiguration()
 
-    @property
-    def create_disposition(self):
-        """Handling for missing destination table.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._create_disposition
-
-    @create_disposition.setter
-    def create_disposition(self, value):
-        """Update create_disposition.
-
-        :type value: string
-        :param value: allowed values for :class:`CreateDisposition`
-        """
-        CreateDisposition.validate(value)  # raises ValueError if invalid
-        self._configuration._create_disposition = value
-
-    @create_disposition.deleter
-    def create_disposition(self):
-        """Delete create_disposition."""
-        del self._configuration._create_disposition
-
-    @property
-    def write_disposition(self):
-        """Allow rows with missing trailing commas for optional fields.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._write_disposition
-
-    @write_disposition.setter
-    def write_disposition(self, value):
-        """Update write_disposition.
-
-        :type value: string
-        :param value: allowed values for :class:`WriteDisposition`.
-        """
-        WriteDisposition.validate(value)  # raises ValueError if invalid
-        self._configuration._write_disposition = value
+    create_disposition = CreateDisposition('create_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition
+    """
 
-    @write_disposition.deleter
-    def write_disposition(self):
-        """Delete write_disposition."""
-        del self._configuration._write_disposition
+    write_disposition = WriteDisposition('write_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition
+    """
 
     def _populate_config_resource(self, configuration):
         """Helper for _build_resource: copy config properties to resource"""
@@ -950,23 +694,6 @@ class _ExtractConfiguration(object):
     _print_header = None
 
-
-class Compression(_Enum):
-    """Pseudo-enum for allowed values for ``compression`` properties.
-    """
-    GZIP = 'GZIP'
-    NONE = 'NONE'
-    ALLOWED = (GZIP, NONE)
-
-
-class DestinationFormat(_Enum):
-    """Pseudo-enum for allowed values for ``destination_format`` properties.
-    """
-    CSV = 'CSV'
-    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
-    AVRO = 'AVRO'
-    ALLOWED = (CSV, NEWLINE_DELIMITED_JSON, AVRO)
-
-
 class ExtractTableToStorageJob(_BaseJob):
     """Asynchronous job: extract data from a BQ table into Cloud Storage.
 
@@ -990,119 +717,25 @@ def __init__(self, name, source, destination_uris, client):
         self.destination_uris = destination_uris
         self._configuration = _ExtractConfiguration()
 
-    @property
-    def compression(self):
-        """Compression to apply to destination blobs.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.compression
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._compression
-
-    @compression.setter
-    def compression(self, value):
-        """Update compression.
-
-        :type value: string
-        :param value: allowed value for :class:`Compression`.
-        """
-        Compression.validate(value)  # raises ValueError if invalie
-        self._configuration._compression = value
-
-    @compression.deleter
-    def compression(self):
-        """Delete compression."""
-        del self._configuration._compression
-
-    @property
-    def destination_format(self):
-        """Handling for missing destination table.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.destinationFormat
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._destination_format
-
-    @destination_format.setter
-    def destination_format(self, value):
-        """Update destination_format.
-
-        :type value: string
-        :param value: allowed value for :class:`DestinationFormat`.
-        """
-        DestinationFormat.validate(value)  # raises ValueError if invalid
-        self._configuration._destination_format = value
-
-    @destination_format.deleter
-    def destination_format(self):
-        """Delete destination_format."""
-        del self._configuration._destination_format
-
-    @property
-    def field_delimiter(self):
-        """Allow rows with missing trailing commas for optional fields.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.fieldDelimiter
-
-        :rtype: string, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._field_delimiter
-
-    @field_delimiter.setter
-    def field_delimiter(self, value):
-        """Update field_delimiter.
-
-        :type value: string
-        :param value: new field delimiter
-
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, six.string_types):
-            raise ValueError("Pass a string")
-        self._configuration._field_delimiter = value
-
-    @field_delimiter.deleter
-    def field_delimiter(self):
-        """Delete field_delimiter."""
-        del self._configuration._field_delimiter
-
-    @property
-    def print_header(self):
-        """Write a header row into destination blobs.
-
-        See:
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.printHeader
-
-        :rtype: boolean, or ``NoneType``
-        :returns: The value as set by the user, or None (the default).
-        """
-        return self._configuration._print_header
+    compression = Compression('compression')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.compression
+    """
 
-    @print_header.setter
-    def print_header(self, value):
-        """Update print_header.
+    destination_format = DestinationFormat('destination_format')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.destinationFormat
+    """
 
-        :type value: boolean
-        :param value: new print_header
+    field_delimiter = _TypedProperty('field_delimiter', six.string_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.fieldDelimiter
+    """
 
-        :raises: ValueError for invalid value types.
-        """
-        if not isinstance(value, bool):
-            raise ValueError("Pass a boolean")
-        self._configuration._print_header = value
-
-    @print_header.deleter
-    def print_header(self):
-        """Delete print_header."""
-        del self._configuration._print_header
+    print_header = _TypedProperty('print_header', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.printHeader
+    """
 
     def _populate_config_resource(self, configuration):
         """Helper for _build_resource: copy config properties to resource"""
diff --git a/gcloud/bigquery/test_job.py b/gcloud/bigquery/test_job.py
index b23b1dd6b9fc..cbed14d4f1a6 100644
--- a/gcloud/bigquery/test_job.py
+++ b/gcloud/bigquery/test_job.py
@@ -15,6 +15,107 @@
 import unittest2
+
+class Test_ConfigurationProperty(unittest2.TestCase):
+
+    def _getTargetClass(self):
+        from gcloud.bigquery.job import _ConfigurationProperty
+        return _ConfigurationProperty
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_it(self):
+
+        class Configuration(object):
+            _attr = None
+
+        class Wrapper(object):
+            attr = self._makeOne('attr')
+
+            def __init__(self):
+                self._configuration = Configuration()
+
+        self.assertEqual(Wrapper.attr.name, 'attr')
+
+        wrapper = Wrapper()
+        self.assertEqual(wrapper.attr, None)
+
+        value = object()
+        wrapper.attr = value
+        self.assertTrue(wrapper.attr is value)
+        self.assertTrue(wrapper._configuration._attr is value)
+
+        del wrapper.attr
+        self.assertEqual(wrapper.attr, None)
+        self.assertEqual(wrapper._configuration._attr, None)
+
+
+class Test_TypedProperty(unittest2.TestCase):
+
+    def _getTargetClass(self):
+        from gcloud.bigquery.job import _TypedProperty
+        return _TypedProperty
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_it(self):
+
+        class Configuration(object):
+            _attr = None
+
+        class Wrapper(object):
+            attr = self._makeOne('attr', int)
+
+            def __init__(self):
+                self._configuration = Configuration()
+
+        wrapper = Wrapper()
+        with self.assertRaises(ValueError):
+            wrapper.attr = 'BOGUS'
+
+        wrapper.attr = 42
+        self.assertEqual(wrapper.attr, 42)
+        self.assertEqual(wrapper._configuration._attr, 42)
+
+        del wrapper.attr
+        self.assertEqual(wrapper.attr, None)
+        self.assertEqual(wrapper._configuration._attr, None)
+
+
+class Test_EnumProperty(unittest2.TestCase):
+
+    def _getTargetClass(self):
+        from gcloud.bigquery.job import _EnumProperty
+        return _EnumProperty
+
+    def test_it(self):
+
+        class Sub(self._getTargetClass()):
+            ALLOWED = ('FOO', 'BAR', 'BAZ')
+
+        class Configuration(object):
+            _attr = None
+
+        class Wrapper(object):
+            attr = Sub('attr')
+
+            def __init__(self):
+                self._configuration = Configuration()
+
+        wrapper = Wrapper()
+        with self.assertRaises(ValueError):
+            wrapper.attr = 'BOGUS'
+
+        wrapper.attr = 'FOO'
+        self.assertEqual(wrapper.attr, 'FOO')
+        self.assertEqual(wrapper._configuration._attr, 'FOO')
+
+        del wrapper.attr
+        self.assertEqual(wrapper.attr, None)
+        self.assertEqual(wrapper._configuration._attr, None)
+
+
 class _Base(object):
     PROJECT = 'project'
     SOURCE1 = 'http://example.com/source1.csv'
@@ -348,182 +449,6 @@ def test_props_set_by_server(self):
         self.assertEqual(job.errors, [ERROR_RESULT])
         self.assertEqual(job.state, 'STATE')
 
-    def test_allow_jagged_rows_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.allow_jagged_rows = object()
-
-    def test_allow_jagged_rows_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.allow_jagged_rows = True
-        self.assertTrue(job.allow_jagged_rows)
-        del job.allow_jagged_rows
-        self.assertTrue(job.allow_jagged_rows is None)
-
-    def test_allow_quoted_newlines_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.allow_quoted_newlines = object()
-
-    def test_allow_quoted_newlines_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.allow_quoted_newlines = True
-        self.assertTrue(job.allow_quoted_newlines)
-        del job.allow_quoted_newlines
-        self.assertTrue(job.allow_quoted_newlines is None)
-
-    def test_create_disposition_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.create_disposition = 'BOGUS'
-
-    def test_create_disposition_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.create_disposition = 'CREATE_IF_NEEDED'
-        self.assertEqual(job.create_disposition, 'CREATE_IF_NEEDED')
-        del job.create_disposition
-        self.assertTrue(job.create_disposition is None)
-
-    def test_encoding_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.encoding = 'BOGUS'
-
-    def test_encoding_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.encoding = 'ISO-8559-1'
-        self.assertEqual(job.encoding, 'ISO-8559-1')
-        del job.encoding
-        self.assertTrue(job.encoding is None)
-
-    def test_field_delimiter_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.field_delimiter = object()
-
-    def test_field_delimiter_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.field_delimiter = '|'
-        self.assertEqual(job.field_delimiter, '|')
-        del job.field_delimiter
-        self.assertTrue(job.field_delimiter is None)
-
-    def test_ignore_unknown_values_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.ignore_unknown_values = object()
-
-    def test_ignore_unknown_values_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.ignore_unknown_values = True
-        self.assertTrue(job.ignore_unknown_values)
-        del job.ignore_unknown_values
-        self.assertTrue(job.ignore_unknown_values is None)
-
-    def test_max_bad_records_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.max_bad_records = object()
-
-    def test_max_bad_records_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.max_bad_records = 100
-        self.assertEqual(job.max_bad_records, 100)
-        del job.max_bad_records
-        self.assertTrue(job.max_bad_records is None)
-
-    def test_quote_character_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.quote_character = object()
-
-    def test_quote_character_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.quote_character = "'"
-        self.assertEqual(job.quote_character, "'")
-        del job.quote_character
-        self.assertTrue(job.quote_character is None)
-
-    def test_skip_leading_rows_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.skip_leading_rows = object()
-
-    def test_skip_leading_rows_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.skip_leading_rows = 2
-        self.assertEqual(job.skip_leading_rows, 2)
-        del job.skip_leading_rows
-        self.assertTrue(job.skip_leading_rows is None)
-
-    def test_source_format_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.source_format = 'BOGUS'
-
-    def test_source_format_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.source_format = 'NEWLINE_DELIMITED_JSON'
-        self.assertEqual(job.source_format, 'NEWLINE_DELIMITED_JSON')
-        del job.source_format
-        self.assertTrue(job.source_format is None)
-
-    def test_write_disposition_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        with self.assertRaises(ValueError):
-            job.write_disposition = 'BOGUS'
-
-    def test_write_disposition_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        table = _Table()
-        job = self._makeOne(self.JOB_NAME, table, [self.SOURCE1], client)
-        job.write_disposition = 'WRITE_TRUNCATE'
-        self.assertEqual(job.write_disposition, 'WRITE_TRUNCATE')
-        del job.write_disposition
-        self.assertTrue(job.write_disposition is None)
-
     def test_begin_w_bound_client(self):
         PATH = 'projects/%s/jobs' % self.PROJECT
         RESOURCE = self._makeResource()
@@ -779,42 +704,6 @@ def test_ctor(self):
         self.assertTrue(job.create_disposition is None)
         self.assertTrue(job.write_disposition is None)
 
-    def test_create_disposition_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        destination = _Table(self.DESTINATION_TABLE)
-        job = self._makeOne(self.JOB_NAME, destination, [source], client)
-        with self.assertRaises(ValueError):
-            job.create_disposition = 'BOGUS'
-
-    def test_create_disposition_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        destination = _Table(self.DESTINATION_TABLE)
-        job = self._makeOne(self.JOB_NAME, destination, [source], client)
-        job.create_disposition = 'CREATE_IF_NEEDED'
-        self.assertEqual(job.create_disposition, 'CREATE_IF_NEEDED')
-        del job.create_disposition
-        self.assertTrue(job.create_disposition is None)
-
-    def test_write_disposition_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        destination = _Table(self.DESTINATION_TABLE)
-        job = self._makeOne(self.JOB_NAME, destination, [source], client)
-        with self.assertRaises(ValueError):
-            job.write_disposition = 'BOGUS'
-
-    def test_write_disposition_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        destination = _Table(self.DESTINATION_TABLE)
-        job = self._makeOne(self.JOB_NAME, destination, [source], client)
-        job.write_disposition = 'WRITE_TRUNCATE'
-        self.assertEqual(job.write_disposition, 'WRITE_TRUNCATE')
-        del job.write_disposition
-        self.assertTrue(job.write_disposition is None)
-
     def test_begin_w_bound_client(self):
         PATH = 'projects/%s/jobs' % self.PROJECT
         RESOURCE = self._makeResource()
@@ -1037,78 +926,6 @@ def test_ctor(self):
         self.assertTrue(job.field_delimiter is None)
         self.assertTrue(job.print_header is None)
 
-    def test_compression_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        with self.assertRaises(ValueError):
-            job.compression = 'BOGUS'
-
-    def test_compression_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        job.compression = 'GZIP'
-        self.assertEqual(job.compression, 'GZIP')
-        del job.compression
-        self.assertTrue(job.compression is None)
-
-    def test_destination_format_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        with self.assertRaises(ValueError):
-            job.destination_format = 'BOGUS'
-
-    def test_destination_format_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        job.destination_format = 'AVRO'
-        self.assertEqual(job.destination_format, 'AVRO')
-        del job.destination_format
-        self.assertTrue(job.destination_format is None)
-
-    def test_field_delimiter_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        with self.assertRaises(ValueError):
-            job.field_delimiter = object()
-
-    def test_field_delimiter_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        job.field_delimiter = '|'
-        self.assertEqual(job.field_delimiter, '|')
-        del job.field_delimiter
-        self.assertTrue(job.field_delimiter is None)
-
-    def test_print_header_setter_bad_value(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        with self.assertRaises(ValueError):
-            job.print_header = 'BOGUS'
-
-    def test_print_header_setter_deleter(self):
-        client = _Client(self.PROJECT)
-        source = _Table(self.SOURCE_TABLE)
-        job = self._makeOne(self.JOB_NAME, source, [self.DESTINATION_URI],
-                            client)
-        job.print_header = False
-        self.assertEqual(job.print_header, False)
-        del job.print_header
-        self.assertTrue(job.print_header is None)
-
     def test_begin_w_bound_client(self):
         PATH = 'projects/%s/jobs' % self.PROJECT
         RESOURCE = self._makeResource()