enums.py
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import enum
import itertools

from google.cloud.bigquery_v2 import types as gapic_types
from google.cloud.bigquery.query import ScalarQueryParameterType


class Compression(object):
    """The compression type to use for exported files. The default value is
    :attr:`NONE`.

    :attr:`DEFLATE` and :attr:`SNAPPY` are only supported for Avro.
    """

    GZIP = "GZIP"
    """Specifies GZIP format."""

    DEFLATE = "DEFLATE"
    """Specifies DEFLATE format."""

    SNAPPY = "SNAPPY"
    """Specifies SNAPPY format."""

    NONE = "NONE"
    """Specifies no compression."""


class CreateDisposition(object):
    """Specifies whether the job is allowed to create new tables. The default
    value is :attr:`CREATE_IF_NEEDED`.

    Creation, truncation and append actions occur as one atomic update
    upon job completion.
    """

    CREATE_IF_NEEDED = "CREATE_IF_NEEDED"
    """If the table does not exist, BigQuery creates the table."""

    CREATE_NEVER = "CREATE_NEVER"
    """The table must already exist. If it does not, a 'notFound' error is
    returned in the job result."""
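
# Usage sketch (editor's note): create dispositions apply to load, copy, and
# query jobs; for example, on a query job config:
#
#     job_config = bigquery.QueryJobConfig()
#     job_config.create_disposition = CreateDisposition.CREATE_NEVER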


class DestinationFormat(object):
    """The exported file format. The default value is :attr:`CSV`.

    Tables with nested or repeated fields cannot be exported as CSV.
    """

    CSV = "CSV"
    """Specifies CSV format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Specifies newline delimited JSON format."""

    AVRO = "AVRO"
    """Specifies Avro format."""

    PARQUET = "PARQUET"
    """Specifies Parquet format."""


class Encoding(object):
    """The character encoding of the data. The default is :attr:`UTF_8`.

    BigQuery decodes the data after the raw, binary data has been
    split using the values of the quote and fieldDelimiter properties.
    """

    UTF_8 = "UTF-8"
    """Specifies UTF-8 encoding."""

    ISO_8859_1 = "ISO-8859-1"
    """Specifies ISO-8859-1 encoding."""


class QueryPriority(object):
    """Specifies a priority for the query. The default value is
    :attr:`INTERACTIVE`.
    """

    INTERACTIVE = "INTERACTIVE"
    """Specifies interactive priority."""

    BATCH = "BATCH"
    """Specifies batch priority."""


class SchemaUpdateOption(object):
    """Specifies an update to the destination table schema as a side effect of
    a load job.
    """

    ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION"
    """Allow adding a nullable field to the schema."""

    ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION"
    """Allow relaxing a required field in the original schema to nullable."""


class SourceFormat(object):
    """The format of the data files. The default value is :attr:`CSV`.

    Note that the set of allowed values for loading data is different
    from the set used for external data sources (see
    :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`).
    """

    CSV = "CSV"
    """Specifies CSV format."""

    DATASTORE_BACKUP = "DATASTORE_BACKUP"
    """Specifies Datastore backup format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Specifies newline delimited JSON format."""

    AVRO = "AVRO"
    """Specifies Avro format."""

    PARQUET = "PARQUET"
    """Specifies Parquet format."""

    ORC = "ORC"
    """Specifies ORC format."""


_SQL_SCALAR_TYPES = frozenset(
    (
        "INT64",
        "BOOL",
        "FLOAT64",
        "STRING",
        "BYTES",
        "TIMESTAMP",
        "DATE",
        "TIME",
        "DATETIME",
        "GEOGRAPHY",
        "NUMERIC",
        "BIGNUMERIC",
    )
)

_SQL_NONSCALAR_TYPES = frozenset(("TYPE_KIND_UNSPECIFIED", "ARRAY", "STRUCT"))


def _make_sql_scalars_enum():
    """Create an enum based on a gapic enum containing only SQL scalar types."""
    new_enum = enum.Enum(
        "StandardSqlDataTypes",
        (
            (member.name, member.value)
            for member in gapic_types.StandardSqlDataType.TypeKind
            if member.name in _SQL_SCALAR_TYPES
        ),
    )

    # make sure the docstring for the new enum is also correct
    orig_doc = gapic_types.StandardSqlDataType.TypeKind.__doc__
    skip_pattern = re.compile(
        "|".join(_SQL_NONSCALAR_TYPES)
        + "|because a JSON object"  # the second description line of STRUCT member
    )

    new_doc = "\n".join(
        itertools.filterfalse(skip_pattern.search, orig_doc.splitlines())
    )
    new_enum.__doc__ = "An Enum of scalar SQL types.\n" + new_doc

    return new_enum


StandardSqlDataTypes = _make_sql_scalars_enum()
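
# Editor's illustration (assuming the gapic ``TypeKind`` enum carries the usual
# members): the non-scalar kinds are filtered out of the generated enum, so:
#
#     >>> StandardSqlDataTypes.INT64.name
#     'INT64'
#     >>> "ARRAY" in StandardSqlDataTypes.__members__
#     False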


# See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types
# and https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
class SqlTypeNames(str, enum.Enum):
    """Enum of allowed SQL type names in schema.SchemaField."""

    STRING = "STRING"
    BYTES = "BYTES"
    INTEGER = "INTEGER"
    INT64 = "INTEGER"
    FLOAT = "FLOAT"
    FLOAT64 = "FLOAT"
    NUMERIC = "NUMERIC"
    BIGNUMERIC = "BIGNUMERIC"
    BOOLEAN = "BOOLEAN"
    BOOL = "BOOLEAN"
    GEOGRAPHY = "GEOGRAPHY"  # NOTE: not available in legacy types
    RECORD = "RECORD"
    STRUCT = "RECORD"
    TIMESTAMP = "TIMESTAMP"
    DATE = "DATE"
    TIME = "TIME"
    DATETIME = "DATETIME"


class SqlParameterScalarTypes:
    """Supported scalar SQL query parameter types as type objects."""

    STRING = ScalarQueryParameterType("STRING")
    BYTES = ScalarQueryParameterType("BYTES")
    INTEGER = ScalarQueryParameterType("INT64")
    INT64 = ScalarQueryParameterType("INT64")
    FLOAT = ScalarQueryParameterType("FLOAT64")
    FLOAT64 = ScalarQueryParameterType("FLOAT64")
    NUMERIC = ScalarQueryParameterType("NUMERIC")
    BIGNUMERIC = ScalarQueryParameterType("BIGNUMERIC")
    BOOLEAN = ScalarQueryParameterType("BOOL")
    BOOL = ScalarQueryParameterType("BOOL")
    GEOGRAPHY = ScalarQueryParameterType("GEOGRAPHY")
    TIMESTAMP = ScalarQueryParameterType("TIMESTAMP")
    DATE = ScalarQueryParameterType("DATE")
    TIME = ScalarQueryParameterType("TIME")
    DATETIME = ScalarQueryParameterType("DATETIME")
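
# Usage sketch (editor's note): these objects describe parameter types without
# values, e.g. when composing an array type (assuming ``ArrayQueryParameterType``
# is available from ``google.cloud.bigquery.query``, as in recent versions):
#
#     from google.cloud.bigquery.query import ArrayQueryParameterType
#
#     int_array_type = ArrayQueryParameterType(SqlParameterScalarTypes.INT64)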


class WriteDisposition(object):
    """Specifies the action that occurs if the destination table already exists.
    The default value is :attr:`WRITE_APPEND`.

    Each action is atomic and only occurs if BigQuery is able to complete
    the job successfully. Creation, truncation and append actions occur as one
    atomic update upon job completion.
    """

    WRITE_APPEND = "WRITE_APPEND"
    """If the table already exists, BigQuery appends the data to the table."""

    WRITE_TRUNCATE = "WRITE_TRUNCATE"
    """If the table already exists, BigQuery overwrites the table data."""

    WRITE_EMPTY = "WRITE_EMPTY"
    """If the table already exists and contains data, a 'duplicate' error is
    returned in the job result."""
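
# Usage sketch (editor's note): for example, replacing a table's contents with
# a query result (table path assumed for illustration):
#
#     job_config = bigquery.QueryJobConfig()
#     job_config.destination = "my-project.my_dataset.my_table"
#     job_config.write_disposition = WriteDisposition.WRITE_TRUNCATE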


class DeterminismLevel:
    """Specifies the determinism level for JavaScript user-defined functions (UDFs).

    https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#DeterminismLevel
    """

    DETERMINISM_LEVEL_UNSPECIFIED = "DETERMINISM_LEVEL_UNSPECIFIED"
    """The determinism of the UDF is unspecified."""

    DETERMINISTIC = "DETERMINISTIC"
    """The UDF is deterministic, meaning that two function calls with the same
    inputs always produce the same result, even across two query runs."""

    NOT_DETERMINISTIC = "NOT_DETERMINISTIC"
    """The UDF is not deterministic."""