bigquery: document job location (#3142)
Fixes #3126.
pongad authored Apr 10, 2018
1 parent 696232f commit 2a10cae
Showing 2 changed files with 102 additions and 19 deletions.
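The commit touches four call paths (getJob, cancel, query, and the writer channel) but documents a single idea: for jobs that do not run in the "US" or "EU" multi-regions, the location has to travel on the JobId. A minimal sketch of that idea, using only the builder call the new javadoc itself shows ("asia-northeast1" is simply the sample region used in the commit):

JobId jobId = JobId.newBuilder().setLocation("asia-northeast1").build();
// A location-only JobId like this is enough for the overloads that create a job
// (query and writer); for getJob and cancel the JobId must also name the job.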
@@ -970,10 +970,12 @@ TableResult listTableData(
TableResult listTableData(TableId tableId, Schema schema, TableDataListOption... options);

/**
* Returns the requested job or {@code null} if not found.
* Returns the requested job or {@code null} if not found. If the location of the job is not "US"
* or "EU", {@link #getJob(JobId, JobOption...)} must be used instead.
*
* <p>Example of getting a job.
* <pre> {@code
*
* <pre>{@code
* String jobName = "my_job_name";
* Job job = bigquery.getJob(jobName);
* if (job == null) {
@@ -986,10 +988,12 @@ TableResult listTableData(
Job getJob(String jobId, JobOption... options);

/**
* Returns the requested job or {@code null} if not found.
* Returns the requested job or {@code null} if not found. If the location of the job is not "US"
* or "EU", the {@code jobId} must specify the job location.
*
* <p>Example of getting a job.
* <pre> {@code
*
* <pre>{@code
* String jobName = "my_job_name";
* JobId jobIdObject = JobId.of(jobName);
* Job job = bigquery.getJob(jobIdObject);
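The example above builds JobId.of(jobName) with no location, which the new sentence says is only enough for "US"/"EU" jobs. A hedged sketch of the non-US/EU variant, assuming (like the javadoc examples) a BigQuery client named bigquery, and assuming JobId.Builder exposes a setJob setter alongside the setLocation call used elsewhere in this commit:

String jobName = "my_job_name";
// For a job outside "US"/"EU", the JobId has to name the job and carry its location.
JobId jobIdWithLocation =
    JobId.newBuilder()
        .setJob(jobName)                 // assumed setter; the commit itself only shows setLocation
        .setLocation("asia-northeast1")  // sample region taken from the commit
        .build();
Job job = bigquery.getJob(jobIdWithLocation);
if (job == null) {
  // job was not found
}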
@@ -1019,11 +1023,14 @@ TableResult listTableData(

/**
* Sends a job cancel request. This call will return immediately. The job status can then be
* checked using either {@link #getJob(JobId, JobOption...)} or
* {@link #getJob(String, JobOption...)}).
* checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String,
 * JobOption...)}.
*
* <p>If the location of the job is not "US" or "EU", {@link #cancel(JobId)} must be used instead.
*
* <p>Example of cancelling a job.
* <pre> {@code
*
* <pre>{@code
* String jobName = "my_job_name";
* boolean success = bigquery.cancel(jobName);
* if (success) {
@@ -1041,11 +1048,15 @@ TableResult listTableData(

/**
* Sends a job cancel request. This call will return immediately. The job status can then be
* checked using either {@link #getJob(JobId, JobOption...)} or
* {@link #getJob(String, JobOption...)}).
* checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String,
 * JobOption...)}.
*
* <p>If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
* location.
*
* <p>Example of cancelling a job.
* <pre> {@code
*
* <pre>{@code
* String jobName = "my_job_name";
* JobId jobId = JobId.of(jobName);
* boolean success = bigquery.cancel(jobId);
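The same sketch applies to cancellation, again under the assumption that JobId.Builder has a setJob setter next to the setLocation call shown in this commit:

String jobName = "my_job_name";
JobId jobIdWithLocation =
    JobId.newBuilder()
        .setJob(jobName)                // assumed setter
        .setLocation("asia-northeast1") // region in which the job was created
        .build();
boolean success = bigquery.cancel(jobIdWithLocation);
if (success) {
  // job was cancelled
} else {
  // job was not found
}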
@@ -1065,11 +1076,15 @@ TableResult listTableData(
/**
* Runs the query associated with the request, using an internally-generated random JobId.
*
* <p>If the location of the job is not "US" or "EU", {@link #query(QueryJobConfiguration, JobId,
* JobOption...)} must be used instead.
*
 * <p>This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()}
* queries. Since dry-run queries are not actually executed, there's no way to retrieve results.
*
* <p>Example of running a query.
* <pre> {@code
*
* <pre>{@code
* String query = "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]";
* QueryJobConfiguration queryConfig =
* QueryJobConfiguration.newBuilder(query).setUseLegacySql(true).build();
@@ -1079,7 +1094,8 @@ TableResult listTableData(
* }</pre>
*
* <p>Example of running a query with query parameters.
* <pre> {@code
*
* <pre>{@code
* String query = "SELECT distinct(corpus) FROM `bigquery-public-data.samples.shakespeare` where word_count > @wordCount";
* // Note, standard SQL is required to use query parameters. Legacy SQL will not work.
* QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
@@ -1101,6 +1117,9 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options)
/**
* Runs the query associated with the request, using the given JobId.
*
* <p>If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
* location.
*
 * <p>This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()}
* queries. Since dry-run queries are not actually executed, there's no way to retrieve results.
*
@@ -1125,10 +1144,12 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...

/**
* Returns a channel to write data to be inserted into a BigQuery table. Data format and other
* options can be configured using the {@link WriteChannelConfiguration} parameter.
* options can be configured using the {@link WriteChannelConfiguration} parameter. If the job is
* not in "US" or "EU", {@link #writer(JobId, WriteChannelConfiguration)} must be used instead.
*
* <p>Example of creating a channel with which to write to a table.
* <pre> {@code
*
* <pre>{@code
* String datasetName = "my_dataset_name";
* String tableName = "my_table_name";
* String csvData = "StringValue1\nStringValue2\n";
@@ -1152,7 +1173,8 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
* }</pre>
*
* <p>Example of writing a local file to a table.
* <pre> {@code
*
* <pre>{@code
* String datasetName = "my_dataset_name";
* String tableName = "my_table_name";
* Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
@@ -1177,6 +1199,37 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
*/
TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration);

// TODO(pongad): document
/**
* Returns a channel to write data to be inserted into a BigQuery table. Data format and other
* options can be configured using the {@link WriteChannelConfiguration} parameter. If the job is
* not in "US" or "EU", the {@code jobId} must contain the location of the job.
*
* <p>Example of creating a channel with which to write to a table.
*
* <pre>{@code
* String datasetName = "my_dataset_name";
* String tableName = "my_table_name";
 * String csvData = "StringValue1\nStringValue2\n";
* String location = "asia-northeast1";
* TableId tableId = TableId.of(datasetName, tableName);
* WriteChannelConfiguration writeChannelConfiguration =
* WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
* // The location must be specified; other fields can be auto-detected.
* JobId jobId = JobId.newBuilder().setLocation(location).build();
* TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
* // Write data to writer
* try {
* writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
* } finally {
* writer.close();
* }
* // Get load job
* Job job = writer.getJob();
* job = job.waitFor();
* LoadStatistics stats = job.getStatistics();
* return stats.getOutputRows();
* }</pre>
*/
TableDataWriteChannel writer(JobId jobId, WriteChannelConfiguration writeChannelConfiguration);
}
@@ -25,9 +25,6 @@
import com.google.api.client.util.Charsets;
import com.google.api.gax.paging.Page;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.JobInfo.CreateDisposition;
import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.TableResult;
import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
import com.google.cloud.bigquery.BigQuery.DatasetListOption;
import com.google.cloud.bigquery.BigQuery.JobListOption;
@@ -47,8 +44,10 @@
import com.google.cloud.bigquery.JobConfiguration;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.JobInfo.CreateDisposition;
import com.google.cloud.bigquery.JobStatistics.LoadStatistics;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.QueryParameterValue;
import com.google.cloud.bigquery.Schema;
@@ -58,6 +57,7 @@
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.TableResult;
import com.google.cloud.bigquery.WriteChannelConfiguration;
import java.io.IOException;
import java.io.OutputStream;
@@ -352,6 +352,36 @@ public long writeToTable(String datasetName, String tableName, String csvData)
// [END writeToTable]
}

/** Example of creating a channel with which to write to a table. */
// [TARGET writer(WriteChannelConfiguration)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
// [VARIABLE "StringValue1\nStringValue2\n"]
// [VARIABLE "asia-northeast1"]
public long writeToTableLocation(
String datasetName, String tableName, String csvData, String location)
throws IOException, InterruptedException, TimeoutException {
// [START writeToTableLocation]
TableId tableId = TableId.of(datasetName, tableName);
WriteChannelConfiguration writeChannelConfiguration =
WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
// The location must be specified; other fields can be auto-detected.
JobId jobId = JobId.newBuilder().setLocation(location).build();
TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
// Write data to writer
try {
writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
} finally {
writer.close();
}
// Get load job
Job job = writer.getJob();
job = job.waitFor();
LoadStatistics stats = job.getStatistics();
return stats.getOutputRows();
// [END writeToTableLocation]
}

/**
* Example of writing a local file to a table.
*/