From b32e4b94a31584ea93b10c0cb3a87525c39df3e5 Mon Sep 17 00:00:00 2001
From: Marco Ziccardi
Date: Mon, 11 Jan 2016 16:56:52 +0100
Subject: [PATCH 01/18] Add support for BigQuery resumable uploads via a write channel

- Move BlobWriteChannel and BlobReadChannel to core module
- Rename BlobWriteChannel and BlobReadChannel to WriteChannel and ReadChannel
- Add abstract class BaseWriteChannel implementing entity-agnostic channel functionality
- Add BlobWriteChannel and BlobReadChannel implementations to gcloud-java-storage
- Add LoadConfiguration and modify LoadJobInfo to take configuration as a parameter
- Add BigQuery.writer method to return a writer given LoadConfiguration
- Add BigQueryRpc.open and .write methods to implement the write channel
- Add TableDataWriteChannel class to support BigQuery resumable streaming inserts
- Add unit and integration tests
- Update bigquery example with load-data action
---
 .../com/google/gcloud/bigquery/BigQuery.java | 8 +
 .../google/gcloud/bigquery/BigQueryImpl.java | 13 +-
 .../google/gcloud/bigquery/CopyJobInfo.java | 16 +-
 .../gcloud/bigquery/LoadConfiguration.java | 382 ++++++++++++++++++
 .../google/gcloud/bigquery/LoadJobInfo.java | 347 ++--------------
 .../google/gcloud/bigquery/QueryJobInfo.java | 16 +-
 .../com/google/gcloud/bigquery/Table.java | 4 +-
 .../bigquery/TableDataWriteChannel.java | 91 +++++
 .../com/google/gcloud/spi/BigQueryRpc.java | 23 ++
 .../google/gcloud/spi/DefaultBigQueryRpc.java | 81 ++++
 .../gcloud/bigquery/BigQueryImplTest.java | 19 +-
 .../gcloud/bigquery/ITBigQueryTest.java | 85 +++-
 .../bigquery/LoadConfigurationTest.java | 123 ++++++
 .../gcloud/bigquery/LoadJobInfoTest.java | 87 ++--
 .../gcloud/bigquery/SerializationTest.java | 49 ++-
 .../bigquery/TableDataWriteChannelTest.java | 248 ++++++++++++
 .../com/google/gcloud/bigquery/TableTest.java | 6 +-
 .../com/google/gcloud/BaseWriteChannel.java | 293 ++++++++++++++
 .../java/com/google/gcloud/ReadChannel.java | 57 +++
 .../java/com/google/gcloud/WriteChannel.java | 48 +++
 .../google/gcloud/BaseWriteChannelTest.java | 144 +++++++
 .../gcloud/examples/BigQueryExample.java | 56 ++-
 .../gcloud/examples/StorageExample.java | 8 +-
 .../java/com/google/gcloud/storage/Blob.java | 10 +-
 .../gcloud/storage/BlobReadChannel.java | 286 +++++++++++--
 .../gcloud/storage/BlobReadChannelImpl.java | 278 -------------
 .../gcloud/storage/BlobWriteChannel.java | 94 +++--
 .../gcloud/storage/BlobWriteChannelImpl.java | 273 -------------
 .../com/google/gcloud/storage/Storage.java | 8 +-
 .../google/gcloud/storage/StorageImpl.java | 11 +-
 ...ImplTest.java => BlobReadChannelTest.java} | 37 +-
 .../com/google/gcloud/storage/BlobTest.java | 3 +-
 ...mplTest.java => BlobWriteChannelTest.java} | 46 +--
 .../google/gcloud/storage/ITStorageTest.java | 33 +-
 .../gcloud/storage/SerializationTest.java | 18 +-
 .../gcloud/storage/StorageImplTest.java | 12 +-
 36 files changed, 2181 insertions(+), 1132 deletions(-)
 create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java
 create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java
 create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadConfigurationTest.java
 create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java
 create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java
 create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java
 create mode 100644
gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java create mode 100644 gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java rename gcloud-java-storage/src/test/java/com/google/gcloud/storage/{BlobReadChannelImplTest.java => BlobReadChannelTest.java} (86%) rename gcloud-java-storage/src/test/java/com/google/gcloud/storage/{BlobWriteChannelImplTest.java => BlobWriteChannelTest.java} (85%) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java index af5ced9d4230..aa516c31fb54 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -662,4 +662,12 @@ Page> listTableData(TableId tableId, TableDataListOption... opt * @throws BigQueryException upon failure */ QueryResponse getQueryResults(JobId job, QueryResultsOption... options) throws BigQueryException; + + /** + * Returns a channel to write data to be inserted into a BigQuery table. Data format and other + * options can be configured using the {@link LoadConfiguration} parameter. + * + * @throws BigQueryException upon failure + */ + TableDataWriteChannel writer(LoadConfiguration loadConfiguration); } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java index 9bc89206889b..3a1cc658bef3 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -619,6 +619,10 @@ private static QueryResult.Builder transformQueryResults(JobId jobId, List optionMap(Option... options) { Map optionMap = Maps.newEnumMap(BigQueryRpc.Option.class); for (Option option : options) { @@ -698,8 +702,7 @@ public TableId apply(TableId tableId) { if (job instanceof LoadJobInfo) { LoadJobInfo loadJob = (LoadJobInfo) job; LoadJobInfo.Builder loadBuilder = loadJob.toBuilder(); - loadBuilder.destinationTable(setProjectId(loadJob.destinationTable())); - return loadBuilder.build(); + return loadBuilder.configuration(setProjectId(loadJob.configuration())).build(); } return job; } @@ -711,4 +714,10 @@ private QueryRequest setProjectId(QueryRequest request) { } return builder.build(); } + + private LoadConfiguration setProjectId(LoadConfiguration configuration) { + LoadConfiguration.Builder builder = configuration.toBuilder(); + builder.destinationTable(setProjectId(configuration.destinationTable())); + return builder.build(); + } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java index a3247b78d5b8..bd346a8e1633 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java @@ -95,8 +95,8 @@ public Builder destinationTable(TableId destinationTable) { /** * Sets whether the job is allowed to create new tables. 
* - * @see - * Jobs: Link Configuration + * @see + * Create Disposition */ public Builder createDisposition(CreateDisposition createDisposition) { this.createDisposition = createDisposition; @@ -106,8 +106,8 @@ public Builder createDisposition(CreateDisposition createDisposition) { /** * Sets the action that should occur if the destination table already exists. * - * @see - * Jobs: Link Configuration + * @see + * Write Disposition */ public Builder writeDisposition(WriteDisposition writeDisposition) { this.writeDisposition = writeDisposition; @@ -145,8 +145,8 @@ public TableId destinationTable() { /** * Returns whether the job is allowed to create new tables. * - * @see - * Jobs: Copy Configuration + * @see + * Create Disposition */ public CreateDisposition createDisposition() { return this.createDisposition; @@ -155,8 +155,8 @@ public CreateDisposition createDisposition() { /** * Returns the action that should occur if the destination table already exists. * - * @see - * Jobs: Copy Configuration + * @see + * Write Disposition */ public WriteDisposition writeDisposition() { return writeDisposition; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java new file mode 100644 index 000000000000..18cb8ae6bedb --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java @@ -0,0 +1,382 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import java.io.Serializable; +import java.nio.channels.SeekableByteChannel; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Configuration for a load operation. A load configuration can be used to build a + * {@link LoadJobInfo} or to load data into a table with a {@link com.google.gcloud.WriteChannel} + * ({@link BigQuery#writer(LoadConfiguration)}). 
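+ *
+ * <p>A minimal usage sketch, assuming an existing destination table and a {@code BigQuery}
+ * service instance {@code bigquery} (the dataset and table names below are placeholders):
+ * <pre> {@code
+ * LoadConfiguration configuration = LoadConfiguration.of(TableId.of("dataset", "table"));
+ * try (WriteChannel channel = bigquery.writer(configuration)) {
+ *   channel.write(ByteBuffer.wrap("value1,value2\n".getBytes(StandardCharsets.UTF_8)));
+ * }
+ * }</pre>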
+ */ +public class LoadConfiguration implements Serializable { + + private static final long serialVersionUID = 470267591917413578L; + + private final TableId destinationTable; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Schema schema; + private final Boolean ignoreUnknownValues; + private final List projectionFields; + + public static final class Builder { + + private TableId destinationTable; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Schema schema; + private Boolean ignoreUnknownValues; + private List projectionFields; + + private Builder() {} + + private Builder(LoadConfiguration loadConfiguration) { + this.destinationTable = loadConfiguration.destinationTable; + this.createDisposition = loadConfiguration.createDisposition; + this.writeDisposition = loadConfiguration.writeDisposition; + this.formatOptions = loadConfiguration.formatOptions; + this.maxBadRecords = loadConfiguration.maxBadRecords; + this.schema = loadConfiguration.schema; + this.ignoreUnknownValues = loadConfiguration.ignoreUnknownValues; + this.projectionFields = loadConfiguration.projectionFields; + } + + private Builder(JobConfigurationLoad loadConfigurationPb) { + this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); + if (loadConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); + } + if (loadConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition()); + } + if (loadConfigurationPb.getSourceFormat() != null) { + this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat()); + } + if (loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getAllowQuotedNewlines() != null + || loadConfigurationPb.getEncoding() != null + || loadConfigurationPb.getFieldDelimiter() != null + || loadConfigurationPb.getQuote() != null + || loadConfigurationPb.getSkipLeadingRows() != null) { + CsvOptions.Builder builder = CsvOptions.builder() + .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows()) + .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines()) + .encoding(loadConfigurationPb.getEncoding()) + .fieldDelimiter(loadConfigurationPb.getFieldDelimiter()) + .quote(loadConfigurationPb.getQuote()) + .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows()); + this.formatOptions = builder.build(); + } + this.maxBadRecords = loadConfigurationPb.getMaxBadRecords(); + if (loadConfigurationPb.getSchema() != null) { + this.schema = Schema.fromPb(loadConfigurationPb.getSchema()); + } + this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues(); + this.projectionFields = loadConfigurationPb.getProjectionFields(); + } + + /** + * Sets the destination table to load the data into. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + /** + * Sets whether the job is allowed to create new tables. 
+ * + * @see + * Create Disposition + */ + public Builder createDisposition(CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + /** + * Sets the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV}, {@code NEWLINE_DELIMITED_JSON} and {@code DATASTORE_BACKUP}. If not + * specified, {@code CSV} format is assumed. + * + * + * Source Format + */ + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = formatOptions; + return this; + } + + /** + * Sets the maximum number of bad records that BigQuery can ignore when running the job. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * By default no bad record is ignored. + */ + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + /** + * Sets the schema for the destination table. The schema can be omitted if the destination table + * already exists, or if you're loading data from a Google Cloud Datastore backup (i.e. + * {@code DATASTORE_BACKUP} format option). + */ + public Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + /** + * Sets whether BigQuery should allow extra values that are not represented in the table schema. + * If {@code true}, the extra values are ignored. If {@code false}, records with extra columns + * are treated as bad records, and if there are too many bad records, an invalid error is + * returned in the job result. By default unknown values are not allowed. + */ + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * Sets which entity properties to load into BigQuery from a Cloud Datastore backup. This field + * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case + * sensitive and must be top-level properties. If no properties are specified, BigQuery loads + * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid + * error is returned in the job result. + */ + public Builder projectionFields(List projectionFields) { + this.projectionFields = + projectionFields != null ? ImmutableList.copyOf(projectionFields) : null; + return this; + } + + public LoadConfiguration build() { + return new LoadConfiguration(this); + } + } + + private LoadConfiguration(Builder builder) { + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + this.formatOptions = builder.formatOptions; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.projectionFields = builder.projectionFields; + } + + /** + * Returns the destination table to load the data into. + */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether the job is allowed to create new tables. 
* + * @see + * Create Disposition + */ + public CreateDisposition createDisposition() { + return this.createDisposition; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + /** + * Returns additional properties used to parse CSV data (used when {@link #format()} is set + * to CSV). Returns {@code null} if not set. + */ + public CsvOptions csvOptions() { + return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null; + } + + /** + * Returns the maximum number of bad records that BigQuery can ignore when running the job. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * By default no bad record is ignored. + */ + public Integer maxBadRecords() { + return maxBadRecords; + } + + /** + * Returns the schema for the destination table, if set. Returns {@code null} otherwise. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the format of the data files. + */ + public String format() { + return formatOptions != null ? formatOptions.type() : null; + } + + /** + * Returns whether BigQuery should allow extra values that are not represented in the table + * schema. If {@code true}, the extra values are ignored. If {@code false}, records with extra + * columns are treated as bad records, and if there are too many bad records, an invalid error is + * returned in the job result. By default unknown values are not allowed. + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns which entity properties to load into BigQuery from a Cloud Datastore backup. This field + * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case + * sensitive and must be top-level properties. If no properties are specified, BigQuery loads + * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid + * error is returned in the job result. 
+ */ + public List projectionFields() { + return projectionFields; + } + + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("formatOptions", formatOptions) + .add("maxBadRecords", maxBadRecords) + .add("schema", schema) + .add("ignoreUnknownValue", ignoreUnknownValues) + .add("projectionFields", projectionFields) + .toString(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof LoadConfiguration + && Objects.equals(toPb(), ((LoadConfiguration) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(destinationTable, createDisposition, writeDisposition, formatOptions, + maxBadRecords, schema, ignoreUnknownValues, projectionFields); + } + + JobConfigurationLoad toPb() { + JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad(); + loadConfigurationPb.setDestinationTable(destinationTable.toPb()); + if (createDisposition != null) { + loadConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + loadConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + if (csvOptions() != null) { + CsvOptions csvOptions = csvOptions(); + loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter()) + .setAllowJaggedRows(csvOptions.allowJaggedRows()) + .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) + .setEncoding(csvOptions.encoding()) + .setQuote(csvOptions.quote()) + .setSkipLeadingRows(csvOptions.skipLeadingRows()); + } + if (schema != null) { + loadConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + loadConfigurationPb.setSourceFormat(formatOptions.type()); + } + loadConfigurationPb.setMaxBadRecords(maxBadRecords); + loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + loadConfigurationPb.setProjectionFields(projectionFields); + return loadConfigurationPb; + } + + static LoadConfiguration fromPb(JobConfigurationLoad configurationPb) { + return new Builder(configurationPb).build(); + } + + /** + * Creates a builder for a BigQuery Load Configuration given the destination table. + */ + public static Builder builder(TableId destinationTable) { + return new Builder().destinationTable(destinationTable); + } + + /** + * Creates a builder for a BigQuery Load Configuration given the destination table and format. + */ + public static Builder builder(TableId destinationTable, FormatOptions format) { + return new Builder().destinationTable(destinationTable).formatOptions(format); + } + + /** + * Returns a BigQuery Load Configuration for the given destination table. + */ + public static LoadConfiguration of(TableId destinationTable) { + return builder(destinationTable).build(); + } + + /** + * Returns a BigQuery Load Configuration for the given destination table and format. 
+ */ + public static LoadConfiguration of(TableId destinationTable, FormatOptions format) { + return builder(destinationTable).formatOptions(format).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobInfo.java index 1120bbbacf3f..4f8d03cbc6a9 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobInfo.java @@ -34,81 +34,29 @@ */ public class LoadJobInfo extends JobInfo { - private static final long serialVersionUID = 2515503817007974115L; + private static final long serialVersionUID = 6349304826867750535L; private final List sourceUris; - private final TableId destinationTable; - private final CreateDisposition createDisposition; - private final WriteDisposition writeDisposition; - private final FormatOptions formatOptions; - private final Integer maxBadRecords; - private final Schema schema; - private final Boolean ignoreUnknownValues; - private final List projectionFields; + private final LoadConfiguration configuration; public static final class Builder extends JobInfo.Builder { private List sourceUris; - private TableId destinationTable; - private CreateDisposition createDisposition; - private WriteDisposition writeDisposition; - private FormatOptions formatOptions; - private Integer maxBadRecords; - private Schema schema; - private Boolean ignoreUnknownValues; - private List projectionFields; + private LoadConfiguration configuration; private Builder() {} private Builder(LoadJobInfo jobInfo) { super(jobInfo); this.sourceUris = jobInfo.sourceUris; - this.destinationTable = jobInfo.destinationTable; - this.createDisposition = jobInfo.createDisposition; - this.writeDisposition = jobInfo.writeDisposition; - this.formatOptions = jobInfo.formatOptions; - this.maxBadRecords = jobInfo.maxBadRecords; - this.schema = jobInfo.schema; - this.ignoreUnknownValues = jobInfo.ignoreUnknownValues; - this.projectionFields = jobInfo.projectionFields; + this.configuration = jobInfo.configuration; } private Builder(Job jobPb) { super(jobPb); JobConfigurationLoad loadConfigurationPb = jobPb.getConfiguration().getLoad(); + this.configuration = LoadConfiguration.fromPb(loadConfigurationPb); this.sourceUris = loadConfigurationPb.getSourceUris(); - this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); - if (loadConfigurationPb.getCreateDisposition() != null) { - this.createDisposition = - CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); - } - if (loadConfigurationPb.getWriteDisposition() != null) { - this.writeDisposition = WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition()); - } - if (loadConfigurationPb.getSourceFormat() != null) { - this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat()); - } - if (loadConfigurationPb.getAllowJaggedRows() != null - || loadConfigurationPb.getAllowQuotedNewlines() != null - || loadConfigurationPb.getEncoding() != null - || loadConfigurationPb.getFieldDelimiter() != null - || loadConfigurationPb.getQuote() != null - || loadConfigurationPb.getSkipLeadingRows() != null) { - CsvOptions.Builder builder = CsvOptions.builder() - .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows()) - .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines()) - .encoding(loadConfigurationPb.getEncoding()) - .fieldDelimiter(loadConfigurationPb.getFieldDelimiter()) - 
.quote(loadConfigurationPb.getQuote()) - .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows()); - this.formatOptions = builder.build(); - } - this.maxBadRecords = loadConfigurationPb.getMaxBadRecords(); - if (loadConfigurationPb.getSchema() != null) { - this.schema = Schema.fromPb(loadConfigurationPb.getSchema()); - } - this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues(); - this.projectionFields = loadConfigurationPb.getProjectionFields(); } /** @@ -122,88 +70,10 @@ public Builder sourceUris(List sourceUris) { } /** - * Sets the destination table to load the data into. + * Sets the configuration for the BigQuery Load Job. */ - public Builder destinationTable(TableId destinationTable) { - this.destinationTable = destinationTable; - return this; - } - - /** - * Sets whether the job is allowed to create new tables. - * - * @see - * Jobs: Load Configuration - */ - public Builder createDisposition(CreateDisposition createDisposition) { - this.createDisposition = createDisposition; - return this; - } - - /** - * Sets the action that should occur if the destination table already exists. - * - * @see - * Jobs: Load Configuration - */ - public Builder writeDisposition(WriteDisposition writeDisposition) { - this.writeDisposition = writeDisposition; - return this; - } - - /** - * Sets the source format, and possibly some parsing options, of the external data. Supported - * formats are {@code CSV}, {@code NEWLINE_DELIMITED_JSON} and {@code DATASTORE_BACKUP}. If not - * specified, {@code CSV} format is assumed. - * - * - * Source Format - */ - public Builder formatOptions(FormatOptions formatOptions) { - this.formatOptions = formatOptions; - return this; - } - - /** - * Sets the maximum number of bad records that BigQuery can ignore when running the job. If the - * number of bad records exceeds this value, an invalid error is returned in the job result. - * By default no bad record is ignored. - */ - public Builder maxBadRecords(Integer maxBadRecords) { - this.maxBadRecords = maxBadRecords; - return this; - } - - /** - * Sets the schema for the destination table. The schema can be omitted if the destination table - * already exists, or if you're loading data from Google Cloud Datastore. - */ - public Builder schema(Schema schema) { - this.schema = schema; - return this; - } - - /** - * Sets whether BigQuery should allow extra values that are not represented in the table schema. - * If {@code true}, the extra values are ignored. If {@code true}, records with extra columns - * are treated as bad records, and if there are too many bad records, an invalid error is - * returned in the job result. By default unknown values are not allowed. - */ - public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { - this.ignoreUnknownValues = ignoreUnknownValues; - return this; - } - - /** - * Sets which entity properties to load into BigQuery from a Cloud Datastore backup. This field - * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case - * sensitive and must be top-level properties. If no properties are specified, BigQuery loads - * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid - * error is returned in the job result. - */ - public Builder projectionFields(List projectionFields) { - this.projectionFields = - projectionFields != null ? 
ImmutableList.copyOf(projectionFields) : null; + public Builder configuration(LoadConfiguration configuration) { + this.configuration = configuration; return this; } @@ -216,14 +86,7 @@ public LoadJobInfo build() { private LoadJobInfo(Builder builder) { super(builder); this.sourceUris = builder.sourceUris; - this.destinationTable = checkNotNull(builder.destinationTable); - this.createDisposition = builder.createDisposition; - this.writeDisposition = builder.writeDisposition; - this.formatOptions = builder.formatOptions; - this.maxBadRecords = builder.maxBadRecords; - this.schema = builder.schema; - this.ignoreUnknownValues = builder.ignoreUnknownValues; - this.projectionFields = builder.projectionFields; + this.configuration = builder.configuration; } /** @@ -236,82 +99,10 @@ public List sourceUris() { } /** - * Returns the destination table to load the data into. + * Returns the configuration for the BigQuery Load Job. */ - public TableId destinationTable() { - return destinationTable; - } - - /** - * Returns whether the job is allowed to create new tables. - * - * @see - * Jobs: Load Configuration - */ - public CreateDisposition createDisposition() { - return this.createDisposition; - } - - /** - * Returns the action that should occur if the destination table already exists. - * - * @see - * Jobs: Load Configuration - */ - public WriteDisposition writeDisposition() { - return writeDisposition; - } - - /** - * Returns additional properties used to parse CSV data (used when {@link #format()} is set - * to CSV). Returns {@code null} if not set. - */ - public CsvOptions csvOptions() { - return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null; - } - - /** - * Returns the maximum number of bad records that BigQuery can ignore when running the job. If the - * number of bad records exceeds this value, an invalid error is returned in the job result. - * By default no bad record is ignored. - */ - public Integer maxBadRecords() { - return maxBadRecords; - } - - /** - * Returns the schema for the destination table, if set. Returns {@code null} otherwise. - */ - public Schema schema() { - return schema; - } - - /** - * Returns the format of the data files. - */ - public String format() { - return formatOptions != null ? formatOptions.type() : null; - } - - /** - * Returns whether BigQuery should allow extra values that are not represented in the table - * schema. If {@code true}, the extra values are ignored. If {@code true}, records with extra - * columns are treated as bad records, and if there are too many bad records, an invalid error is - * returned in the job result. By default unknown values are not allowed. - */ - public Boolean ignoreUnknownValues() { - return ignoreUnknownValues; - } - - /** - * Returns which entity properties to load into BigQuery from a Cloud Datastore backup. This field - * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case - * sensitive and must be top-level properties. If no properties are specified, BigQuery loads - * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid - * error is returned in the job result. 
- */ - public List projectionFields() { - return projectionFields; + public LoadConfiguration configuration() { + return configuration; } @Override @@ -321,16 +112,7 @@ public Builder toBuilder() { @Override ToStringHelper toStringHelper() { - return super.toStringHelper() - .add("destinationTable", destinationTable) - .add("sourceUris", sourceUris) - .add("createDisposition", createDisposition) - .add("writeDisposition", writeDisposition) - .add("formatOptions", formatOptions) - .add("maxBadRecords", maxBadRecords) - .add("schema", schema) - .add("ignoreUnknownValue", ignoreUnknownValues) - .add("projectionFields", projectionFields); + return super.toStringHelper().add("sourceUris", sourceUris).add("configuration", configuration); } @Override @@ -340,122 +122,61 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), sourceUris, destinationTable, createDisposition, - writeDisposition, formatOptions, maxBadRecords, schema, ignoreUnknownValues, - projectionFields); + return Objects.hash(super.hashCode(), sourceUris, configuration); } @Override Job toPb() { - JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad(); + JobConfigurationLoad loadConfigurationPb = configuration.toPb(); loadConfigurationPb.setSourceUris(sourceUris); - loadConfigurationPb.setDestinationTable(destinationTable.toPb()); - if (createDisposition != null) { - loadConfigurationPb.setCreateDisposition(createDisposition.toString()); - } - if (writeDisposition != null) { - loadConfigurationPb.setWriteDisposition(writeDisposition.toString()); - } - if (csvOptions() != null) { - CsvOptions csvOptions = csvOptions(); - loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter()) - .setAllowJaggedRows(csvOptions.allowJaggedRows()) - .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) - .setEncoding(csvOptions.encoding()) - .setQuote(csvOptions.quote()) - .setSkipLeadingRows(csvOptions.skipLeadingRows()); - } - if (schema != null) { - loadConfigurationPb.setSchema(schema.toPb()); - } - if (formatOptions != null) { - loadConfigurationPb.setSourceFormat(formatOptions.type()); - } - loadConfigurationPb.setMaxBadRecords(maxBadRecords); - loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); - loadConfigurationPb.setProjectionFields(projectionFields); return super.toPb().setConfiguration(new JobConfiguration().setLoad(loadConfigurationPb)); } /** - * Creates a builder for a BigQuery Load Job given destination table and source URI. + * Creates a builder for a BigQuery Load Job given the load configuration and source URI. */ - public static Builder builder(TableId destinationTable, String sourceUri) { - return builder(destinationTable, ImmutableList.of(checkNotNull(sourceUri))); + public static Builder builder(LoadConfiguration configuration, String sourceUri) { + return builder(configuration, ImmutableList.of(checkNotNull(sourceUri))); } /** - * Creates a builder for a BigQuery Load Job given destination table and source URIs. + * Creates a builder for a BigQuery Load Job given the load configuration and source URIs. */ - public static Builder builder(TableId destinationTable, List sourceUris) { - return new Builder().destinationTable(destinationTable).sourceUris(sourceUris); + public static Builder builder(LoadConfiguration configuration, List sourceUris) { + return new Builder().configuration(configuration).sourceUris(sourceUris); } /** - * Returns a BigQuery Load Job for the given destination table and source URI. 
Job's id is chosen + * Returns a BigQuery Load Job for the given load configuration and source URI. Job's id is chosen * by the service. */ - public static LoadJobInfo of(TableId destinationTable, String sourceUri) { - return builder(destinationTable, sourceUri).build(); + public static LoadJobInfo of(LoadConfiguration configuration, String sourceUri) { + return builder(configuration, sourceUri).build(); } /** - * Returns a BigQuery Load Job for the given destination table and source URIs. Job's id is chosen - * by the service. - */ - public static LoadJobInfo of(TableId destinationTable, List sourceUris) { - return builder(destinationTable, sourceUris).build(); - } - - /** - * Returns a BigQuery Load Job for the given destination table, format and source URI. Job's id is + * Returns a BigQuery Load Job for the given load configuration and source URIs. Job's id is * chosen by the service. */ - public static LoadJobInfo of(TableId destinationTable, FormatOptions format, String sourceUri) { - return builder(destinationTable, sourceUri).formatOptions(format).build(); - } - - /** - * Returns a BigQuery Load Job for the given destination table, format and source URIs. Job's id - * is chosen by the service. - */ - public static LoadJobInfo of(TableId destinationTable, FormatOptions format, - List sourceUris) { - return builder(destinationTable, sourceUris).formatOptions(format).build(); + public static LoadJobInfo of(LoadConfiguration configuration, List sourceUris) { + return builder(configuration, sourceUris).build(); } /** - * Returns a BigQuery Load Job for the given destination table and source URI. Job's id is set to + * Returns a BigQuery Load Job for the given load configuration and source URI. Job's id is set to * the provided value. */ - public static LoadJobInfo of(JobId jobId, TableId destinationTable, String sourceUri) { - return builder(destinationTable, sourceUri).jobId(jobId).build(); - } - - /** - * Returns a BigQuery Load Job for the given destination table and source URIs. Job's id is set to - * the provided value. - */ - public static LoadJobInfo of(JobId jobId, TableId destinationTable, List sourceUris) { - return builder(destinationTable, sourceUris).jobId(jobId).build(); - } - - /** - * Returns a BigQuery Load Job for the given destination table, format, and source URI. Job's id - * is set to the provided value. - */ - public static LoadJobInfo of(JobId jobId, TableId destinationTable, FormatOptions format, - String sourceUri) { - return builder(destinationTable, sourceUri).formatOptions(format).jobId(jobId).build(); + public static LoadJobInfo of(JobId jobId, LoadConfiguration configuration, String sourceUri) { + return builder(configuration, sourceUri).jobId(jobId).build(); } /** - * Returns a BigQuery Load Job for the given destination table, format and source URIs. Job's id - * is set to the provided value. + * Returns a BigQuery Load Job for the given load configuration and source URIs. Job's id is set + * to the provided value. 
*/ - public static LoadJobInfo of(JobId jobId, TableId destinationTable, FormatOptions format, + public static LoadJobInfo of(JobId jobId, LoadConfiguration configuration, List sourceUris) { - return builder(destinationTable, sourceUris).formatOptions(format).jobId(jobId).build(); + return builder(configuration, sourceUris).jobId(jobId).build(); } @SuppressWarnings("unchecked") diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java index 5a8b822e87ef..e11e8d6aa8ad 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java @@ -197,8 +197,8 @@ public Builder userDefinedFunctions(List userDefinedFunctio /** * Sets whether the job is allowed to create tables. * - * @see - * Jobs: Query Configuration + * @see + * Create Disposition */ public Builder createDisposition(CreateDisposition createDisposition) { this.createDisposition = createDisposition; @@ -208,8 +208,8 @@ public Builder createDisposition(CreateDisposition createDisposition) { /** * Sets the action that should occur if the destination table already exists. * - * @see - * Jobs: Query Configuration + * @see + * Write Disposition */ public Builder writeDisposition(WriteDisposition writeDisposition) { this.writeDisposition = writeDisposition; @@ -319,8 +319,8 @@ public Boolean allowLargeResults() { /** * Returns whether the job is allowed to create new tables. * - * @see - * Jobs: Query Configuration + * @see + * Create Disposition */ public CreateDisposition createDisposition() { return createDisposition; @@ -399,8 +399,8 @@ public List userDefinedFunctions() { /** * Returns the action that should occur if the destination table already exists. * - * @see - * Jobs: Query Configuration + * @see + * Write Disposition */ public WriteDisposition writeDisposition() { return writeDisposition; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java index 1662f266b5ec..b4cc1df1d997 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java @@ -262,8 +262,8 @@ Job load(FormatOptions format, String sourceUri, BigQuery.JobOption... options) */ Job load(FormatOptions format, List sourceUris, BigQuery.JobOption... options) throws BigQueryException { - return new Job(bigquery, bigquery.create(LoadJobInfo.of(info.tableId(), format, sourceUris), - options)); + LoadConfiguration configuration = LoadConfiguration.of(info.tableId(), format); + return new Job(bigquery, bigquery.create(LoadJobInfo.of(configuration, sourceUris), options)); } /** diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java new file mode 100644 index 000000000000..c4cee5a9a303 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.gcloud.RetryHelper.runWithRetries; +import static java.util.concurrent.Executors.callable; + +import com.google.gcloud.BaseWriteChannel; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.WriteChannel; + +import java.util.Arrays; + +/** + * WriteChannel implementation to stream data into a BigQuery table. + */ +class TableDataWriteChannel extends BaseWriteChannel { + + TableDataWriteChannel(BigQueryOptions options, LoadConfiguration loadConfiguration) { + this(options, loadConfiguration, options.rpc().open(loadConfiguration.toPb())); + } + + TableDataWriteChannel(BigQueryOptions options, LoadConfiguration config, String uploadId) { + super(options, config, uploadId); + } + + @Override + protected void flushBuffer(final int length, final boolean last) { + try { + runWithRetries(callable(new Runnable() { + @Override + public void run() { + options().rpc().write(uploadId(), buffer(), 0, position(), length, last); + } + }), options().retryParams(), BigQueryImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + protected StateImpl.Builder stateBuilder() { + return StateImpl.builder(options(), entity(), uploadId()); + } + + static class StateImpl extends BaseWriteChannel.BaseState { + + private static final long serialVersionUID = -787362105981823738L; + + StateImpl(Builder builder) { + super(builder); + } + + static class Builder + extends BaseWriteChannel.BaseState.Builder { + + private Builder(BigQueryOptions options, LoadConfiguration configuration, String uploadId) { + super(options, configuration, uploadId); + } + + public RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(BigQueryOptions options, LoadConfiguration config, String uploadId) { + return new Builder(options, config, uploadId); + } + + @Override + public WriteChannel restore() { + TableDataWriteChannel channel = new TableDataWriteChannel(serviceOptions, entity, uploadId); + channel.restore(this); + return channel; + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java index d53ad838b802..5f17f60f2bb5 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java @@ -19,6 +19,7 @@ import com.google.api.services.bigquery.model.Dataset; import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfigurationLoad; import com.google.api.services.bigquery.model.QueryRequest; import com.google.api.services.bigquery.model.QueryResponse; import com.google.api.services.bigquery.model.Table; @@ -185,4 +186,26 @@ GetQueryResultsResponse getQueryResults(String jobId, Map options) throws BigQueryException; QueryResponse query(QueryRequest request) 
throws BigQueryException; + + /** + * Opens a resumable upload session to load data into a BigQuery table and returns an upload URI. + * + * @param configuration load configuration + * @throws BigQueryException upon failure + */ + String open(JobConfigurationLoad configuration) throws BigQueryException; + + /** + * Uploads the provided data to the resumable upload session at the specified position. + * + * @param uploadId the resumable upload session URI + * @param toWrite a byte array of data to upload + * @param toWriteOffset offset in the {@code toWrite} param to start writing from + * @param destOffset offset in the destination where to upload data to + * @param length the number of bytes to upload + * @param last {@code true} indicates that the last chunk is being uploaded + * @throws BigQueryException upon failure + */ + void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean last) throws BigQueryException; } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java index 04e481b345c2..74fdeb74bd64 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java @@ -20,12 +20,22 @@ import static com.google.gcloud.spi.BigQueryRpc.Option.PAGE_TOKEN; import static com.google.gcloud.spi.BigQueryRpc.Option.START_INDEX; import static com.google.gcloud.spi.BigQueryRpc.Option.TIMEOUT; +import static java.net.HttpURLConnection.HTTP_CREATED; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import static java.net.HttpURLConnection.HTTP_OK; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.client.http.ByteArrayContent; +import com.google.api.client.http.GenericUrl; +import com.google.api.client.http.HttpRequest; +import com.google.api.client.http.HttpRequestFactory; import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpResponse; +import com.google.api.client.http.HttpResponseException; import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.json.JsonHttpContent; +import com.google.api.client.json.JsonFactory; import com.google.api.client.json.jackson.JacksonFactory; import com.google.api.services.bigquery.Bigquery; import com.google.api.services.bigquery.model.Dataset; @@ -33,6 +43,8 @@ import com.google.api.services.bigquery.model.DatasetReference; import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.JobConfigurationLoad; import com.google.api.services.bigquery.model.JobList; import com.google.api.services.bigquery.model.JobStatus; import com.google.api.services.bigquery.model.QueryRequest; @@ -64,6 +76,10 @@ public class DefaultBigQueryRpc implements BigQueryRpc { public static final String DEFAULT_PROJECTION = "full"; // see: https://cloud.google.com/bigquery/troubleshooting-errors private static final Set RETRYABLE_CODES = ImmutableSet.of(500, 502, 503, 504); + private static final String BASE_RESUMABLE_URI = + "https://www.googleapis.com/upload/bigquery/v2/projects/"; + // see: https://cloud.google.com/bigquery/loading-data-post-request#resume-upload + 
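+ // 308 ("Resume Incomplete") is returned by the resumable upload service for a successfully
+ // stored non-final chunk; java.net.HttpURLConnection defines no constant for it, hence the
+ // local constant below.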
private static final int HTTP_RESUME_INCOMPLETE = 308; private final BigQueryOptions options; private final Bigquery bigquery; @@ -417,4 +433,69 @@ public QueryResponse query(QueryRequest request) throws BigQueryException { throw translate(ex); } } + + @Override + public String open(JobConfigurationLoad configuration) throws BigQueryException { + try { + Job loadJob = new Job().setConfiguration(new JobConfiguration().setLoad(configuration)); + StringBuilder builder = new StringBuilder() + .append(BASE_RESUMABLE_URI) + .append(options.projectId()) + .append("/jobs"); + GenericUrl url = new GenericUrl(builder.toString()); + url.set("uploadType", "resumable"); + JsonFactory jsonFactory = bigquery.getJsonFactory(); + HttpRequestFactory requestFactory = bigquery.getRequestFactory(); + HttpRequest httpRequest = + requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); + httpRequest.getHeaders().set("X-Upload-Content-Value", "application/octet-stream"); + HttpResponse response = httpRequest.execute(); + return response.getHeaders().getLocation(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean last) throws BigQueryException { + try { + GenericUrl url = new GenericUrl(uploadId); + HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url, + new ByteArrayContent(null, toWrite, toWriteOffset, length)); + long limit = destOffset + length; + StringBuilder range = new StringBuilder("bytes "); + range.append(destOffset).append('-').append(limit - 1).append('/'); + if (last) { + range.append(limit); + } else { + range.append('*'); + } + httpRequest.getHeaders().setContentRange(range.toString()); + int code; + String message; + IOException exception = null; + try { + HttpResponse response = httpRequest.execute(); + code = response.getStatusCode(); + message = response.getStatusMessage(); + } catch (HttpResponseException ex) { + exception = ex; + code = ex.getStatusCode(); + message = ex.getStatusMessage(); + } + if (!last && code != HTTP_RESUME_INCOMPLETE + || last && !(code == HTTP_OK || code == HTTP_CREATED)) { + if (exception != null) { + throw exception; + } + GoogleJsonError error = new GoogleJsonError(); + error.setCode(code); + error.setMessage(message); + throw translate(error); + } + } catch (IOException ex) { + throw translate(ex); + } + } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java index ed54e6a94111..402edfc4a42f 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java @@ -41,6 +41,7 @@ import com.google.common.collect.Lists; import com.google.gcloud.Page; import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; import com.google.gcloud.spi.BigQueryRpc; import com.google.gcloud.spi.BigQueryRpc.Tuple; @@ -107,11 +108,11 @@ public class BigQueryImplTest { private static final TableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_SCHEMA); private static final TableInfo TABLE_INFO_WITH_PROJECT = TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_SCHEMA); - private static final LoadJobInfo LOAD_JOB = LoadJobInfo.of(TABLE_ID, "URI"); + private static final LoadJobInfo 
LOAD_JOB = LoadJobInfo.of(LoadConfiguration.of(TABLE_ID), "URI"); private static final LoadJobInfo LOAD_JOB_WITH_PROJECT = - LoadJobInfo.of(TABLE_ID_WITH_PROJECT, "URI"); + LoadJobInfo.of(LoadConfiguration.of(TABLE_ID_WITH_PROJECT), "URI"); private static final LoadJobInfo COMPLETE_LOAD_JOB = - LoadJobInfo.builder(TABLE_ID_WITH_PROJECT, "URI") + LoadJobInfo.builder(LoadConfiguration.of(TABLE_ID_WITH_PROJECT), "URI") .jobId(JobId.of(PROJECT, JOB)) .build(); private static final CopyJobInfo COPY_JOB = @@ -1006,6 +1007,18 @@ public void testGetQueryResultsWithOptions() { assertEquals("cursor", response.result().nextPageCursor()); } + @Test + public void testWriter() { + LoadConfiguration loadConfiguration = LoadConfiguration.of(TABLE_ID); + EasyMock.expect(bigqueryRpcMock.open(LoadConfiguration.of(TABLE_ID_WITH_PROJECT).toPb())) + .andReturn("upload-id"); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + WriteChannel channel = bigquery.writer(loadConfiguration); + assertNotNull(channel); + assertTrue(channel.isOpen()); + } + @Test public void testRetryableException() { EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java index fa527df5aa75..528df30d0a61 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java @@ -16,6 +16,12 @@ package com.google.gcloud.bigquery; +import static com.google.gcloud.bigquery.BigQuery.DatasetField; +import static com.google.gcloud.bigquery.BigQuery.JobField; +import static com.google.gcloud.bigquery.BigQuery.JobListOption; +import static com.google.gcloud.bigquery.BigQuery.JobOption; +import static com.google.gcloud.bigquery.BigQuery.TableField; +import static com.google.gcloud.bigquery.BigQuery.TableOption; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; @@ -27,9 +33,6 @@ import com.google.common.collect.ImmutableMap; import com.google.gcloud.Page; import com.google.gcloud.bigquery.BigQuery.DatasetOption; -import com.google.gcloud.bigquery.BigQuery.JobListOption; -import com.google.gcloud.bigquery.BigQuery.JobOption; -import com.google.gcloud.bigquery.BigQuery.TableOption; import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; import com.google.gcloud.storage.BlobInfo; import com.google.gcloud.storage.BucketInfo; @@ -42,7 +45,9 @@ import org.junit.Test; import org.junit.rules.Timeout; +import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.List; @@ -147,11 +152,11 @@ public static void beforeClass() throws IOException, InterruptedException { JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build(); bigquery.create(info); - LoadJobInfo job = LoadJobInfo.builder(TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE) + LoadConfiguration configuration = LoadConfiguration.builder(TABLE_ID, FormatOptions.json()) .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .schema(TABLE_SCHEMA) - .formatOptions(FormatOptions.json()) .build(); + LoadJobInfo job = LoadJobInfo.of(configuration, "gs://" + BUCKET + "/" + JSON_LOAD_FILE); job = 
bigquery.create(job); while (job.status().state() != JobStatus.State.DONE) { Thread.sleep(1000); @@ -188,7 +193,7 @@ public void testGetDataset() { @Test public void testGetDatasetWithSelectedFields() { DatasetInfo dataset = bigquery.getDataset(DATASET, - DatasetOption.fields(BigQuery.DatasetField.CREATION_TIME)); + DatasetOption.fields(DatasetField.CREATION_TIME)); assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); assertEquals(DATASET, dataset.datasetId().dataset()); assertNotNull(dataset.creationTime()); @@ -229,7 +234,7 @@ public void testUpdateDatasetWithSelectedFields() { assertEquals("Some Description", dataset.description()); DatasetInfo updatedDataset = bigquery.update(dataset.toBuilder().description("Updated Description").build(), - DatasetOption.fields(BigQuery.DatasetField.DESCRIPTION)); + DatasetOption.fields(DatasetField.DESCRIPTION)); assertEquals("Updated Description", updatedDataset.description()); assertNull(updatedDataset.creationTime()); assertNull(updatedDataset.defaultTableLifetime()); @@ -278,7 +283,7 @@ public void testCreateAndGetTableWithSelectedField() { assertEquals(DATASET, createdTableInfo.tableId().dataset()); assertEquals(tableName, createdTableInfo.tableId().table()); BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName, - TableOption.fields(BigQuery.TableField.CREATION_TIME)); + TableOption.fields(TableField.CREATION_TIME)); assertNotNull(remoteTableInfo); assertTrue(remoteTableInfo instanceof TableInfo); assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); @@ -438,7 +443,7 @@ public void testUpdateTableWithSelectedFields() { BaseTableInfo createdTableInfo = bigquery.create(tableInfo); assertNotNull(createdTableInfo); BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder().description("newDescr") - .build(), TableOption.fields(BigQuery.TableField.DESCRIPTION)); + .build(), TableOption.fields(TableField.DESCRIPTION)); assertTrue(updatedTableInfo instanceof TableInfo); assertEquals(DATASET, updatedTableInfo.tableId().dataset()); assertEquals(tableName, updatedTableInfo.tableId().table()); @@ -659,7 +664,7 @@ public void testListJobs() { @Test public void testListJobsWithSelectedFields() { - Page jobs = bigquery.listJobs(JobListOption.fields(BigQuery.JobField.USER_EMAIL)); + Page jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); for (JobInfo job : jobs.values()) { assertNotNull(job.jobId()); assertNotNull(job.status()); @@ -709,7 +714,7 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException assertEquals(sourceTableName, createdTableInfo.tableId().table()); TableId destinationTable = TableId.of(DATASET, destinationTableName); CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable); - CopyJobInfo createdJob = bigquery.create(job, JobOption.fields(BigQuery.JobField.ETAG)); + CopyJobInfo createdJob = bigquery.create(job, JobOption.fields(JobField.ETAG)); assertNotNull(createdJob.jobId()); assertNotNull(createdJob.sourceTables()); assertNotNull(createdJob.destinationTable()); @@ -719,7 +724,7 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException assertNull(createdJob.selfLink()); assertNull(createdJob.userEmail()); CopyJobInfo remoteJob = bigquery.getJob(createdJob.jobId(), - JobOption.fields(BigQuery.JobField.ETAG)); + JobOption.fields(JobField.ETAG)); assertEquals(createdJob.jobId(), remoteJob.jobId()); assertEquals(createdJob.sourceTables(), remoteJob.sourceTables()); 
assertEquals(createdJob.destinationTable(), remoteJob.destinationTable()); @@ -810,10 +815,11 @@ public void testQueryJob() throws InterruptedException { public void testExtractJob() throws InterruptedException { String tableName = "test_export_job_table"; TableId destinationTable = TableId.of(DATASET, tableName); - LoadJobInfo remoteLoadJob = bigquery.create( - LoadJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE) - .schema(SIMPLE_SCHEMA) - .build()); + LoadConfiguration configuration = LoadConfiguration.builder(destinationTable) + .schema(SIMPLE_SCHEMA) + .build(); + LoadJobInfo remoteLoadJob = + bigquery.create(LoadJobInfo.of(configuration, "gs://" + BUCKET + "/" + LOAD_FILE)); while (remoteLoadJob.status().state() != JobStatus.State.DONE) { Thread.sleep(1000); remoteLoadJob = bigquery.getJob(remoteLoadJob.jobId()); @@ -857,4 +863,51 @@ public void testCancelJob() throws InterruptedException { public void testCancelNonExistingJob() throws InterruptedException { assertFalse(bigquery.cancel("test_cancel_non_existing_job")); } + + @Test + public void testInsertFromFile() throws InterruptedException, FileNotFoundException { + String destinationTableName = "test_insert_from_file_table"; + TableId tableId = TableId.of(DATASET, destinationTableName); + LoadConfiguration configuration = LoadConfiguration.builder(tableId) + .formatOptions(FormatOptions.json()) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .schema(TABLE_SCHEMA) + .build(); + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { + channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); + } catch (IOException e) { + fail("IOException was not expected"); + } + // wait until the new table is created. If the table is never created the test will time-out + while (bigquery.getTable(tableId) == null) { + Thread.sleep(1000L); + } + Page> rows = bigquery.listTableData(tableId); + int rowCount = 0; + for (List row : rows.values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + FieldValue recordCell = row.get(4); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(0, integerCell.repeatedValue().get(0).longValue()); + assertEquals(1, integerCell.repeatedValue().get(1).longValue()); + assertEquals(false, booleanCell.booleanValue()); + assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue()); + assertTrue(recordCell.recordValue().get(1).isNull()); + assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue()); + assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue()); + assertEquals(true, recordCell.recordValue().get(3).booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadConfigurationTest.java 
new file mode 100644 index 000000000000..e72101829cdf --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadConfigurationTest.java @@ -0,0 +1,123 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import org.junit.Test; + +import java.nio.charset.StandardCharsets; +import java.util.List; + +public class LoadConfigurationTest { + + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.UTF_8) + .build(); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Integer MAX_BAD_RECORDS = 42; + private static final String FORMAT = "CSV"; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); + private static final Field FIELD_SCHEMA = Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA); + private static final LoadConfiguration LOAD_CONFIGURATION = LoadConfiguration.builder(TABLE_ID) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .projectionFields(PROJECTION_FIELDS) + .schema(TABLE_SCHEMA) + .build(); + + @Test + public void testToBuilder() { + compareLoadConfiguration(LOAD_CONFIGURATION, LOAD_CONFIGURATION.toBuilder().build()); + LoadConfiguration configuration = LOAD_CONFIGURATION.toBuilder() + .destinationTable(TableId.of("dataset", "newTable")) + .build(); + assertEquals("newTable", configuration.destinationTable().table()); + configuration = configuration.toBuilder().destinationTable(TABLE_ID).build(); + compareLoadConfiguration(LOAD_CONFIGURATION, configuration); + } + + @Test + public void testOf() { + LoadConfiguration configuration = LoadConfiguration.of(TABLE_ID); + assertEquals(TABLE_ID, configuration.destinationTable()); + configuration = LoadConfiguration.of(TABLE_ID, CSV_OPTIONS); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(FORMAT, configuration.format()); + assertEquals(CSV_OPTIONS, configuration.csvOptions()); + } + + @Test + public void testToBuilderIncomplete() { + LoadConfiguration configuration = LoadConfiguration.of(TABLE_ID); + 
compareLoadConfiguration(configuration, configuration.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, LOAD_CONFIGURATION.destinationTable()); + assertEquals(CREATE_DISPOSITION, LOAD_CONFIGURATION.createDisposition()); + assertEquals(WRITE_DISPOSITION, LOAD_CONFIGURATION.writeDisposition()); + assertEquals(CSV_OPTIONS, LOAD_CONFIGURATION.csvOptions()); + assertEquals(FORMAT, LOAD_CONFIGURATION.format()); + assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_CONFIGURATION.ignoreUnknownValues()); + assertEquals(MAX_BAD_RECORDS, LOAD_CONFIGURATION.maxBadRecords()); + assertEquals(PROJECTION_FIELDS, LOAD_CONFIGURATION.projectionFields()); + assertEquals(TABLE_SCHEMA, LOAD_CONFIGURATION.schema()); + } + + @Test + public void testToPbAndFromPb() { + assertNull(LOAD_CONFIGURATION.toPb().getSourceUris()); + compareLoadConfiguration(LOAD_CONFIGURATION, + LoadConfiguration.fromPb(LOAD_CONFIGURATION.toPb())); + LoadConfiguration configuration = LoadConfiguration.of(TABLE_ID); + compareLoadConfiguration(configuration, LoadConfiguration.fromPb(configuration.toPb())); + } + + private void compareLoadConfiguration(LoadConfiguration expected, LoadConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + assertEquals(expected.csvOptions(), value.csvOptions()); + assertEquals(expected.format(), value.format()); + assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); + assertEquals(expected.maxBadRecords(), value.maxBadRecords()); + assertEquals(expected.projectionFields(), value.projectionFields()); + assertEquals(expected.schema(), value.schema()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobInfoTest.java index 06ce0b42ad4b..499d0d939698 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobInfoTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobInfoTest.java @@ -47,7 +47,6 @@ public class LoadJobInfoTest { private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; private static final Integer MAX_BAD_RECORDS = 42; - private static final String FORMAT = "CSV"; private static final Boolean IGNORE_UNKNOWN_VALUES = true; private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); private static final JobId JOB_ID = JobId.of("job"); @@ -66,13 +65,7 @@ public class LoadJobInfoTest { .inputBytes(2048L) .outputRows(24L) .build(); - private static final LoadJobInfo LOAD_JOB = LoadJobInfo.builder(TABLE_ID, SOURCE_URIS) - .etag(ETAG) - .id(ID) - .selfLink(SELF_LINK) - .userEmail(EMAIL) - .jobId(JOB_ID) - .status(JOB_STATUS) + private static final LoadConfiguration LOAD_CONFIGURATION = LoadConfiguration.builder(TABLE_ID) .createDisposition(CREATE_DISPOSITION) .writeDisposition(WRITE_DISPOSITION) .formatOptions(CSV_OPTIONS) @@ -80,63 +73,47 @@ public class LoadJobInfoTest { .maxBadRecords(MAX_BAD_RECORDS) .projectionFields(PROJECTION_FIELDS) .schema(TABLE_SCHEMA) + .build(); + private static final LoadJobInfo 
LOAD_JOB = LoadJobInfo.builder(LOAD_CONFIGURATION, SOURCE_URIS) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .jobId(JOB_ID) + .status(JOB_STATUS) .statistics(JOB_STATISTICS) .build(); @Test public void testToBuilder() { compareLoadJobInfo(LOAD_JOB, LOAD_JOB.toBuilder().build()); - LoadJobInfo job = LOAD_JOB.toBuilder() - .destinationTable(TableId.of("dataset", "newTable")) - .build(); - assertEquals("newTable", job.destinationTable().table()); - job = job.toBuilder().destinationTable(TABLE_ID).build(); + LoadJobInfo job = LOAD_JOB.toBuilder().etag("newEtag").build(); + assertEquals("newEtag", job.etag()); + job = job.toBuilder().etag(ETAG).build(); compareLoadJobInfo(LOAD_JOB, job); } @Test public void testOf() { - LoadJobInfo job = LoadJobInfo.of(TABLE_ID, SOURCE_URIS); - assertEquals(TABLE_ID, job.destinationTable()); - assertEquals(SOURCE_URIS, job.sourceUris()); - job = LoadJobInfo.of(TABLE_ID, SOURCE_URI); - assertEquals(TABLE_ID, job.destinationTable()); - assertEquals(ImmutableList.of(SOURCE_URI), job.sourceUris()); - job = LoadJobInfo.of(TABLE_ID, CSV_OPTIONS, SOURCE_URIS); - assertEquals(TABLE_ID, job.destinationTable()); + LoadJobInfo job = LoadJobInfo.of(LOAD_CONFIGURATION, SOURCE_URIS); + assertEquals(LOAD_CONFIGURATION, job.configuration()); assertEquals(SOURCE_URIS, job.sourceUris()); - assertEquals(FORMAT, job.format()); - assertEquals(CSV_OPTIONS, job.csvOptions()); - job = LoadJobInfo.of(TABLE_ID, CSV_OPTIONS, SOURCE_URI); - assertEquals(TABLE_ID, job.destinationTable()); - assertEquals(ImmutableList.of(SOURCE_URI), job.sourceUris()); - assertEquals(FORMAT, job.format()); - assertEquals(CSV_OPTIONS, job.csvOptions()); - job = LoadJobInfo.of(JOB_ID, TABLE_ID, SOURCE_URIS); - assertEquals(JOB_ID, job.jobId()); - assertEquals(TABLE_ID, job.destinationTable()); - assertEquals(SOURCE_URIS, job.sourceUris()); - job = LoadJobInfo.of(JOB_ID, TABLE_ID, SOURCE_URI); - assertEquals(JOB_ID, job.jobId()); - assertEquals(TABLE_ID, job.destinationTable()); + job = LoadJobInfo.of(LOAD_CONFIGURATION, SOURCE_URI); + assertEquals(LOAD_CONFIGURATION, job.configuration()); assertEquals(ImmutableList.of(SOURCE_URI), job.sourceUris()); - job = LoadJobInfo.of(JOB_ID, TABLE_ID, CSV_OPTIONS, SOURCE_URIS); + job = LoadJobInfo.of(JOB_ID, LOAD_CONFIGURATION, SOURCE_URIS); assertEquals(JOB_ID, job.jobId()); - assertEquals(TABLE_ID, job.destinationTable()); + assertEquals(LOAD_CONFIGURATION, job.configuration()); assertEquals(SOURCE_URIS, job.sourceUris()); - assertEquals(FORMAT, job.format()); - assertEquals(CSV_OPTIONS, job.csvOptions()); - job = LoadJobInfo.of(JOB_ID, TABLE_ID, CSV_OPTIONS, SOURCE_URI); + job = LoadJobInfo.of(JOB_ID, LOAD_CONFIGURATION, SOURCE_URI); assertEquals(JOB_ID, job.jobId()); - assertEquals(TABLE_ID, job.destinationTable()); + assertEquals(LOAD_CONFIGURATION, job.configuration()); assertEquals(ImmutableList.of(SOURCE_URI), job.sourceUris()); - assertEquals(FORMAT, job.format()); - assertEquals(CSV_OPTIONS, job.csvOptions()); } @Test public void testToBuilderIncomplete() { - LoadJobInfo job = LoadJobInfo.of(TABLE_ID, SOURCE_URIS); + LoadJobInfo job = LoadJobInfo.of(LOAD_CONFIGURATION, SOURCE_URIS); compareLoadJobInfo(job, job.toBuilder().build()); } @@ -148,16 +125,8 @@ public void testBuilder() { assertEquals(EMAIL, LOAD_JOB.userEmail()); assertEquals(JOB_ID, LOAD_JOB.jobId()); assertEquals(JOB_STATUS, LOAD_JOB.status()); - assertEquals(TABLE_ID, LOAD_JOB.destinationTable()); + assertEquals(LOAD_CONFIGURATION, LOAD_JOB.configuration()); 
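For the CSV path, the format-specific options move onto the configuration as well. A minimal sketch with placeholder table and bucket names, assuming the usual com.google.gcloud.bigquery, Guava ImmutableList and java.nio.charset.StandardCharsets imports:

    CsvOptions csvOptions = CsvOptions.builder()
        .allowJaggedRows(true)
        .allowQuotedNewLines(false)
        .encoding(StandardCharsets.UTF_8)
        .build();
    LoadConfiguration configuration = LoadConfiguration.builder(TableId.of("my_dataset", "my_table"))
        .formatOptions(csvOptions)
        .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
        .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND)
        .maxBadRecords(10)
        .ignoreUnknownValues(true)
        .build();
    LoadJobInfo job = LoadJobInfo.of(
        configuration, ImmutableList.of("gs://my_bucket/part1.csv", "gs://my_bucket/part2.csv"));

Job-level metadata (job id, etag, statistics) stays on LoadJobInfo, so the same configuration instance can be handed unchanged to BigQuery.writer for the streaming path.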
assertEquals(SOURCE_URIS, LOAD_JOB.sourceUris()); - assertEquals(CREATE_DISPOSITION, LOAD_JOB.createDisposition()); - assertEquals(WRITE_DISPOSITION, LOAD_JOB.writeDisposition()); - assertEquals(CSV_OPTIONS, LOAD_JOB.csvOptions()); - assertEquals(FORMAT, LOAD_JOB.format()); - assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_JOB.ignoreUnknownValues()); - assertEquals(MAX_BAD_RECORDS, LOAD_JOB.maxBadRecords()); - assertEquals(PROJECTION_FIELDS, LOAD_JOB.projectionFields()); - assertEquals(TABLE_SCHEMA, LOAD_JOB.schema()); assertEquals(JOB_STATISTICS, LOAD_JOB.statistics()); } @@ -170,7 +139,7 @@ public void testToPbAndFromPb() { assertEquals(JOB_STATISTICS, JobStatistics.fromPb(LOAD_JOB.toPb().getStatistics())); compareLoadJobInfo(LOAD_JOB, LoadJobInfo.fromPb(LOAD_JOB.toPb())); compareLoadJobInfo(LOAD_JOB, (LoadJobInfo) JobInfo.fromPb(LOAD_JOB.toPb())); - LoadJobInfo job = LoadJobInfo.of(TABLE_ID, SOURCE_URIS); + LoadJobInfo job = LoadJobInfo.of(LOAD_CONFIGURATION, SOURCE_URIS); compareLoadJobInfo(job, LoadJobInfo.fromPb(job.toPb())); compareLoadJobInfo(job, (LoadJobInfo) JobInfo.fromPb(job.toPb())); } @@ -186,15 +155,7 @@ private void compareLoadJobInfo(LoadJobInfo expected, LoadJobInfo value) { assertEquals(expected.status(), value.status()); assertEquals(expected.statistics(), value.statistics()); assertEquals(expected.userEmail(), value.userEmail()); - assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.configuration(), value.configuration()); assertEquals(expected.sourceUris(), value.sourceUris()); - assertEquals(expected.createDisposition(), value.createDisposition()); - assertEquals(expected.writeDisposition(), value.writeDisposition()); - assertEquals(expected.csvOptions(), value.csvOptions()); - assertEquals(expected.format(), value.format()); - assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); - assertEquals(expected.maxBadRecords(), value.maxBadRecords()); - assertEquals(expected.projectionFields(), value.projectionFields()); - assertEquals(expected.schema(), value.schema()); } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java index 8c80bddbfefb..d407ac1630e3 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java @@ -22,7 +22,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.gcloud.AuthCredentials; +import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; import com.google.gcloud.bigquery.TableInfo.StreamingBuffer; import org.junit.Test; @@ -99,9 +101,9 @@ public class SerializationTest { private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); private static final ExternalDataConfiguration EXTERNAL_DATA_CONFIGURATION = ExternalDataConfiguration.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) - .ignoreUnknownValues(true) - .maxBadRecords(42) - .build(); + .ignoreUnknownValues(true) + .maxBadRecords(42) + .build(); private static final UserDefinedFunction INLINE_FUNCTION = new UserDefinedFunction.InlineFunction("inline"); private static final UserDefinedFunction URI_FUNCTION = @@ -130,10 +132,10 @@ public class SerializationTest { .id(ID) .build(); private static final JobStatistics JOB_STATISTICS = 
JobStatistics.builder() - .creationTime(1L) - .endTime(3L) - .startTime(2L) - .build(); + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); private static final JobStatistics.ExtractStatistics EXTRACT_STATISTICS = JobStatistics.ExtractStatistics.builder() .creationTime(1L) @@ -168,7 +170,15 @@ public class SerializationTest { private static final JobId JOB_ID = JobId.of("project", "job"); private static final CopyJobInfo COPY_JOB = CopyJobInfo.of(TABLE_ID, TABLE_ID); private static final ExtractJobInfo EXTRACT_JOB = ExtractJobInfo.of(TABLE_ID, SOURCE_URIS); - private static final LoadJobInfo LOAD_JOB = LoadJobInfo.of(TABLE_ID, SOURCE_URIS); + private static final LoadConfiguration LOAD_CONFIGURATION = LoadConfiguration.builder(TABLE_ID) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(true) + .maxBadRecords(10) + .schema(TABLE_SCHEMA) + .build(); + private static final LoadJobInfo LOAD_JOB = LoadJobInfo.of(LOAD_CONFIGURATION, SOURCE_URIS); private static final QueryJobInfo QUERY_JOB = QueryJobInfo.of("query"); private static final Map CONTENT1 = ImmutableMap.of("key", "val1"); @@ -231,8 +241,8 @@ public void testModelAndRequests() throws Exception { DATASET_INFO, TABLE_ID, CSV_OPTIONS, STREAMING_BUFFER, EXTERNAL_DATA_CONFIGURATION, TABLE_SCHEMA, TABLE_INFO, VIEW_INFO, EXTERNAL_TABLE_INFO, INLINE_FUNCTION, URI_FUNCTION, JOB_STATISTICS, EXTRACT_STATISTICS, LOAD_STATISTICS, QUERY_STATISTICS, BIGQUERY_ERROR, - JOB_STATUS, JOB_ID, COPY_JOB, EXTRACT_JOB, LOAD_JOB, QUERY_JOB, INSERT_ALL_REQUEST, - INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, + JOB_STATUS, JOB_ID, COPY_JOB, EXTRACT_JOB, LOAD_CONFIGURATION, LOAD_JOB, QUERY_JOB, + INSERT_ALL_REQUEST, INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, BigQuery.DatasetOption.fields(), BigQuery.DatasetDeleteOption.deleteContents(), BigQuery.DatasetListOption.all(), BigQuery.TableOption.fields(), BigQuery.TableListOption.maxResults(42L), BigQuery.JobOption.fields(), @@ -246,8 +256,25 @@ public void testModelAndRequests() throws Exception { } } + @Test + public void testWriteChannelState() throws IOException, ClassNotFoundException { + BigQueryOptions options = BigQueryOptions.builder() + .projectId("p2") + .retryParams(RetryParams.defaultInstance()) + .build(); + // avoid closing when you don't want partial writes upon failure + @SuppressWarnings("resource") + TableDataWriteChannel writer = + new TableDataWriteChannel(options, LOAD_CONFIGURATION, "upload-id"); + RestorableState state = writer.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); + assertEquals(state, deserializedState); + assertEquals(state.hashCode(), deserializedState.hashCode()); + assertEquals(state.toString(), deserializedState.toString()); + } + @SuppressWarnings("unchecked") - private T serializeAndDeserialize(T obj) + private T serializeAndDeserialize(T obj) throws IOException, ClassNotFoundException { ByteArrayOutputStream bytes = new ByteArrayOutputStream(); try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java new file mode 100644 index 000000000000..67933407e377 --- /dev/null +++ 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java @@ -0,0 +1,248 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.captureLong; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.eq; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.expectLastCall; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.gcloud.RestorableState; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.spi.BigQueryRpcFactory; + +import org.easymock.Capture; +import org.easymock.CaptureType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Random; + +public class TableDataWriteChannelTest { + + private static final String UPLOAD_ID = "uploadid"; + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final LoadConfiguration LOAD_CONFIGURATION = LoadConfiguration.builder(TABLE_ID) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .formatOptions(FormatOptions.json()) + .ignoreUnknownValues(true) + .maxBadRecords(10) + .build(); + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private static final int CUSTOM_CHUNK_SIZE = 4 * MIN_CHUNK_SIZE; + private static final Random RANDOM = new Random(); + + private BigQueryOptions options; + private BigQueryRpcFactory rpcFactoryMock; + private BigQueryRpc bigqueryRpcMock; + private TableDataWriteChannel writer; + + @Before + public void setUp() { + rpcFactoryMock = createMock(BigQueryRpcFactory.class); + bigqueryRpcMock = createMock(BigQueryRpc.class); + expect(rpcFactoryMock.create(anyObject(BigQueryOptions.class))) + .andReturn(bigqueryRpcMock); + replay(rpcFactoryMock); + options = BigQueryOptions.builder() + .projectId("projectid") + .serviceRpcFactory(rpcFactoryMock) + .build(); + } + + @After + public void tearDown() throws Exception { + verify(rpcFactoryMock, bigqueryRpcMock); + } + + @Test + public void testCreate() { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + } + + @Test + public void testWriteWithoutFlush() throws IOException { + 
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); + } + + @Test + public void testWriteWithFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(CUSTOM_CHUNK_SIZE), eq(false)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.chunkSize(CUSTOM_CHUNK_SIZE); + ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE); + assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); + assertArrayEquals(buffer.array(), capturedBuffer.getValue()); + } + + @Test + public void testWritesAndFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(DEFAULT_CHUNK_SIZE), eq(false)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + for (int i = 0; i < buffers.length; i++) { + assertArrayEquals( + buffers[i].array(), + Arrays.copyOfRange( + capturedBuffer.getValue(), MIN_CHUNK_SIZE * i, MIN_CHUNK_SIZE * (i + 1))); + } + } + + @Test + public void testCloseWithoutFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + writer.close(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertTrue(!writer.isOpen()); + } + + @Test + public void testCloseWithFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), + eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + writer.write(buffer); + writer.close(); + assertEquals(DEFAULT_CHUNK_SIZE, capturedBuffer.getValue().length); + assertArrayEquals(buffer.array(), Arrays.copyOf(capturedBuffer.getValue(), MIN_CHUNK_SIZE)); + assertTrue(!writer.isOpen()); + } + + @Test + public void testWriteClosed() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.close(); + try { + writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); + fail("Expected TableDataWriteChannel write to throw IOException"); + } catch (IOException ex) { + // expected 
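For streaming local data, the same configuration drives the resumable upload. A minimal sketch of the pattern these tests exercise, assuming a bigquery service instance, placeholder table and file names, and the usual java.nio imports (FileChannel, Paths, ByteBuffer):

    static void loadLocalFile(BigQuery bigquery) throws IOException {
      LoadConfiguration configuration =
          LoadConfiguration.of(TableId.of("my_dataset", "my_table"), FormatOptions.json());
      try (FileChannel source = FileChannel.open(Paths.get("/tmp/rows.json"));
          TableDataWriteChannel destination = bigquery.writer(configuration)) {
        ByteBuffer buffer = ByteBuffer.allocate(256 * 1024);
        while (source.read(buffer) > 0) {
          // Data is buffered client-side; it becomes visible in the table only after close().
          buffer.flip();
          destination.write(buffer);
          buffer.clear();
        }
      }
    }

If an upload must survive a process restart, the channel can be snapshotted with capture() and rebuilt later via RestorableState.restore(), which is what the save-and-restore tests in this class verify.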
+ } + } + + @Test + public void testSaveAndRestore() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(CaptureType.ALL); + Capture capturedPosition = Capture.newInstance(CaptureType.ALL); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), + captureLong(capturedPosition), eq(DEFAULT_CHUNK_SIZE), eq(false)); + expectLastCall().times(2); + replay(bigqueryRpcMock); + ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); + ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); + assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); + assertEquals(new Long(0L), capturedPosition.getValues().get(0)); + RestorableState writerState = writer.capture(); + WriteChannel restoredWriter = writerState.restore(); + assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2)); + assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1)); + assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1)); + } + + @Test + public void testSaveAndRestoreClosed() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.close(); + RestorableState writerState = writer.capture(); + RestorableState expectedWriterState = + TableDataWriteChannel.StateImpl.builder(options, LOAD_CONFIGURATION, UPLOAD_ID) + .buffer(null) + .chunkSize(DEFAULT_CHUNK_SIZE) + .isOpen(false) + .position(0) + .build(); + WriteChannel restoredWriter = writerState.restore(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertEquals(expectedWriterState, restoredWriter.capture()); + } + + @Test + public void testStateEquals() { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID).times(2); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + // avoid closing when you don't want partial writes upon failure + @SuppressWarnings("resource") + WriteChannel writer2 = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + RestorableState state = writer.capture(); + RestorableState state2 = writer2.capture(); + assertEquals(state, state2); + assertEquals(state.hashCode(), state2.hashCode()); + assertEquals(state.toString(), state2.toString()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java index dfcf17c90ab3..c931d768def1 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java @@ -48,9 +48,9 @@ public class TableTest { private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); private static final JobInfo COPY_JOB_INFO = CopyJobInfo.of(TABLE_ID2, TABLE_ID1); - private static final JobInfo LOAD_JOB_INFO = 
- LoadJobInfo.builder(TABLE_ID1, ImmutableList.of("URI")) - .formatOptions(FormatOptions.json()) + private static final JobInfo LOAD_JOB_INFO = LoadJobInfo.builder( + LoadConfiguration.builder(TABLE_ID1).formatOptions(FormatOptions.json()).build(), + ImmutableList.of("URI")) .build(); private static final JobInfo EXTRACT_JOB_INFO = ExtractJobInfo.builder(TABLE_ID1, ImmutableList.of("URI")) diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java new file mode 100644 index 000000000000..e05383a65826 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java @@ -0,0 +1,293 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.common.base.MoreObjects; + +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Objects; + +/** + * Base implementation for a {@link WriteChannel}. + * + * @param the service options used by the channel to issue RPC requests + * @param the entity this channel writes data to. Possibly with additional configuration + */ +public abstract class BaseWriteChannel< + ServiceOptionsT extends ServiceOptions, + EntityT extends Serializable> implements WriteChannel { + + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + + private final ServiceOptionsT options; + private final EntityT entity; + private final String uploadId; + private int position; + private byte[] buffer = new byte[0]; + private int limit; + private boolean isOpen = true; + private int chunkSize = defaultChunkSize(); + + protected int minChunkSize() { + return MIN_CHUNK_SIZE; + } + + protected int defaultChunkSize() { + return DEFAULT_CHUNK_SIZE; + } + + /** + * Writes {@code length} bytes of {@link #buffer()} to the {@link #uploadId()} URL. 
+ * + * @param length the number of bytes to write from {@link #buffer()} + * @param last if {@code true} the resumable session is closed + */ + protected abstract void flushBuffer(int length, boolean last); + + protected ServiceOptionsT options() { + return options; + } + + protected EntityT entity() { + return entity; + } + + protected String uploadId() { + return uploadId; + } + + protected int position() { + return position; + } + + protected byte[] buffer() { + return buffer; + } + + protected int limit() { + return limit; + } + + protected int chunkSize() { + return chunkSize; + } + + @Override + public final void chunkSize(int chunkSize) { + chunkSize = (chunkSize / minChunkSize()) * minChunkSize(); + this.chunkSize = Math.max(minChunkSize(), chunkSize); + } + + protected BaseWriteChannel(ServiceOptionsT options, EntityT entity, String uploadId) { + this.options = options; + this.entity = entity; + this.uploadId = uploadId; + } + + private void flush() { + if (limit >= chunkSize) { + final int length = limit - limit % minChunkSize(); + flushBuffer(length, false); + position += length; + limit -= length; + byte[] temp = new byte[chunkSize]; + System.arraycopy(buffer, length, temp, 0, limit); + buffer = temp; + } + } + + private void validateOpen() throws IOException { + if (!isOpen) { + throw new IOException("stream is closed"); + } + } + + @Override + public final int write(ByteBuffer byteBuffer) throws IOException { + validateOpen(); + int toWrite = byteBuffer.remaining(); + int spaceInBuffer = buffer.length - limit; + if (spaceInBuffer >= toWrite) { + byteBuffer.get(buffer, limit, toWrite); + } else { + buffer = Arrays.copyOf(buffer, Math.max(chunkSize, buffer.length + toWrite - spaceInBuffer)); + byteBuffer.get(buffer, limit, toWrite); + } + limit += toWrite; + flush(); + return toWrite; + } + + @Override + public boolean isOpen() { + return isOpen; + } + + @Override + public final void close() throws IOException { + if (isOpen) { + flushBuffer(limit, true); + position += buffer.length; + isOpen = false; + buffer = null; + } + } + + /** + * Creates a {@link BaseState.Builder} for the current write channel. + */ + protected abstract BaseState.Builder stateBuilder(); + + @Override + public RestorableState capture() { + byte[] bufferToSave = null; + if (isOpen) { + flush(); + bufferToSave = Arrays.copyOf(buffer, limit); + } + return stateBuilder() + .position(position) + .buffer(bufferToSave) + .isOpen(isOpen) + .chunkSize(chunkSize) + .build(); + } + + /** + * Restores the state of the current write channel given a {@link BaseState} object. 
+ */ + protected void restore(BaseState state) { + if (state.buffer != null) { + this.buffer = state.buffer.clone(); + this.limit = state.buffer.length; + } + this.position = state.position; + this.isOpen = state.isOpen; + this.chunkSize = state.chunkSize; + } + + protected abstract static class BaseState< + ServiceOptionsT extends ServiceOptions, EntityT extends Serializable> + implements RestorableState, Serializable { + + private static final long serialVersionUID = 8541062465055125619L; + + protected final ServiceOptionsT serviceOptions; + protected final EntityT entity; + protected final String uploadId; + protected final int position; + protected final byte[] buffer; + protected final boolean isOpen; + protected final int chunkSize; + + protected BaseState(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.entity = builder.entity; + this.uploadId = builder.uploadId; + this.position = builder.position; + this.buffer = builder.buffer; + this.isOpen = builder.isOpen; + this.chunkSize = builder.chunkSize; + } + + /** + * Base builder for a write channel's state. Users are not supposed to access this class + * directly. + * + * @param the service options used by the channel to issue RPC requests + * @param the entity this channel writes data to. Possibly with additional + * configuration + */ + public abstract static class Builder< + ServiceOptionsT extends ServiceOptions, + EntityT extends Serializable> { + private final ServiceOptionsT serviceOptions; + private final EntityT entity; + private final String uploadId; + private int position; + private byte[] buffer; + private boolean isOpen; + private int chunkSize; + + protected Builder(ServiceOptionsT options, EntityT entity, String uploadId) { + this.serviceOptions = options; + this.entity = entity; + this.uploadId = uploadId; + } + + public Builder position(int position) { + this.position = position; + return this; + } + + public Builder buffer(byte[] buffer) { + this.buffer = buffer; + return this; + } + + public Builder isOpen(boolean isOpen) { + this.isOpen = isOpen; + return this; + } + + public Builder chunkSize(int chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + public abstract RestorableState build(); + } + + @Override + public int hashCode() { + return Objects.hash(serviceOptions, entity, uploadId, position, isOpen, chunkSize, + Arrays.hashCode(buffer)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof BaseState)) { + return false; + } + final BaseState other = (BaseState) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.entity, other.entity) + && Objects.equals(this.uploadId, other.uploadId) + && Objects.deepEquals(this.buffer, other.buffer) + && this.position == other.position + && this.isOpen == other.isOpen + && this.chunkSize == other.chunkSize; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("entity", entity) + .add("uploadId", uploadId) + .add("position", position) + .add("isOpen", isOpen) + .toString(); + } + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java new file mode 100644 index 000000000000..7537c5a8ce0b --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java @@ -0,0 +1,57 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.channels.ReadableByteChannel; + +/** + * A channel for reading data from a Google Cloud object. + * + *
<p>
Implementations of this class may buffer data internally to reduce remote calls. This + * interface implements {@link Restorable} to allow saving the reader's state to continue reading + * afterwards. + *
</p>
+ */ +public interface ReadChannel extends ReadableByteChannel, Closeable, Restorable { + + /** + * Overridden to remove IOException. + * + * @see java.nio.channels.Channel#close() + */ + @Override + void close(); + + void seek(int position) throws IOException; + + /** + * Sets the minimum size that will be read by a single RPC. + * Read data will be locally buffered until consumed. + */ + void chunkSize(int chunkSize); + + /** + * Captures the read channel state so that it can be saved and restored afterwards. + * + * @return a {@link RestorableState} object that contains the read channel state and can restore + * it afterwards. + */ + @Override + RestorableState capture(); +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java new file mode 100644 index 000000000000..e6f06e23dc04 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java @@ -0,0 +1,48 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.io.Closeable; +import java.nio.channels.WritableByteChannel; + +/** + * A channel for writing data to Google Cloud services. + * + *
<p>
Implementations of this class may further buffer data internally to reduce remote calls. + * Written data will only be visible after calling {@link #close()}. This interface implements + * {@link Restorable} to allow saving the writer's state to continue writing afterwards. + *
</p>
+ */ +public interface WriteChannel extends WritableByteChannel, Closeable, Restorable { + + /** + * Sets the minimum size that will be written by a single RPC. + * Written data will be buffered and only flushed upon reaching this size or closing the channel. + */ + void chunkSize(int chunkSize); + + /** + * Captures the write channel state so that it can be saved and restored afterwards. The original + * {@code WriteChannel} and the restored one should not both be used. Closing one channel + * causes the other channel to close; subsequent writes will fail. + * + * @return a {@link RestorableState} object that contains the write channel state and can restore + * it afterwards. + */ + @Override + RestorableState capture(); +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java new file mode 100644 index 000000000000..e49a17b019e0 --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java @@ -0,0 +1,144 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static junit.framework.TestCase.assertFalse; +import static junit.framework.TestCase.assertTrue; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.gcloud.spi.ServiceRpcFactory; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Random; + +public class BaseWriteChannelTest { + + private abstract static class CustomService implements Service {} + private abstract static class CustomServiceOptions + extends ServiceOptions { + + private static final long serialVersionUID = 3302358029307467197L; + + protected CustomServiceOptions( + Class> serviceFactoryClass, + Class> rpcFactoryClass, + Builder builder) { + super(serviceFactoryClass, rpcFactoryClass, builder); + } + } + + private static final Serializable ENTITY = 42L; + private static final String UPLOAD_ID = "uploadId"; + private static final byte[] CONTENT = {0xD, 0xE, 0xA, 0xD}; + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private static final Random RANDOM = new Random(); + private static BaseWriteChannel channel; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setUp() { + channel = new BaseWriteChannel(null, ENTITY, UPLOAD_ID) { + @Override + public RestorableState capture() { + return null; + } + + @Override + protected void flushBuffer(int length, boolean last) {} + + @Override + protected BaseState.Builder stateBuilder() { + return null; + } + }; + } + + @Test + public void testConstructor() throws 
IOException { + assertEquals(null, channel.options()); + assertEquals(ENTITY, channel.entity()); + assertEquals(0, channel.position()); + assertEquals(UPLOAD_ID, channel.uploadId()); + assertEquals(0, channel.limit()); + assertTrue(channel.isOpen()); + assertArrayEquals(new byte[0], channel.buffer()); + assertEquals(DEFAULT_CHUNK_SIZE, channel.chunkSize()); + } + + @Test + public void testClose() throws IOException { + channel.close(); + assertFalse(channel.isOpen()); + assertNull(channel.buffer()); + } + + @Test + public void testValidateOpen() throws IOException { + channel.close(); + thrown.expect(IOException.class); + thrown.expectMessage("stream is closed"); + channel.write(ByteBuffer.allocate(42)); + } + + @Test + public void testChunkSize() throws IOException { + channel.chunkSize(42); + assertEquals(MIN_CHUNK_SIZE, channel.chunkSize()); + channel.chunkSize(2 * MIN_CHUNK_SIZE); + assertEquals(2 * MIN_CHUNK_SIZE, channel.chunkSize()); + channel.chunkSize(512 * 1025); + assertEquals(2 * MIN_CHUNK_SIZE, channel.chunkSize()); + } + + @Test + public void testWrite() throws IOException { + channel.write(ByteBuffer.wrap(CONTENT)); + assertEquals(CONTENT.length, channel.limit()); + assertEquals(DEFAULT_CHUNK_SIZE, channel.buffer().length); + assertArrayEquals(Arrays.copyOf(CONTENT, DEFAULT_CHUNK_SIZE), channel.buffer()); + } + + @Test + public void testWriteAndFlush() throws IOException { + ByteBuffer content = randomBuffer(DEFAULT_CHUNK_SIZE + 1); + channel.write(content); + assertEquals(DEFAULT_CHUNK_SIZE, channel.position()); + assertEquals(1, channel.limit()); + byte[] newContent = new byte[DEFAULT_CHUNK_SIZE]; + newContent[0] = content.get(DEFAULT_CHUNK_SIZE); + assertArrayEquals(newContent, channel.buffer()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java index 1754be4df7dc..2f8a768f3669 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java @@ -17,6 +17,7 @@ package com.google.gcloud.examples; import com.google.common.collect.ImmutableMap; +import com.google.gcloud.WriteChannel; import com.google.gcloud.bigquery.BaseTableInfo; import com.google.gcloud.bigquery.BigQuery; import com.google.gcloud.bigquery.BigQueryError; @@ -33,6 +34,7 @@ import com.google.gcloud.bigquery.JobId; import com.google.gcloud.bigquery.JobInfo; import com.google.gcloud.bigquery.JobStatus; +import com.google.gcloud.bigquery.LoadConfiguration; import com.google.gcloud.bigquery.LoadJobInfo; import com.google.gcloud.bigquery.QueryRequest; import com.google.gcloud.bigquery.QueryResponse; @@ -42,6 +44,9 @@ import com.google.gcloud.bigquery.ViewInfo; import com.google.gcloud.spi.BigQueryRpc.Tuple; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Paths; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; @@ -77,7 +82,8 @@ * copy | * load
+ | * extract
+ | - * query "} + * query | + * load-file
"} * * * @@ -523,7 +529,7 @@ void run(BigQuery bigquery, JobInfo job) throws Exception { startedJob = bigquery.getJob(startedJob.jobId()); } if (startedJob.status().error() == null) { - System.out.println("Job " + startedJob.jobId().job() + " suceeded"); + System.out.println("Job " + startedJob.jobId().job() + " succeeded"); } else { System.out.println("Job " + startedJob.jobId().job() + " failed"); System.out.println("Error: " + startedJob.status().error()); @@ -544,8 +550,8 @@ LoadJobInfo parse(String... args) throws Exception { String table = args[1]; String format = args[2]; TableId tableId = TableId.of(dataset, table); - return LoadJobInfo.builder(tableId, Arrays.asList(args).subList(3, args.length)) - .formatOptions(FormatOptions.of(format)) + LoadConfiguration configuration = LoadConfiguration.of(tableId, FormatOptions.of(format)); + return LoadJobInfo.builder(configuration, Arrays.asList(args).subList(3, args.length)) .build(); } throw new IllegalArgumentException("Missing required arguments."); @@ -659,6 +665,47 @@ protected String params() { } } + /** + * This class demonstrates how to load data into a BigQuery Table from a local file. + * + * @see Resumable + * Upload + */ + private static class LoadFileAction extends BigQueryAction> { + @Override + void run(BigQuery bigquery, Tuple configuration) throws Exception { + System.out.println("Running insert"); + try (FileChannel fileChannel = FileChannel.open(Paths.get(configuration.y()))) { + ByteBuffer buffer = ByteBuffer.allocate(256 * 1024); + WriteChannel writeChannel = bigquery.writer(configuration.x()); + while (fileChannel.read(buffer) > 0) { + buffer.flip(); + writeChannel.write(buffer); + buffer.clear(); + } + writeChannel.close(); + } + } + + @Override + Tuple parse(String... args) throws Exception { + if (args.length == 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + LoadConfiguration configuration = LoadConfiguration.of(tableId, FormatOptions.of(format)); + return Tuple.of(configuration, args[3]); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
"; + } + } + static { CREATE_ACTIONS.put("dataset", new CreateDatasetAction()); CREATE_ACTIONS.put("table", new CreateSimpleTableAction()); @@ -682,6 +729,7 @@ protected String params() { ACTIONS.put("extract", new ExtractAction()); ACTIONS.put("copy", new CopyAction()); ACTIONS.put("query", new QueryAction()); + ACTIONS.put("load-file", new LoadFileAction()); } private static void printUsage() { diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java index dc3dba6c72ab..e3bee626f49c 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java @@ -18,12 +18,12 @@ import com.google.gcloud.AuthCredentials; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; +import com.google.gcloud.ReadChannel; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.BlobId; import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BlobReadChannel; -import com.google.gcloud.storage.BlobWriteChannel; import com.google.gcloud.storage.Bucket; import com.google.gcloud.storage.BucketInfo; import com.google.gcloud.storage.CopyWriter; @@ -258,7 +258,7 @@ private void run(Storage storage, Path uploadFrom, BlobInfo blobInfo) throws IOE // When content is not available or large (1MB or more) it is recommended // to write it in chunks via the blob's channel writer. Blob blob = new Blob(storage, blobInfo); - try (BlobWriteChannel writer = blob.writer()) { + try (WriteChannel writer = blob.writer()) { byte[] buffer = new byte[1024]; try (InputStream input = Files.newInputStream(uploadFrom)) { int limit; @@ -326,7 +326,7 @@ private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOExcep writeTo.write(content); } else { // When Blob size is big or unknown use the blob's channel reader. - try (BlobReadChannel reader = blob.reader()) { + try (ReadChannel reader = blob.reader()) { WritableByteChannel channel = Channels.newChannel(writeTo); ByteBuffer bytes = ByteBuffer.allocate(64 * 1024); while (reader.read(bytes) > 0) { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java index 5b305d15cee4..fe65f6ee010b 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java @@ -24,6 +24,8 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import com.google.gcloud.ReadChannel; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Storage.BlobTargetOption; import com.google.gcloud.storage.Storage.BlobWriteOption; @@ -321,24 +323,24 @@ public CopyWriter copyTo(String targetBucket, String targetBlob, BlobSourceOptio } /** - * Returns a {@code BlobReadChannel} object for reading this blob's content. + * Returns a {@code ReadChannel} object for reading this blob's content. * * @param options blob read options * @throws StorageException upon failure */ - public BlobReadChannel reader(BlobSourceOption... options) { + public ReadChannel reader(BlobSourceOption... 
options) { return storage.reader(info.blobId(), toSourceOptions(info, options)); } /** - * Returns a {@code BlobWriteChannel} object for writing to this blob. By default any md5 and + * Returns a {@code WriteChannel} object for writing to this blob. By default any md5 and * crc32c values in the current blob are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. * * @param options target blob options * @throws StorageException upon failure */ - public BlobWriteChannel writer(BlobWriteOption... options) { + public WriteChannel writer(BlobWriteOption... options) { return storage.writer(info, options); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java index 106d18466dac..984f5d1f72e9 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java @@ -16,46 +16,264 @@ package com.google.gcloud.storage; -import com.google.gcloud.Restorable; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.api.services.storage.model.StorageObject; +import com.google.common.base.MoreObjects; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.Tuple; -import java.io.Closeable; import java.io.IOException; -import java.nio.channels.ReadableByteChannel; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; /** - * A channel for reading data from a Google Cloud Storage object. - * - *

<p>Implementations of this class may buffer data internally to reduce remote calls. This - * interface implements {@link Restorable} to allow saving the reader's state to continue reading - * afterwards. - * </p>
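As a caller-facing illustration of the renamed type, the following sketch (not part of this patch) reads a blob in chunks and then resumes through capture() and restore(); the bucket and object names are placeholders and an existing Storage instance is assumed.

    import com.google.gcloud.ReadChannel;
    import com.google.gcloud.RestorableState;
    import com.google.gcloud.storage.BlobId;
    import com.google.gcloud.storage.Storage;

    import java.io.IOException;
    import java.nio.ByteBuffer;

    class ReadChannelSketch {
      static void readWithResume(Storage storage) throws IOException {
        ReadChannel reader = storage.reader(BlobId.of("my-bucket", "my-object"));
        reader.chunkSize(64 * 1024);                      // minimum bytes fetched per RPC
        ByteBuffer firstPart = ByteBuffer.allocate(64 * 1024);
        reader.read(firstPart);                           // data is buffered locally per chunk
        RestorableState<ReadChannel> state = reader.capture();
        ReadChannel restored = state.restore();           // resumes from the captured position
        ByteBuffer nextPart = ByteBuffer.allocate(64 * 1024);
        restored.read(nextPart);
        restored.close();
      }
    }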

+ * Default implementation for ReadChannel. */ -public interface BlobReadChannel extends ReadableByteChannel, Closeable, - Restorable { - - /** - * Overridden to remove IOException. - * - * @see java.nio.channels.Channel#close() - */ +class BlobReadChannel implements ReadChannel { + + private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; + + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private String lastEtag; + private int position; + private boolean isOpen; + private boolean endOfStream; + private int chunkSize = DEFAULT_CHUNK_SIZE; + + private final StorageRpc storageRpc; + private final StorageObject storageObject; + private int bufferPos; + private byte[] buffer; + + BlobReadChannel(StorageOptions serviceOptions, BlobId blob, + Map requestOptions) { + this.serviceOptions = serviceOptions; + this.blob = blob; + this.requestOptions = requestOptions; + isOpen = true; + storageRpc = serviceOptions.rpc(); + storageObject = blob.toPb(); + } + + @Override + public RestorableState capture() { + StateImpl.Builder builder = StateImpl.builder(serviceOptions, blob, requestOptions) + .position(position) + .isOpen(isOpen) + .endOfStream(endOfStream) + .chunkSize(chunkSize); + if (buffer != null) { + builder.position(position + bufferPos); + builder.endOfStream(false); + } + return builder.build(); + } + @Override - void close(); - - void seek(int position) throws IOException; - - /** - * Sets the minimum size that will be read by a single RPC. - * Read data will be locally buffered until consumed. - */ - void chunkSize(int chunkSize); - - /** - * Captures the read channel state so that it can be saved and restored afterwards. - * - * @return a {@link RestorableState} object that contains the read channel state and can restore - * it afterwards. - */ + public boolean isOpen() { + return isOpen; + } + + @Override + public void close() { + if (isOpen) { + buffer = null; + isOpen = false; + } + } + + private void validateOpen() throws IOException { + if (!isOpen) { + throw new IOException("stream is closed"); + } + } + + @Override + public void seek(int position) throws IOException { + validateOpen(); + this.position = position; + buffer = null; + bufferPos = 0; + endOfStream = false; + } + + @Override + public void chunkSize(int chunkSize) { + this.chunkSize = chunkSize <= 0 ? 
DEFAULT_CHUNK_SIZE : chunkSize; + } + @Override - RestorableState capture(); + public int read(ByteBuffer byteBuffer) throws IOException { + validateOpen(); + if (buffer == null) { + if (endOfStream) { + return -1; + } + final int toRead = Math.max(byteBuffer.remaining(), chunkSize); + try { + Tuple result = runWithRetries(new Callable>() { + @Override + public Tuple call() { + return storageRpc.read(storageObject, requestOptions, position, toRead); + } + }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); + if (lastEtag != null && !Objects.equals(result.x(), lastEtag)) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); + throw new StorageException(0, messageBuilder.toString(), false); + } + lastEtag = result.x(); + buffer = result.y(); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + if (toRead > buffer.length) { + endOfStream = true; + if (buffer.length == 0) { + buffer = null; + return -1; + } + } + } + int toWrite = Math.min(buffer.length - bufferPos, byteBuffer.remaining()); + byteBuffer.put(buffer, bufferPos, toWrite); + bufferPos += toWrite; + if (bufferPos >= buffer.length) { + position += buffer.length; + buffer = null; + bufferPos = 0; + } + return toWrite; + } + + static class StateImpl implements RestorableState, Serializable { + + private static final long serialVersionUID = 3889420316004453706L; + + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private final String lastEtag; + private final int position; + private final boolean isOpen; + private final boolean endOfStream; + private final int chunkSize; + + StateImpl(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.blob = builder.blob; + this.requestOptions = builder.requestOptions; + this.lastEtag = builder.lastEtag; + this.position = builder.position; + this.isOpen = builder.isOpen; + this.endOfStream = builder.endOfStream; + this.chunkSize = builder.chunkSize; + } + + static class Builder { + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private String lastEtag; + private int position; + private boolean isOpen; + private boolean endOfStream; + private int chunkSize; + + private Builder(StorageOptions options, BlobId blob, Map reqOptions) { + this.serviceOptions = options; + this.blob = blob; + this.requestOptions = reqOptions; + } + + Builder lastEtag(String lastEtag) { + this.lastEtag = lastEtag; + return this; + } + + Builder position(int position) { + this.position = position; + return this; + } + + Builder isOpen(boolean isOpen) { + this.isOpen = isOpen; + return this; + } + + Builder endOfStream(boolean endOfStream) { + this.endOfStream = endOfStream; + return this; + } + + Builder chunkSize(int chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder( + StorageOptions options, BlobId blob, Map reqOptions) { + return new Builder(options, blob, reqOptions); + } + + @Override + public ReadChannel restore() { + BlobReadChannel channel = new BlobReadChannel(serviceOptions, blob, requestOptions); + channel.lastEtag = lastEtag; + channel.position = position; + channel.isOpen = isOpen; + channel.endOfStream = endOfStream; + channel.chunkSize = chunkSize; + return channel; + } + + @Override + public int hashCode() { + 
return Objects.hash(serviceOptions, blob, requestOptions, lastEtag, position, isOpen, + endOfStream, chunkSize); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof StateImpl)) { + return false; + } + final StateImpl other = (StateImpl) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.blob, other.blob) + && Objects.equals(this.requestOptions, other.requestOptions) + && Objects.equals(this.lastEtag, other.lastEtag) + && this.position == other.position + && this.isOpen == other.isOpen + && this.endOfStream == other.endOfStream + && this.chunkSize == other.chunkSize; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("blob", blob) + .add("position", position) + .add("isOpen", isOpen) + .add("endOfStream", endOfStream) + .toString(); + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java deleted file mode 100644 index 8fe6eae66d8f..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.RetryHelper.runWithRetries; - -import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.Tuple; - -import java.io.IOException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.Callable; - -/** - * Default implementation for BlobReadChannel. 
- */ -class BlobReadChannelImpl implements BlobReadChannel { - - private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; - - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private String lastEtag; - private int position; - private boolean isOpen; - private boolean endOfStream; - private int chunkSize = DEFAULT_CHUNK_SIZE; - - private final StorageRpc storageRpc; - private final StorageObject storageObject; - private int bufferPos; - private byte[] buffer; - - BlobReadChannelImpl(StorageOptions serviceOptions, BlobId blob, - Map requestOptions) { - this.serviceOptions = serviceOptions; - this.blob = blob; - this.requestOptions = requestOptions; - isOpen = true; - storageRpc = serviceOptions.rpc(); - storageObject = blob.toPb(); - } - - @Override - public RestorableState capture() { - StateImpl.Builder builder = StateImpl.builder(serviceOptions, blob, requestOptions) - .position(position) - .isOpen(isOpen) - .endOfStream(endOfStream) - .chunkSize(chunkSize); - if (buffer != null) { - builder.position(position + bufferPos); - builder.endOfStream(false); - } - return builder.build(); - } - - @Override - public boolean isOpen() { - return isOpen; - } - - @Override - public void close() { - if (isOpen) { - buffer = null; - isOpen = false; - } - } - - private void validateOpen() throws IOException { - if (!isOpen) { - throw new IOException("stream is closed"); - } - } - - @Override - public void seek(int position) throws IOException { - validateOpen(); - this.position = position; - buffer = null; - bufferPos = 0; - endOfStream = false; - } - - @Override - public void chunkSize(int chunkSize) { - this.chunkSize = chunkSize <= 0 ? DEFAULT_CHUNK_SIZE : chunkSize; - } - - @Override - public int read(ByteBuffer byteBuffer) throws IOException { - validateOpen(); - if (buffer == null) { - if (endOfStream) { - return -1; - } - final int toRead = Math.max(byteBuffer.remaining(), chunkSize); - try { - Tuple result = runWithRetries(new Callable>() { - @Override - public Tuple call() { - return storageRpc.read(storageObject, requestOptions, position, toRead); - } - }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); - if (lastEtag != null && !Objects.equals(result.x(), lastEtag)) { - StringBuilder messageBuilder = new StringBuilder(); - messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); - throw new StorageException(0, messageBuilder.toString(), false); - } - lastEtag = result.x(); - buffer = result.y(); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - if (toRead > buffer.length) { - endOfStream = true; - if (buffer.length == 0) { - buffer = null; - return -1; - } - } - } - int toWrite = Math.min(buffer.length - bufferPos, byteBuffer.remaining()); - byteBuffer.put(buffer, bufferPos, toWrite); - bufferPos += toWrite; - if (bufferPos >= buffer.length) { - position += buffer.length; - buffer = null; - bufferPos = 0; - } - return toWrite; - } - - static class StateImpl implements RestorableState, Serializable { - - private static final long serialVersionUID = 3889420316004453706L; - - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private final String lastEtag; - private final int position; - private final boolean isOpen; - private final boolean endOfStream; - private final int chunkSize; - - StateImpl(Builder builder) { - this.serviceOptions = builder.serviceOptions; - this.blob = 
builder.blob; - this.requestOptions = builder.requestOptions; - this.lastEtag = builder.lastEtag; - this.position = builder.position; - this.isOpen = builder.isOpen; - this.endOfStream = builder.endOfStream; - this.chunkSize = builder.chunkSize; - } - - static class Builder { - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private String lastEtag; - private int position; - private boolean isOpen; - private boolean endOfStream; - private int chunkSize; - - private Builder(StorageOptions options, BlobId blob, Map reqOptions) { - this.serviceOptions = options; - this.blob = blob; - this.requestOptions = reqOptions; - } - - Builder lastEtag(String lastEtag) { - this.lastEtag = lastEtag; - return this; - } - - Builder position(int position) { - this.position = position; - return this; - } - - Builder isOpen(boolean isOpen) { - this.isOpen = isOpen; - return this; - } - - Builder endOfStream(boolean endOfStream) { - this.endOfStream = endOfStream; - return this; - } - - Builder chunkSize(int chunkSize) { - this.chunkSize = chunkSize; - return this; - } - - RestorableState build() { - return new StateImpl(this); - } - } - - static Builder builder( - StorageOptions options, BlobId blob, Map reqOptions) { - return new Builder(options, blob, reqOptions); - } - - @Override - public BlobReadChannel restore() { - BlobReadChannelImpl channel = new BlobReadChannelImpl(serviceOptions, blob, requestOptions); - channel.lastEtag = lastEtag; - channel.position = position; - channel.isOpen = isOpen; - channel.endOfStream = endOfStream; - channel.chunkSize = chunkSize; - return channel; - } - - @Override - public int hashCode() { - return Objects.hash(serviceOptions, blob, requestOptions, lastEtag, position, isOpen, - endOfStream, chunkSize); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (!(obj instanceof StateImpl)) { - return false; - } - final StateImpl other = (StateImpl) obj; - return Objects.equals(this.serviceOptions, other.serviceOptions) - && Objects.equals(this.blob, other.blob) - && Objects.equals(this.requestOptions, other.requestOptions) - && Objects.equals(this.lastEtag, other.lastEtag) - && this.position == other.position - && this.isOpen == other.isOpen - && this.endOfStream == other.endOfStream - && this.chunkSize == other.chunkSize; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("blob", blob) - .add("position", position) - .add("isOpen", isOpen) - .add("endOfStream", endOfStream) - .toString(); - } - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java index 9682c6345659..d1d12ec77638 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java @@ -16,37 +16,77 @@ package com.google.gcloud.storage; -import com.google.gcloud.Restorable; +import static com.google.gcloud.RetryHelper.runWithRetries; +import static java.util.concurrent.Executors.callable; + +import com.google.gcloud.BaseWriteChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.spi.StorageRpc; -import java.io.Closeable; -import java.nio.channels.WritableByteChannel; +import java.util.Map; /** - * A channel for 
writing data to a Google Cloud Storage object. - * - *

<p>Implementations of this class may further buffer data internally to reduce remote calls. - * Written data will only be visible after calling {@link #close()}. This interface implements - * {@link Restorable} to allow saving the writer's state to continue writing afterwards. - * </p>
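For comparison with the interface being removed here, this is a minimal usage sketch (not part of this patch) of the caller-facing WriteChannel. The bucket and object names are placeholders, and chunkSize(int) is assumed to carry over to the new WriteChannel interface just as it was declared on the old BlobWriteChannel.

    import com.google.gcloud.WriteChannel;
    import com.google.gcloud.storage.BlobInfo;
    import com.google.gcloud.storage.Storage;

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    class WriteChannelSketch {
      static void upload(Storage storage) throws IOException {
        BlobInfo blobInfo = BlobInfo.builder("my-bucket", "my-object").build();
        try (WriteChannel writer = storage.writer(blobInfo)) {
          writer.chunkSize(256 * 1024);   // buffer locally, flush in chunks of at least this size
          byte[] content = "hello world".getBytes(StandardCharsets.UTF_8);
          writer.write(ByteBuffer.wrap(content));
        }                                 // content becomes visible only after close()
      }
    }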

+ * Write channel implementation to upload Google Cloud Storage blobs. */ -public interface BlobWriteChannel extends WritableByteChannel, Closeable, - Restorable { - - /** - * Sets the minimum size that will be written by a single RPC. - * Written data will be buffered and only flushed upon reaching this size or closing the channel. - */ - void chunkSize(int chunkSize); - - /** - * Captures the write channel state so that it can be saved and restored afterwards. The original - * {@code BlobWriteChannel} and the restored one should not both be used. Closing one channel - * causes the other channel to close, subsequent writes will fail. - * - * @return a {@link RestorableState} object that contains the write channel state and can restore - * it afterwards. - */ +class BlobWriteChannel extends BaseWriteChannel { + + BlobWriteChannel(StorageOptions options, BlobInfo blob, Map optionsMap) { + this(options, blob, options.rpc().open(blob.toPb(), optionsMap)); + } + + BlobWriteChannel(StorageOptions options, BlobInfo blobInfo, String uploadId) { + super(options, blobInfo, uploadId); + } + @Override - RestorableState capture(); + protected void flushBuffer(final int length, final boolean last) { + try { + runWithRetries(callable(new Runnable() { + @Override + public void run() { + options().rpc().write(uploadId(), buffer(), 0, position(), length, last); + } + }), options().retryParams(), StorageImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + } + + protected StateImpl.Builder stateBuilder() { + return StateImpl.builder(options(), entity(), uploadId()); + } + + static class StateImpl extends BaseWriteChannel.BaseState { + + private static final long serialVersionUID = -9028324143780151286L; + + StateImpl(Builder builder) { + super(builder); + } + + static class Builder extends BaseWriteChannel.BaseState.Builder { + + private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { + super(options, blobInfo, uploadId); + } + + @Override + public RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { + return new Builder(options, blobInfo, uploadId); + } + + @Override + public WriteChannel restore() { + BlobWriteChannel channel = new BlobWriteChannel(serviceOptions, entity, uploadId); + channel.restore(this); + return channel; + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java deleted file mode 100644 index acde4178533c..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.RetryHelper.runWithRetries; -import static java.util.concurrent.Executors.callable; - -import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; - -import java.io.IOException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Map; -import java.util.Objects; - -/** - * Default implementation for BlobWriteChannel. - */ -class BlobWriteChannelImpl implements BlobWriteChannel { - - private static final int MIN_CHUNK_SIZE = 256 * 1024; - private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; - - private final StorageOptions options; - private final BlobInfo blobInfo; - private final String uploadId; - private int position; - private byte[] buffer = new byte[0]; - private int limit; - private boolean isOpen = true; - private int chunkSize = DEFAULT_CHUNK_SIZE; - - private final StorageRpc storageRpc; - private final StorageObject storageObject; - - BlobWriteChannelImpl(StorageOptions options, BlobInfo blobInfo, - Map optionsMap) { - this.options = options; - this.blobInfo = blobInfo; - storageRpc = options.rpc(); - storageObject = blobInfo.toPb(); - uploadId = storageRpc.open(storageObject, optionsMap); - } - - BlobWriteChannelImpl(StorageOptions options, BlobInfo blobInfo, String uploadId) { - this.options = options; - this.blobInfo = blobInfo; - this.uploadId = uploadId; - storageRpc = options.rpc(); - storageObject = blobInfo.toPb(); - } - - @Override - public RestorableState capture() { - byte[] bufferToSave = null; - if (isOpen) { - flush(); - bufferToSave = Arrays.copyOf(buffer, limit); - } - return StateImpl.builder(options, blobInfo, uploadId) - .position(position) - .buffer(bufferToSave) - .isOpen(isOpen) - .chunkSize(chunkSize) - .build(); - } - - private void flush() { - if (limit >= chunkSize) { - final int length = limit - limit % MIN_CHUNK_SIZE; - try { - runWithRetries(callable(new Runnable() { - @Override - public void run() { - storageRpc.write(uploadId, buffer, 0, position, length, false); - } - }), options.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - position += length; - limit -= length; - byte[] temp = new byte[chunkSize]; - System.arraycopy(buffer, length, temp, 0, limit); - buffer = temp; - } - } - - private void validateOpen() throws IOException { - if (!isOpen) { - throw new IOException("stream is closed"); - } - } - - @Override - public int write(ByteBuffer byteBuffer) throws IOException { - validateOpen(); - int toWrite = byteBuffer.remaining(); - int spaceInBuffer = buffer.length - limit; - if (spaceInBuffer >= toWrite) { - byteBuffer.get(buffer, limit, toWrite); - } else { - buffer = Arrays.copyOf(buffer, Math.max(chunkSize, buffer.length + toWrite - spaceInBuffer)); - byteBuffer.get(buffer, limit, toWrite); - } - limit += toWrite; - flush(); - return toWrite; - } - - @Override - public boolean isOpen() { - return isOpen; - } - - @Override - public void close() throws IOException { - if (isOpen) { - try { - runWithRetries(callable(new Runnable() { - @Override - public void run() { - storageRpc.write(uploadId, buffer, 0, position, limit, true); - } - }), options.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - 
throw StorageException.translateAndThrow(e); - } - position += buffer.length; - isOpen = false; - buffer = null; - } - } - - @Override - public void chunkSize(int chunkSize) { - chunkSize = (chunkSize / MIN_CHUNK_SIZE) * MIN_CHUNK_SIZE; - this.chunkSize = Math.max(MIN_CHUNK_SIZE, chunkSize); - } - - static class StateImpl implements RestorableState, Serializable { - - private static final long serialVersionUID = 8541062465055125619L; - - private final StorageOptions serviceOptions; - private final BlobInfo blobInfo; - private final String uploadId; - private final int position; - private final byte[] buffer; - private final boolean isOpen; - private final int chunkSize; - - StateImpl(Builder builder) { - this.serviceOptions = builder.serviceOptions; - this.blobInfo = builder.blobInfo; - this.uploadId = builder.uploadId; - this.position = builder.position; - this.buffer = builder.buffer; - this.isOpen = builder.isOpen; - this.chunkSize = builder.chunkSize; - } - - static class Builder { - private final StorageOptions serviceOptions; - private final BlobInfo blobInfo; - private final String uploadId; - private int position; - private byte[] buffer; - private boolean isOpen; - private int chunkSize; - - private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - this.serviceOptions = options; - this.blobInfo = blobInfo; - this.uploadId = uploadId; - } - - Builder position(int position) { - this.position = position; - return this; - } - - Builder buffer(byte[] buffer) { - this.buffer = buffer; - return this; - } - - Builder isOpen(boolean isOpen) { - this.isOpen = isOpen; - return this; - } - - Builder chunkSize(int chunkSize) { - this.chunkSize = chunkSize; - return this; - } - - RestorableState build() { - return new StateImpl(this); - } - } - - static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - return new Builder(options, blobInfo, uploadId); - } - - @Override - public BlobWriteChannel restore() { - BlobWriteChannelImpl channel = new BlobWriteChannelImpl(serviceOptions, blobInfo, uploadId); - if (buffer != null) { - channel.buffer = buffer.clone(); - channel.limit = buffer.length; - } - channel.position = position; - channel.isOpen = isOpen; - channel.chunkSize = chunkSize; - return channel; - } - - @Override - public int hashCode() { - return Objects.hash(serviceOptions, blobInfo, uploadId, position, isOpen, chunkSize, - Arrays.hashCode(buffer)); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (!(obj instanceof StateImpl)) { - return false; - } - final StateImpl other = (StateImpl) obj; - return Objects.equals(this.serviceOptions, other.serviceOptions) - && Objects.equals(this.blobInfo, other.blobInfo) - && Objects.equals(this.uploadId, other.uploadId) - && Objects.deepEquals(this.buffer, other.buffer) - && this.position == other.position - && this.isOpen == other.isOpen - && this.chunkSize == other.chunkSize; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("blobInfo", blobInfo) - .add("uploadId", uploadId) - .add("position", position) - .add("isOpen", isOpen) - .toString(); - } - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index 85e79a8e9abf..f8c90ff42930 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ 
b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -26,7 +26,9 @@ import com.google.common.collect.Sets; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.Service; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.spi.StorageRpc.Tuple; @@ -1423,7 +1425,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * * @throws StorageException upon failure */ - BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options); + ReadChannel reader(String bucket, String blob, BlobSourceOption... options); /** * Return a channel for reading the blob's content. If {@code blob.generation()} is set @@ -1439,7 +1441,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * * @throws StorageException upon failure */ - BlobReadChannel reader(BlobId blob, BlobSourceOption... options); + ReadChannel reader(BlobId blob, BlobSourceOption... options); /** * Create a blob and return a channel for writing its content. By default any md5 and crc32c @@ -1448,7 +1450,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * * @throws StorageException upon failure */ - BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options); + WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options); /** * Generates a signed URL for a blob. diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index 93fc202febef..a6c851d0f638 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -49,6 +49,7 @@ import com.google.gcloud.Page; import com.google.gcloud.PageImpl; import com.google.gcloud.PageImpl.NextPageFetcher; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.spi.StorageRpc.RewriteResponse; @@ -517,15 +518,15 @@ private List> transformBatch } @Override - public BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { Map optionsMap = optionMap(options); - return new BlobReadChannelImpl(options(), BlobId.of(bucket, blob), optionsMap); + return new BlobReadChannel(options(), BlobId.of(bucket, blob), optionsMap); } @Override - public BlobReadChannel reader(BlobId blob, BlobSourceOption... options) { + public ReadChannel reader(BlobId blob, BlobSourceOption... options) { Map optionsMap = optionMap(blob, options); - return new BlobReadChannelImpl(options(), blob, optionsMap); + return new BlobReadChannel(options(), blob, optionsMap); } @Override @@ -536,7 +537,7 @@ public BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options) { private BlobWriteChannel writer(BlobInfo blobInfo, BlobTargetOption... 
options) { final Map optionsMap = optionMap(blobInfo, options); - return new BlobWriteChannelImpl(options(), blobInfo, optionsMap); + return new BlobWriteChannel(options(), blobInfo, optionsMap); } @Override diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java similarity index 86% rename from gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java rename to gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java index 7daf4a6fb468..ffb37e8c5032 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java @@ -27,6 +27,7 @@ import static org.junit.Assert.fail; import com.google.common.collect.ImmutableMap; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; import com.google.gcloud.spi.StorageRpc; @@ -42,7 +43,7 @@ import java.util.Map; import java.util.Random; -public class BlobReadChannelImplTest { +public class BlobReadChannelTest { private static final String BUCKET_NAME = "b"; private static final String BLOB_NAME = "n"; @@ -55,7 +56,7 @@ public class BlobReadChannelImplTest { private StorageOptions options; private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; - private BlobReadChannelImpl reader; + private BlobReadChannel reader; @Before public void setUp() { @@ -78,13 +79,13 @@ public void tearDown() throws Exception { @Test public void testCreate() { replay(storageRpcMock); - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); assertTrue(reader.isOpen()); } @Test public void testReadBuffered() throws IOException { - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer firstReadBuffer = ByteBuffer.allocate(42); ByteBuffer secondReadBuffer = ByteBuffer.allocate(42); @@ -102,7 +103,7 @@ public void testReadBuffered() throws IOException { @Test public void testReadBig() throws IOException { - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.chunkSize(CUSTOM_CHUNK_SIZE); byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); @@ -123,7 +124,7 @@ public void testReadBig() throws IOException { @Test public void testReadFinish() throws IOException { - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); byte[] result = {}; ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) @@ -134,7 +135,7 @@ public void testReadFinish() throws IOException { @Test public void testSeek() throws IOException { - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.seek(42); byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); @@ -148,7 +149,7 @@ public void testSeek() 
throws IOException { @Test public void testClose() { replay(storageRpcMock); - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); assertTrue(reader.isOpen()); reader.close(); assertTrue(!reader.isOpen()); @@ -157,7 +158,7 @@ public void testClose() { @Test public void testReadClosed() { replay(storageRpcMock); - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.close(); try { ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); @@ -171,7 +172,7 @@ public void testReadClosed() { @Test public void testReadGenerationChanged() throws IOException { BlobId blobId = BlobId.of(BUCKET_NAME, BLOB_NAME); - reader = new BlobReadChannelImpl(options, blobId, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, blobId, EMPTY_RPC_OPTIONS); byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); @@ -185,7 +186,7 @@ public void testReadGenerationChanged() throws IOException { reader.read(firstReadBuffer); try { reader.read(secondReadBuffer); - fail("Expected BlobReadChannel read to throw StorageException"); + fail("Expected ReadChannel read to throw StorageException"); } catch (StorageException ex) { StringBuilder messageBuilder = new StringBuilder(); messageBuilder.append("Blob ").append(blobId).append(" was updated while reading"); @@ -204,10 +205,10 @@ public void testSaveAndRestore() throws IOException { expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) .andReturn(StorageRpc.Tuple.of("etag", secondResult)); replay(storageRpcMock); - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.read(firstReadBuffer); - RestorableState readerState = reader.capture(); - BlobReadChannel restoredReader = readerState.restore(); + RestorableState readerState = reader.capture(); + ReadChannel restoredReader = readerState.restore(); restoredReader.read(secondReadBuffer); assertArrayEquals(Arrays.copyOf(firstResult, firstReadBuffer.capacity()), firstReadBuffer.array()); @@ -217,11 +218,11 @@ public void testSaveAndRestore() throws IOException { @Test public void testStateEquals() { replay(storageRpcMock); - reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); @SuppressWarnings("resource") // avoid closing when you don't want partial writes to GCS - BlobReadChannel secondReader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); - RestorableState state = reader.capture(); - RestorableState secondState = secondReader.capture(); + ReadChannel secondReader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); + RestorableState state = reader.capture(); + RestorableState secondState = secondReader.capture(); assertEquals(state, secondState); assertEquals(state.hashCode(), secondState.hashCode()); assertEquals(state.toString(), secondState.toString()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java index bc6a4725d7e7..586e7fd0fd39 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java +++ 
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java @@ -31,6 +31,7 @@ import static org.junit.Assert.assertTrue; import com.google.api.client.util.Lists; +import com.google.gcloud.ReadChannel; import com.google.gcloud.storage.Storage.CopyRequest; import org.easymock.Capture; @@ -188,7 +189,7 @@ public void testCopyToBlobId() throws Exception { @Test public void testReader() throws Exception { - BlobReadChannel channel = createMock(BlobReadChannel.class); + ReadChannel channel = createMock(ReadChannel.class); expect(storage.reader(BLOB_INFO.blobId())).andReturn(channel); replay(storage); assertSame(channel, blob.reader()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java similarity index 85% rename from gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java rename to gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java index 518ba8e14c65..e499f6b9de52 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java @@ -33,6 +33,7 @@ import com.google.common.collect.ImmutableMap; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.spi.StorageRpcFactory; @@ -48,7 +49,7 @@ import java.util.Map; import java.util.Random; -public class BlobWriteChannelImplTest { +public class BlobWriteChannelTest { private static final String BUCKET_NAME = "b"; private static final String BLOB_NAME = "n"; @@ -63,7 +64,7 @@ public class BlobWriteChannelImplTest { private StorageOptions options; private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; - private BlobWriteChannelImpl writer; + private BlobWriteChannel writer; @Before public void setUp() { @@ -88,7 +89,7 @@ public void tearDown() throws Exception { public void testCreate() { expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); assertTrue(writer.isOpen()); } @@ -96,7 +97,7 @@ public void testCreate() { public void testWriteWithoutFlush() throws IOException { expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); } @@ -107,7 +108,7 @@ public void testWriteWithFlush() throws IOException { storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(CUSTOM_CHUNK_SIZE), eq(false)); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); writer.chunkSize(CUSTOM_CHUNK_SIZE); ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE); assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); @@ -121,7 +122,7 @@ public void testWritesAndFlush() throws IOException { storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), 
eq(DEFAULT_CHUNK_SIZE), eq(false)); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; for (int i = 0; i < buffers.length; i++) { buffers[i] = randomBuffer(MIN_CHUNK_SIZE); @@ -141,7 +142,7 @@ public void testCloseWithoutFlush() throws IOException { Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); assertTrue(writer.isOpen()); writer.close(); assertArrayEquals(new byte[0], capturedBuffer.getValue()); @@ -156,7 +157,7 @@ public void testCloseWithFlush() throws IOException { storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); assertTrue(writer.isOpen()); writer.write(buffer); writer.close(); @@ -171,7 +172,7 @@ public void testWriteClosed() throws IOException { Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); writer.close(); try { writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); @@ -192,12 +193,12 @@ public void testSaveAndRestore() throws IOException { replay(storageRpcMock); ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); assertEquals(new Long(0L), capturedPosition.getValues().get(0)); - RestorableState writerState = writer.capture(); - BlobWriteChannel restoredWriter = writerState.restore(); + RestorableState writerState = writer.capture(); + WriteChannel restoredWriter = writerState.restore(); assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2)); assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1)); assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1)); @@ -209,32 +210,31 @@ public void testSaveAndRestoreClosed() throws IOException { Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); writer.close(); - RestorableState writerState = writer.capture(); - RestorableState expectedWriterState = - BlobWriteChannelImpl.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID) + RestorableState writerState = writer.capture(); + RestorableState expectedWriterState = + BlobWriteChannel.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID) .buffer(null) .chunkSize(DEFAULT_CHUNK_SIZE) .isOpen(false) .position(0) .build(); - 
BlobWriteChannel restoredWriter = writerState.restore(); + WriteChannel restoredWriter = writerState.restore(); assertArrayEquals(new byte[0], capturedBuffer.getValue()); assertEquals(expectedWriterState, restoredWriter.capture()); } @Test public void testStateEquals() { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID) - .times(2); + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID).times(2); replay(storageRpcMock); - writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); // avoid closing when you don't want partial writes to GCS upon failure @SuppressWarnings("resource") - BlobWriteChannel writer2 = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS); - RestorableState state = writer.capture(); - RestorableState state2 = writer2.capture(); + WriteChannel writer2 = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + RestorableState state = writer.capture(); + RestorableState state2 = writer2.capture(); assertEquals(state, state2); assertEquals(state.hashCode(), state2.hashCode()); assertEquals(state.toString(), state2.toString()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java index 30ce858dc20e..614ceee7b61e 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java @@ -29,7 +29,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.WriteChannel; import com.google.gcloud.storage.Storage.BlobField; import com.google.gcloud.storage.Storage.BucketField; import com.google.gcloud.storage.testing.RemoteGcsHelper; @@ -702,14 +704,14 @@ public void testReadAndWriteChannels() throws IOException { String blobName = "test-read-and-write-channels-blob"; BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); byte[] stringBytes; - try (BlobWriteChannel writer = storage.writer(blob)) { + try (WriteChannel writer = storage.writer(blob)) { stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT)); writer.write(ByteBuffer.wrap(stringBytes)); } ByteBuffer readBytes; ByteBuffer readStringBytes; - try (BlobReadChannel reader = storage.reader(blob.blobId())) { + try (ReadChannel reader = storage.reader(blob.blobId())) { readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length); readStringBytes = ByteBuffer.allocate(stringBytes.length); reader.read(readBytes); @@ -725,21 +727,21 @@ public void testReadAndWriteCaptureChannels() throws IOException { String blobName = "test-read-and-write-capture-channels-blob"; BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); byte[] stringBytes; - BlobWriteChannel writer = storage.writer(blob); + WriteChannel writer = storage.writer(blob); stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT)); - RestorableState writerState = writer.capture(); - BlobWriteChannel secondWriter = writerState.restore(); + RestorableState writerState = writer.capture(); + WriteChannel secondWriter = writerState.restore(); secondWriter.write(ByteBuffer.wrap(stringBytes)); secondWriter.close(); 
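The test above hands an in-progress upload from one channel to another inside a single process. Since the captured state is also meant to be serializable (see the SerializationTest changes later in this patch), the same handoff can cross process boundaries; the sketch below is illustrative only, uses a caller-supplied state file, and assumes the captured type is RestorableState<WriteChannel>.

    import com.google.gcloud.RestorableState;
    import com.google.gcloud.WriteChannel;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import java.nio.ByteBuffer;

    class ResumableUploadSketch {
      // Persist the captured writer state so the upload can continue in another process.
      static void save(WriteChannel writer, File stateFile) throws IOException {
        RestorableState<WriteChannel> state = writer.capture();
        try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(stateFile))) {
          out.writeObject(state);
        }
      }

      @SuppressWarnings("unchecked")
      static void resume(File stateFile, byte[] remaining)
          throws IOException, ClassNotFoundException {
        RestorableState<WriteChannel> state;
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(stateFile))) {
          state = (RestorableState<WriteChannel>) in.readObject();
        }
        WriteChannel writer = state.restore();   // continues from the last flushed position
        writer.write(ByteBuffer.wrap(remaining));
        writer.close();
      }
    }

Note that, as the removed interface documentation states, the original channel and the restored one should not both be used: closing one causes the other to close and subsequent writes to fail.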
ByteBuffer readBytes; ByteBuffer readStringBytes; - BlobReadChannel reader = storage.reader(blob.blobId()); + ReadChannel reader = storage.reader(blob.blobId()); reader.chunkSize(BLOB_BYTE_CONTENT.length); readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length); reader.read(readBytes); - RestorableState readerState = reader.capture(); - BlobReadChannel secondReader = readerState.restore(); + RestorableState readerState = reader.capture(); + ReadChannel secondReader = readerState.restore(); readStringBytes = ByteBuffer.allocate(stringBytes.length); secondReader.read(readStringBytes); reader.close(); @@ -754,14 +756,14 @@ public void testReadChannelFail() throws IOException { String blobName = "test-read-channel-blob-fail"; BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - try (BlobReadChannel reader = + try (ReadChannel reader = storage.reader(blob.blobId(), Storage.BlobSourceOption.metagenerationMatch(-1L))) { reader.read(ByteBuffer.allocate(42)); fail("StorageException was expected"); } catch (StorageException ex) { // expected } - try (BlobReadChannel reader = + try (ReadChannel reader = storage.reader(blob.blobId(), Storage.BlobSourceOption.generationMatch(-1L))) { reader.read(ByteBuffer.allocate(42)); fail("StorageException was expected"); @@ -769,7 +771,7 @@ public void testReadChannelFail() throws IOException { // expected } BlobId blobIdWrongGeneration = BlobId.of(BUCKET, blobName, -1L); - try (BlobReadChannel reader = + try (ReadChannel reader = storage.reader(blobIdWrongGeneration, Storage.BlobSourceOption.generationMatch())) { reader.read(ByteBuffer.allocate(42)); fail("StorageException was expected"); @@ -791,13 +793,13 @@ public void testReadChannelFailUpdatedGeneration() throws IOException { BlobInfo remoteBlob = storage.create(blob, content); assertNotNull(remoteBlob); assertEquals(blobSize, (long) remoteBlob.size()); - try (BlobReadChannel reader = storage.reader(blob.blobId())) { + try (ReadChannel reader = storage.reader(blob.blobId())) { reader.chunkSize(chunkSize); ByteBuffer readBytes = ByteBuffer.allocate(chunkSize); int numReadBytes = reader.read(readBytes); assertEquals(chunkSize, numReadBytes); assertArrayEquals(Arrays.copyOf(content, chunkSize), readBytes.array()); - try (BlobWriteChannel writer = storage.writer(blob)) { + try (WriteChannel writer = storage.writer(blob)) { byte[] newContent = new byte[blobSize]; random.nextBytes(newContent); int numWrittenBytes = writer.write(ByteBuffer.wrap(newContent)); @@ -819,8 +821,7 @@ public void testWriteChannelFail() throws IOException { String blobName = "test-write-channel-blob-fail"; BlobInfo blob = BlobInfo.builder(BUCKET, blobName, -1L).build(); try { - try (BlobWriteChannel writer = - storage.writer(blob, Storage.BlobWriteOption.generationMatch())) { + try (WriteChannel writer = storage.writer(blob, Storage.BlobWriteOption.generationMatch())) { writer.write(ByteBuffer.allocate(42)); } fail("StorageException was expected"); @@ -835,7 +836,7 @@ public void testWriteChannelExistingBlob() throws IOException { BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); BlobInfo remoteBlob = storage.create(blob); byte[] stringBytes; - try (BlobWriteChannel writer = storage.writer(remoteBlob)) { + try (WriteChannel writer = storage.writer(remoteBlob)) { stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); writer.write(ByteBuffer.wrap(stringBytes)); } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java 
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java index 555e231f7f0e..8506e8b48f6b 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java @@ -22,8 +22,10 @@ import com.google.common.collect.ImmutableMap; import com.google.gcloud.AuthCredentials; import com.google.gcloud.PageImpl; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Acl.Project.ProjectRole; @@ -112,10 +114,10 @@ public void testReadChannelState() throws IOException, ClassNotFoundException { .projectId("p2") .retryParams(RetryParams.defaultInstance()) .build(); - BlobReadChannel reader = - new BlobReadChannelImpl(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS); - RestorableState state = reader.capture(); - RestorableState deserializedState = serializeAndDeserialize(state); + ReadChannel reader = + new BlobReadChannel(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS); + RestorableState state = reader.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); assertEquals(state, deserializedState); assertEquals(state.hashCode(), deserializedState.hashCode()); assertEquals(state.toString(), deserializedState.toString()); @@ -130,10 +132,10 @@ public void testWriteChannelState() throws IOException, ClassNotFoundException { .build(); // avoid closing when you don't want partial writes to GCS upon failure @SuppressWarnings("resource") - BlobWriteChannelImpl writer = new BlobWriteChannelImpl( - options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id"); - RestorableState state = writer.capture(); - RestorableState deserializedState = serializeAndDeserialize(state); + BlobWriteChannel writer = + new BlobWriteChannel(options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id"); + RestorableState state = writer.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); assertEquals(state, deserializedState); assertEquals(state.hashCode(), deserializedState.hashCode()); assertEquals(state.toString(), deserializedState.toString()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java index 3aaec047714f..0e1f1a0b2f52 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java @@ -33,8 +33,10 @@ import com.google.common.io.BaseEncoding; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RetryParams; import com.google.gcloud.ServiceOptions; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.spi.StorageRpcFactory; @@ -1011,7 +1013,7 @@ public Tuple apply(StorageObject f) { public void testReader() { EasyMock.replay(storageRpcMock); storage = options.service(); - BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1); + ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1); assertNotNull(channel); assertTrue(channel.isOpen()); } @@ -1024,7 +1026,7 @@ public void 
testReaderWithOptions() throws IOException { .andReturn(StorageRpc.Tuple.of("etag", result)); EasyMock.replay(storageRpcMock); storage = options.service(); - BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION, + ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION, BLOB_SOURCE_METAGENERATION); assertNotNull(channel); assertTrue(channel.isOpen()); @@ -1039,7 +1041,7 @@ public void testReaderWithOptionsFromBlobId() throws IOException { .andReturn(StorageRpc.Tuple.of("etag", result)); EasyMock.replay(storageRpcMock); storage = options.service(); - BlobReadChannel channel = storage.reader(BLOB_INFO1.blobId(), + ReadChannel channel = storage.reader(BLOB_INFO1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION); assertNotNull(channel); assertTrue(channel.isOpen()); @@ -1055,7 +1057,7 @@ public void testWriter() { .andReturn("upload-id"); EasyMock.replay(storageRpcMock); storage = options.service(); - BlobWriteChannel channel = storage.writer(infoWithHashes); + WriteChannel channel = storage.writer(infoWithHashes); assertNotNull(channel); assertTrue(channel.isOpen()); } @@ -1067,7 +1069,7 @@ public void testWriterWithOptions() { .andReturn("upload-id"); EasyMock.replay(storageRpcMock); storage = options.service(); - BlobWriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST, + WriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST, BLOB_WRITE_PREDEFINED_ACL, BLOB_WRITE_CRC2C, BLOB_WRITE_MD5_HASH); assertNotNull(channel); assertTrue(channel.isOpen()); From 2bf4925cb3fa8fa782b78de164e3a841bf5fdf4f Mon Sep 17 00:00:00 2001 From: Martin Derka Date: Fri, 15 Jan 2016 17:54:12 -0800 Subject: [PATCH 02/18] Initial project for Google Cloud DNS in gcloud-java --- gcloud-java-dns/pom.xml | 54 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 gcloud-java-dns/pom.xml diff --git a/gcloud-java-dns/pom.xml b/gcloud-java-dns/pom.xml new file mode 100644 index 000000000000..55d720bc0a36 --- /dev/null +++ b/gcloud-java-dns/pom.xml @@ -0,0 +1,54 @@ + + + 4.0.0 + com.google.gcloud + gcloud-java-dns + jar + GCloud Java DNS + + Java idiomatic client for Google Cloud DNS. + + + com.google.gcloud + gcloud-java-pom + 0.1.3-SNAPSHOT + + + gcloud-java-dns + + + + ${project.groupId} + gcloud-java-core + ${project.version} + + + com.google.apis + google-api-services-dns + v1-rev7-1.21.0 + compile + + + com.google.guava + guava-jdk5 + + + com.google.api-client + google-api-client + + + + + junit + junit + 4.12 + test + + + org.easymock + easymock + 3.3 + test + + + From fe4e137fce65882465ca0e092761e080f366fece Mon Sep 17 00:00:00 2001 From: Martin Derka Date: Fri, 15 Jan 2016 17:56:24 -0800 Subject: [PATCH 03/18] Added DnsRecord as a part of the basic data model. ManagedZoneInfo is to be completed and it is included only as it is required as a builder parameter. This class will change in the near future. 
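To show the intent of the data model introduced below, here is a hedged sketch (not part of this patch) that derives a modified record from an existing one via the public copy constructor of DnsRecord.Builder added in this commit; the ttl(int) and build() method names are assumptions introduced only to complete the illustration, and the commit message itself notes the class is still expected to change.

    import com.google.gcloud.dns.DnsRecord;

    class DnsRecordCopySketch {
      // Builder(DnsRecord) is part of this commit; ttl(...) and build() are hypothetical
      // method names used only for illustration.
      static DnsRecord withShorterTtl(DnsRecord record) {
        return new DnsRecord.Builder(record)
            .ttl(300)      // seconds; the class defaults to 86400 (24 hours)
            .build();
      }
    }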
--- .../java/com/google/gcloud/dns/DnsRecord.java | 254 ++++++++++++++++++ .../google/gcloud/dns/ManagedZoneInfo.java | 44 +++ .../com/google/gcloud/dns/DnsRecordTest.java | 94 +++++++ 3 files changed, 392 insertions(+) create mode 100644 gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java create mode 100644 gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java create mode 100644 gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java new file mode 100644 index 000000000000..8abf335969f8 --- /dev/null +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java @@ -0,0 +1,254 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.gcloud.dns; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.collect.ImmutableList; + +import java.util.LinkedList; +import java.util.List; + +/** + * A class that represents Google Cloud DNS record set. + * + *
<p>
+ * A unit of data that will be returned by the DNS servers. + * + * @see Google + * Cloud DNS documentation + */ +public class DnsRecord { + + private String name; + private List rrdatas = new LinkedList<>(); + private Integer ttl = 86400; // the default ttl of 24 hours + private DnsRecordType type; + private String parentName; + private Long parentId; + + /** + * A private constructor. Obtain an instance using {@link DnsRecord#Builder}. + */ + private DnsRecord() { + } + + DnsRecord(Builder builder) { + this.name = builder.name; + this.rrdatas = ImmutableList.copyOf(builder.rrdatas); + this.ttl = builder.ttl; + this.type = builder.type; + this.parentName = builder.parentName; + this.parentId = builder.parentId; + } + + /** + * Enum for the DNS record types supported by Cloud DNS. + * + *
<p>
+ * Google Cloud DNS currently supports records of type A, AAAA, CNAME, MX + * NAPTR, NS, PTR, SOA, SPF, SRV, TXT. + * + * @see + * Cloud + * DNS supported record types + */ + public enum DnsRecordType { + A("A"), + AAAA("AAAA"), + CNAME("CNAME"), + MX("MX"), + NAPTR("NAPTR"), + NS("NS"), + PTR("PTR"), + SOA("SOA"), + SPF("SPF"), + SRV("SRV"), + TXT("TXT"); + + private final String type; + + private DnsRecordType(String type) { + this.type = type; + } + } + + public static class Builder { + + private List rrdatas = new LinkedList<>(); + private String name; + private Integer ttl = 86400; // default ttl of 24 hours + private DnsRecordType type; + private String parentName; + private Long parentId; + + private Builder() { + } + + /** + * Creates a builder and pre-populates attributes with the values from the + * provided DnsRecord instance. + */ + public Builder(DnsRecord record) { + this.name = record.name; + this.ttl = record.ttl; + this.type = record.type; + this.parentId = record.parentId; + this.parentName = record.parentName; + this.rrdatas.addAll(record.rrdatas); + } + + /** + * Adds a record to the record set. + * + *
<p>
+ * The records should be as defined in RFC 1035 (section 5) and RFC 1034 + * (section 3.6.1). Examples of records are available in + * Cloud + * DNS documentation. + */ + public Builder add(String record) { + this.rrdatas.add(checkNotNull(record)); + return this; + } + + /** + * Sets name for this DNS record set. For example, www.example.com. + */ + public Builder name(String name) { + this.name = checkNotNull(name); + return this; + } + + /** + * Sets the number of seconds that this record can be cached by resolvers. + * This number must be non-negative. + * + * @param ttl A non-negative number of seconds + */ + public Builder ttl(int ttl) { + // change only if + if (ttl < 0) { + throw new IllegalArgumentException( + "TTL cannot be negative. The supplied value was " + ttl + "." + ); + } + this.ttl = ttl; + return this; + } + + /** + * The identifier of a supported record type, for example, A, AAAA, MX, TXT, + * and so on. + */ + public Builder type(DnsRecordType type) { + this.type = checkNotNull(type); + return this; + } + + /** + * Builds the DNS record. + */ + public DnsRecord build() { + return new DnsRecord(this); + } + + /** + * Sets references to the managed zone that this DNS record belongs to. + */ + public Builder managedZone(ManagedZoneInfo parent) { + checkNotNull(parent); + this.parentId = parent.id(); + this.parentName = parent.name(); + return this; + } + } + + /** + * Creates a builder pre-populated with the attribute values of this instance. + */ + public Builder toBuilder() { + return new Builder(this); + } + + /** + * Creates an empty builder + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Get user assigned name of this DNS record. + * + * TODO: is this field mandatory? + */ + public String name() { + return name; + } + + /** + * Returns a list of DNS record stored in this record set. + */ + public List rrdatas() { + return rrdatas; + } + + /** + * Returns the number of seconds that this ResourceRecordSet can be cached by + * resolvers. + * + *
<p>
+ * This number is provided by the user. If this values is not set, we use + * default of 86400. + */ + public Integer ttl() { + return ttl; + } + + /** + * Returns the type of this DNS record. + */ + public DnsRecordType type() { + return type; + } + + /** + * Returns name of the managed zone that this record belongs to. + * + *
<p>
+ * The name of the managed zone is provided by the user when the managed zone + * is created. It is unique within a project. If this DNS record is not + * associated with a managed zone, this returns null. + */ + public String parentName() { + return parentName; + } + + /** + * Returns name of the managed zone that this record belongs to. + * + *
<p>
+ * The id of the managed zone is determined by the server when the managed + * zone is created. It is a read only value. If this DNS record is not + * associated with a managed zone, or if the id of the managed zone was not + * loaded from the cloud service, this returns null. + */ + public Long parentId() { + return parentId; + } + +} diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java new file mode 100644 index 000000000000..003854a91918 --- /dev/null +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java @@ -0,0 +1,44 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.gcloud.dns; + +/** + * TODO: Implement. + * TODO: Add documentation. + */ +public class ManagedZoneInfo { + + private final String name; + private final Long id; + + public String name() { + throw new UnsupportedOperationException("Not implemented yet."); + // TODO: Implement + } + + public Long id() { + return id; + // TODO: Implement + } + + private ManagedZoneInfo() { + name = null; + id = null; + throw new UnsupportedOperationException("Not implemented yet"); + // TODO: Implement + } + +} diff --git a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java new file mode 100644 index 000000000000..0709ca3bf0e4 --- /dev/null +++ b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.gcloud.dns; + +import org.easymock.EasyMock; + +import org.junit.Test; +import org.junit.Before; + +import static org.junit.Assert.*; + +public class DnsRecordTest { + + private static final String NAME = "example.com."; + private static final Integer TTL = 3600; + private static final DnsRecord.DnsRecordType TYPE = DnsRecord.DnsRecordType.AAAA; + private static final ManagedZoneInfo MANAGED_ZONE_INFO_MOCK + = EasyMock.createMock(ManagedZoneInfo.class); + private static final Long PARENT_ID = 12L; + private static final String PARENT_NAME = "name"; + static { + EasyMock.expect(MANAGED_ZONE_INFO_MOCK.id()).andReturn(PARENT_ID); + EasyMock.expect(MANAGED_ZONE_INFO_MOCK.name()).andReturn(PARENT_NAME); + EasyMock.replay(MANAGED_ZONE_INFO_MOCK); + } + private static final DnsRecord RECORD = DnsRecord.builder() + .name(NAME) + .ttl(TTL) + .managedZone(MANAGED_ZONE_INFO_MOCK) + .build(); + private static final Integer DEFAULT_TTL = 86400; + + @Test + public void testDefaultDnsRecord() { + DnsRecord record = DnsRecord.builder().build(); + assertEquals(DEFAULT_TTL, record.ttl()); + assertEquals(0, record.rrdatas().size()); + } + + @Test + public void testBuilder() { + + assertEquals(NAME, RECORD.name()); + assertEquals(TTL, RECORD.ttl()); + + assertEquals(PARENT_ID, RECORD.parentId()); // this was never assigned + assertEquals(PARENT_NAME, RECORD.parentName()); + assertEquals(0, RECORD.rrdatas().size()); + // verify that one can add records to the record set + String testingRecord = "Testing record"; + String anotherTestingRecord = "Another record 123"; + DnsRecord anotherRecord = RECORD.toBuilder() + .add(testingRecord) + .add(anotherTestingRecord) + .build(); + assertEquals(2, anotherRecord.rrdatas().size()); + assertTrue(anotherRecord.rrdatas().contains(testingRecord)); + assertTrue(anotherRecord.rrdatas().contains(anotherTestingRecord)); + } + + @Test + public void testValidTtl() { + try { + DnsRecord.builder().ttl(-1); + fail("A negative value is not acceptable for ttl."); + } catch (IllegalArgumentException e) { + // ok + } + try { + DnsRecord.builder().ttl(0); + } catch (IllegalArgumentException e) { + fail("0 is a valid value."); + } + try { + DnsRecord.builder().ttl(Integer.MAX_VALUE); + } catch (Exception e) { + fail("Large numbers should be ok too."); + } + } + +} From f652277e166b7d3cb3f248e0dafd92ea063c0caf Mon Sep 17 00:00:00 2001 From: Martin Derka Date: Tue, 19 Jan 2016 15:37:51 -0800 Subject: [PATCH 04/18] Implemented comments by @mziccard --- .../java/com/google/gcloud/dns/DnsRecord.java | 188 ++++++++++-------- .../google/gcloud/dns/ManagedZoneInfo.java | 10 +- .../com/google/gcloud/dns/DnsRecordTest.java | 82 +++++--- 3 files changed, 165 insertions(+), 115 deletions(-) diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java index 8abf335969f8..56da63dc5fe3 100644 --- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java @@ -15,35 +15,45 @@ */ package com.google.gcloud.dns; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.collect.ImmutableList; +import java.io.Serializable; + import java.util.LinkedList; import java.util.List; +import java.util.Objects; /** * A class that represents Google Cloud DNS record set. * - *
<p>
- * A unit of data that will be returned by the DNS servers. + *
<p>
A unit of data that will be returned by the DNS servers. * - * @see Google - * Cloud DNS documentation + * @see Google Cloud DNS + * documentation */ -public class DnsRecord { +public class DnsRecord implements Serializable { - private String name; - private List rrdatas = new LinkedList<>(); - private Integer ttl = 86400; // the default ttl of 24 hours - private DnsRecordType type; - private String parentName; - private Long parentId; + private static final long serialVersionUID = 2016011914302204L; + private final String name; + private final List rrdatas; + private final Integer ttl; + private final DnsRecordType type; + private final String zoneName; + private final Long zoneId; /** * A private constructor. Obtain an instance using {@link DnsRecord#Builder}. */ private DnsRecord() { + this.name = null; + this.rrdatas = null; + this.ttl = null; + this.type = null; + this.zoneName = null; + this.zoneId = null; } DnsRecord(Builder builder) { @@ -51,74 +61,64 @@ private DnsRecord() { this.rrdatas = ImmutableList.copyOf(builder.rrdatas); this.ttl = builder.ttl; this.type = builder.type; - this.parentName = builder.parentName; - this.parentId = builder.parentId; + this.zoneName = builder.zoneName; + this.zoneId = builder.zoneId; } /** * Enum for the DNS record types supported by Cloud DNS. * - *
<p>
- * Google Cloud DNS currently supports records of type A, AAAA, CNAME, MX - * NAPTR, NS, PTR, SOA, SPF, SRV, TXT. + *
<p>
Google Cloud DNS currently supports records of type A, AAAA, CNAME, MX NAPTR, NS, PTR, SOA, + * SPF, SRV, TXT. * - * @see - * Cloud - * DNS supported record types + * @see Cloud DNS + * supported record types */ public enum DnsRecordType { - A("A"), - AAAA("AAAA"), - CNAME("CNAME"), - MX("MX"), - NAPTR("NAPTR"), - NS("NS"), - PTR("PTR"), - SOA("SOA"), - SPF("SPF"), - SRV("SRV"), - TXT("TXT"); - - private final String type; - - private DnsRecordType(String type) { - this.type = type; - } + A, + AAAA, + CNAME, + MX, + NAPTR, + NS, + PTR, + SOA, + SPF, + SRV, + TXT; } public static class Builder { private List rrdatas = new LinkedList<>(); private String name; - private Integer ttl = 86400; // default ttl of 24 hours + private Integer ttl; private DnsRecordType type; - private String parentName; - private Long parentId; + private String zoneName; + private Long zoneId; private Builder() { } /** - * Creates a builder and pre-populates attributes with the values from the - * provided DnsRecord instance. + * Creates a builder and pre-populates attributes with the values from the provided DnsRecord + * instance. */ public Builder(DnsRecord record) { this.name = record.name; this.ttl = record.ttl; this.type = record.type; - this.parentId = record.parentId; - this.parentName = record.parentName; + this.zoneId = record.zoneId; + this.zoneName = record.zoneName; this.rrdatas.addAll(record.rrdatas); } /** - * Adds a record to the record set. + * Adds a record to the record set. The records should be as defined in RFC 1035 (section 5) and + * RFC 1034 (section 3.6.1). Examples of records are available in Google DNS documentation. * - *
<p>
- * The records should be as defined in RFC 1035 (section 5) and RFC 1034 - * (section 3.6.1). Examples of records are available in - * Cloud - * DNS documentation. + * @see Google + * DNS documentation . */ public Builder add(String record) { this.rrdatas.add(checkNotNull(record)); @@ -134,25 +134,19 @@ public Builder name(String name) { } /** - * Sets the number of seconds that this record can be cached by resolvers. - * This number must be non-negative. + * Sets the number of seconds that this record can be cached by resolvers. This number must be + * non-negative. * * @param ttl A non-negative number of seconds */ public Builder ttl(int ttl) { - // change only if - if (ttl < 0) { - throw new IllegalArgumentException( - "TTL cannot be negative. The supplied value was " + ttl + "." - ); - } + checkArgument(ttl >= 0, "TTL cannot be negative. The supplied value was " + ttl + "."); this.ttl = ttl; return this; } /** - * The identifier of a supported record type, for example, A, AAAA, MX, TXT, - * and so on. + * The identifier of a supported record type, for example, A, AAAA, MX, TXT, and so on. */ public Builder type(DnsRecordType type) { this.type = checkNotNull(type); @@ -171,8 +165,8 @@ public DnsRecord build() { */ public Builder managedZone(ManagedZoneInfo parent) { checkNotNull(parent); - this.parentId = parent.id(); - this.parentName = parent.name(); + this.zoneId = parent.id(); + this.zoneName = parent.name(); return this; } } @@ -192,9 +186,7 @@ public static Builder builder() { } /** - * Get user assigned name of this DNS record. - * - * TODO: is this field mandatory? + * Get the mandatory user assigned name of this DNS record. */ public String name() { return name; @@ -204,16 +196,12 @@ public String name() { * Returns a list of DNS record stored in this record set. */ public List rrdatas() { - return rrdatas; + return ImmutableList.copyOf(rrdatas); } /** - * Returns the number of seconds that this ResourceRecordSet can be cached by - * resolvers. - * - *
<p>
- * This number is provided by the user. If this values is not set, we use - * default of 86400. + * Returns the number of seconds that this ResourceRecordSet can be cached by resolvers. This + * number is provided by the user. */ public Integer ttl() { return ttl; @@ -227,28 +215,56 @@ public DnsRecordType type() { } /** - * Returns name of the managed zone that this record belongs to. - * - *
<p>
- * The name of the managed zone is provided by the user when the managed zone - * is created. It is unique within a project. If this DNS record is not - * associated with a managed zone, this returns null. + * Returns name of the managed zone that this record belongs to. The name of the managed zone is + * provided by the user when the managed zone is created. It is unique within a project. If this + * DNS record is not associated with a managed zone, this returns null. */ - public String parentName() { - return parentName; + public String zoneName() { + return zoneName; } /** * Returns name of the managed zone that this record belongs to. * - *
<p>
- * The id of the managed zone is determined by the server when the managed - * zone is created. It is a read only value. If this DNS record is not - * associated with a managed zone, or if the id of the managed zone was not - * loaded from the cloud service, this returns null. + *
<p>
The id of the managed zone is determined by the server when the managed zone is created. It + * is a read only value. If this DNS record is not associated with a managed zone, or if the id of + * the managed zone was not loaded from the cloud service, this returns null. */ - public Long parentId() { - return parentId; + public Long zoneId() { + return zoneId; + } + + @Override + public int hashCode() { + return Objects.hash(name, rrdatas, ttl, type, zoneName, zoneId); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof DnsRecord) { + DnsRecord other = (DnsRecord) obj; + return zoneId == other.zoneId() + && zoneName == other.zoneName + && this.toRRSet().equals(other.toRRSet()); + } + return false; + } + + com.google.api.services.dns.model.ResourceRecordSet toRRSet() { + com.google.api.services.dns.model.ResourceRecordSet rrset = + new com.google.api.services.dns.model.ResourceRecordSet(); + rrset.setName(name); + rrset.setRrdatas(this.rrdatas()); + rrset.setTtl(ttl); + rrset.setType(type == null ? null : type.name()); + return rrset; + } + + @Override + public String toString() { + return "DnsRecord{" + "name=" + name + ", rrdatas=" + rrdatas + + ", ttl=" + ttl + ", type=" + type + ", zoneName=" + + zoneName + ", zoneId=" + zoneId + '}'; } } diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java index 003854a91918..d5ed8351dc34 100644 --- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java @@ -16,8 +16,8 @@ package com.google.gcloud.dns; /** - * TODO: Implement. - * TODO: Add documentation. + * todo(mderka): Implement. + * todo(mderka): Add documentation. 
*/ public class ManagedZoneInfo { @@ -26,19 +26,19 @@ public class ManagedZoneInfo { public String name() { throw new UnsupportedOperationException("Not implemented yet."); - // TODO: Implement + // todo(mderka): Implement } public Long id() { return id; - // TODO: Implement + // todo(mderka): Implement } private ManagedZoneInfo() { name = null; id = null; throw new UnsupportedOperationException("Not implemented yet"); - // TODO: Implement + // todo(mderka): Implement } } diff --git a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java index 0709ca3bf0e4..55c72d794e87 100644 --- a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java +++ b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java @@ -15,38 +15,42 @@ */ package com.google.gcloud.dns; -import org.easymock.EasyMock; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertNotEquals; +import org.junit.BeforeClass; import org.junit.Test; -import org.junit.Before; -import static org.junit.Assert.*; +import org.easymock.EasyMock; + public class DnsRecordTest { private static final String NAME = "example.com."; private static final Integer TTL = 3600; private static final DnsRecord.DnsRecordType TYPE = DnsRecord.DnsRecordType.AAAA; - private static final ManagedZoneInfo MANAGED_ZONE_INFO_MOCK - = EasyMock.createMock(ManagedZoneInfo.class); - private static final Long PARENT_ID = 12L; - private static final String PARENT_NAME = "name"; + private static final ManagedZoneInfo MANAGED_ZONE_INFO_MOCK = + EasyMock.createMock(ManagedZoneInfo.class); + private static final Long ZONE_ID = 12L; + private static final String ZONE_NAME = "name"; + static { - EasyMock.expect(MANAGED_ZONE_INFO_MOCK.id()).andReturn(PARENT_ID); - EasyMock.expect(MANAGED_ZONE_INFO_MOCK.name()).andReturn(PARENT_NAME); + EasyMock.expect(MANAGED_ZONE_INFO_MOCK.id()).andReturn(ZONE_ID); + EasyMock.expect(MANAGED_ZONE_INFO_MOCK.name()).andReturn(ZONE_NAME); EasyMock.replay(MANAGED_ZONE_INFO_MOCK); } + private static final DnsRecord RECORD = DnsRecord.builder() .name(NAME) .ttl(TTL) .managedZone(MANAGED_ZONE_INFO_MOCK) .build(); - private static final Integer DEFAULT_TTL = 86400; @Test public void testDefaultDnsRecord() { DnsRecord record = DnsRecord.builder().build(); - assertEquals(DEFAULT_TTL, record.ttl()); assertEquals(0, record.rrdatas().size()); } @@ -56,8 +60,8 @@ public void testBuilder() { assertEquals(NAME, RECORD.name()); assertEquals(TTL, RECORD.ttl()); - assertEquals(PARENT_ID, RECORD.parentId()); // this was never assigned - assertEquals(PARENT_NAME, RECORD.parentName()); + assertEquals(ZONE_ID, RECORD.zoneId()); // this was never assigned + assertEquals(ZONE_NAME, RECORD.zoneName()); assertEquals(0, RECORD.rrdatas().size()); // verify that one can add records to the record set String testingRecord = "Testing record"; @@ -77,18 +81,48 @@ public void testValidTtl() { DnsRecord.builder().ttl(-1); fail("A negative value is not acceptable for ttl."); } catch (IllegalArgumentException e) { - // ok - } - try { - DnsRecord.builder().ttl(0); - } catch (IllegalArgumentException e) { - fail("0 is a valid value."); - } - try { - DnsRecord.builder().ttl(Integer.MAX_VALUE); - } catch (Exception e) { - fail("Large numbers should be ok too."); + // expected } + DnsRecord.builder().ttl(0); + DnsRecord.builder().ttl(Integer.MAX_VALUE); + } + + 
@Test + public void testEqualsAndNotEquals() { + DnsRecord clone = RECORD.toBuilder().build(); + assertEquals(clone, RECORD); + clone = RECORD.toBuilder().add("another record").build(); + final String differentName = "totally different name"; + clone = RECORD.toBuilder().name(differentName).build(); + assertNotEquals(clone, RECORD); + clone = RECORD.toBuilder().ttl(RECORD.ttl() + 1).build(); + assertNotEquals(clone, RECORD); + clone = RECORD.toBuilder().type(DnsRecord.DnsRecordType.TXT).build(); + assertNotEquals(clone, RECORD); + ManagedZoneInfo anotherMock = EasyMock.createMock(ManagedZoneInfo.class); + EasyMock.expect(anotherMock.id()).andReturn(ZONE_ID + 1); + EasyMock.expect(anotherMock.name()).andReturn(ZONE_NAME + "more text"); + EasyMock.replay(anotherMock); + clone = RECORD.toBuilder().managedZone(anotherMock).build(); + assertNotEquals(clone, RECORD); + } + + @Test + public void testSameHashCodeOnEquals() { + int hash = RECORD.hashCode(); + DnsRecord clone = RECORD.toBuilder().build(); + assertEquals(clone.hashCode(), hash); + } + + @Test + public void testDifferentHashCodeOnDifferent() { + int hash = RECORD.hashCode(); + final String differentName = "totally different name"; + DnsRecord clone = RECORD.toBuilder().name(differentName).build(); + assertNotEquals(differentName, RECORD.name()); + assertNotEquals(clone.hashCode(), hash); + DnsRecord anotherClone = RECORD.toBuilder().add("another record").build(); + assertNotEquals(anotherClone.hashCode(), hash); } } From b29945fd1fd920571d115b746185dfef36d2a0ab Mon Sep 17 00:00:00 2001 From: Martin Derka Date: Wed, 20 Jan 2016 11:14:09 -0800 Subject: [PATCH 05/18] Second round of comments from @mziccard --- .../java/com/google/gcloud/dns/DnsRecord.java | 68 +++++++++------- .../com/google/gcloud/dns/DnsRecordTest.java | 78 ++++++++++--------- 2 files changed, 80 insertions(+), 66 deletions(-) diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java index 56da63dc5fe3..91278fa2a1e7 100644 --- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java @@ -18,6 +18,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import java.io.Serializable; @@ -29,7 +30,7 @@ /** * A class that represents Google Cloud DNS record set. * - *
<p>
A unit of data that will be returned by the DNS servers. + *
<p>
A unit of data that will be returned by the DNS servers. * * @see Google Cloud DNS * documentation @@ -44,9 +45,6 @@ public class DnsRecord implements Serializable { private final String zoneName; private final Long zoneId; - /** - * A private constructor. Obtain an instance using {@link DnsRecord#Builder}. - */ private DnsRecord() { this.name = null; this.rrdatas = null; @@ -68,7 +66,7 @@ private DnsRecord() { /** * Enum for the DNS record types supported by Cloud DNS. * - *
<p>
Google Cloud DNS currently supports records of type A, AAAA, CNAME, MX NAPTR, NS, PTR, SOA, + *
<p>
Google Cloud DNS currently supports records of type A, AAAA, CNAME, MX NAPTR, NS, PTR, SOA, * SPF, SRV, TXT. * * @see Cloud DNS @@ -85,7 +83,7 @@ public enum DnsRecordType { SOA, SPF, SRV, - TXT; + TXT } public static class Builder { @@ -162,13 +160,23 @@ public DnsRecord build() { /** * Sets references to the managed zone that this DNS record belongs to. + * + * todo(mderka): consider if this method is needed; may not be possible when listing records */ - public Builder managedZone(ManagedZoneInfo parent) { + Builder managedZone(ManagedZoneInfo parent) { checkNotNull(parent); this.zoneId = parent.id(); this.zoneName = parent.name(); return this; } + + /** + * Sets name reference to the managed zone that this DNS record belongs to. + */ + Builder managedZone(String managedZoneName) { + this.zoneName = checkNotNull(managedZoneName); + return this; + } } /** @@ -196,12 +204,12 @@ public String name() { * Returns a list of DNS record stored in this record set. */ public List rrdatas() { - return ImmutableList.copyOf(rrdatas); + return rrdatas; } /** - * Returns the number of seconds that this ResourceRecordSet can be cached by resolvers. This - * number is provided by the user. + * Returns the number of seconds that this DnsResource can be cached by resolvers. This number is + * provided by the user. */ public Integer ttl() { return ttl; @@ -224,9 +232,9 @@ public String zoneName() { } /** - * Returns name of the managed zone that this record belongs to. + * Returns id of the managed zone that this record belongs to. * - *
<p>
The id of the managed zone is determined by the server when the managed zone is created. It + *
<p>
The id of the managed zone is determined by the server when the managed zone is created. It * is a read only value. If this DNS record is not associated with a managed zone, or if the id of * the managed zone was not loaded from the cloud service, this returns null. */ @@ -241,30 +249,32 @@ public int hashCode() { @Override public boolean equals(Object obj) { - if (obj instanceof DnsRecord) { - DnsRecord other = (DnsRecord) obj; - return zoneId == other.zoneId() - && zoneName == other.zoneName - && this.toRRSet().equals(other.toRRSet()); - } - return false; + return (obj instanceof DnsRecord) && Objects.equals(this.toPb(), ((DnsRecord) obj).toPb()) + && this.zoneId().equals(((DnsRecord) obj).zoneId()) + && this.zoneName().equals(((DnsRecord) obj).zoneName()); + } - com.google.api.services.dns.model.ResourceRecordSet toRRSet() { - com.google.api.services.dns.model.ResourceRecordSet rrset = + com.google.api.services.dns.model.ResourceRecordSet toPb() { + com.google.api.services.dns.model.ResourceRecordSet pb = new com.google.api.services.dns.model.ResourceRecordSet(); - rrset.setName(name); - rrset.setRrdatas(this.rrdatas()); - rrset.setTtl(ttl); - rrset.setType(type == null ? null : type.name()); - return rrset; + pb.setName(this.name()); + pb.setRrdatas(this.rrdatas()); + pb.setTtl(this.ttl()); + pb.setType(this.type() == null ? null : this.type().name()); + return pb; } @Override public String toString() { - return "DnsRecord{" + "name=" + name + ", rrdatas=" + rrdatas - + ", ttl=" + ttl + ", type=" + type + ", zoneName=" - + zoneName + ", zoneId=" + zoneId + '}'; + return MoreObjects.toStringHelper(this) + .add("name", name()) + .add("rrdatas", rrdatas()) + .add("ttl", ttl()) + .add("type", type()) + .add("zoneName", zoneName()) + .add("zoneId", zoneId()) + .toString(); } } diff --git a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java index 55c72d794e87..ee9e6e58d61d 100644 --- a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java +++ b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java @@ -25,29 +25,30 @@ import org.easymock.EasyMock; - public class DnsRecordTest { private static final String NAME = "example.com."; private static final Integer TTL = 3600; private static final DnsRecord.DnsRecordType TYPE = DnsRecord.DnsRecordType.AAAA; - private static final ManagedZoneInfo MANAGED_ZONE_INFO_MOCK = - EasyMock.createMock(ManagedZoneInfo.class); private static final Long ZONE_ID = 12L; private static final String ZONE_NAME = "name"; - - static { - EasyMock.expect(MANAGED_ZONE_INFO_MOCK.id()).andReturn(ZONE_ID); - EasyMock.expect(MANAGED_ZONE_INFO_MOCK.name()).andReturn(ZONE_NAME); - EasyMock.replay(MANAGED_ZONE_INFO_MOCK); + // the following is initialized in @BeforeClass setUp() + private static DnsRecord record; + private static ManagedZoneInfo managedZoneInfoMock; + + @BeforeClass + public static void setUp() { + managedZoneInfoMock = EasyMock.createMock(ManagedZoneInfo.class); + EasyMock.expect(managedZoneInfoMock.id()).andReturn(ZONE_ID); + EasyMock.expect(managedZoneInfoMock.name()).andReturn(ZONE_NAME); + EasyMock.replay(managedZoneInfoMock); + record = DnsRecord.builder() + .name(NAME) + .ttl(TTL) + .managedZone(managedZoneInfoMock) + .build(); } - private static final DnsRecord RECORD = DnsRecord.builder() - .name(NAME) - .ttl(TTL) - .managedZone(MANAGED_ZONE_INFO_MOCK) - .build(); - @Test public void testDefaultDnsRecord() { DnsRecord record = 
DnsRecord.builder().build(); @@ -57,20 +58,23 @@ public void testDefaultDnsRecord() { @Test public void testBuilder() { - assertEquals(NAME, RECORD.name()); - assertEquals(TTL, RECORD.ttl()); + assertEquals(NAME, record.name()); + assertEquals(TTL, record.ttl()); - assertEquals(ZONE_ID, RECORD.zoneId()); // this was never assigned - assertEquals(ZONE_NAME, RECORD.zoneName()); - assertEquals(0, RECORD.rrdatas().size()); + assertEquals(ZONE_ID, record.zoneId()); // this was never assigned + assertEquals(ZONE_NAME, record.zoneName()); + assertEquals(0, record.rrdatas().size()); // verify that one can add records to the record set String testingRecord = "Testing record"; String anotherTestingRecord = "Another record 123"; - DnsRecord anotherRecord = RECORD.toBuilder() + String differentName = ZONE_NAME + "something"; + DnsRecord anotherRecord = record.toBuilder() .add(testingRecord) .add(anotherTestingRecord) + .managedZone(differentName) .build(); assertEquals(2, anotherRecord.rrdatas().size()); + assertEquals(differentName, anotherRecord.zoneName()); assertTrue(anotherRecord.rrdatas().contains(testingRecord)); assertTrue(anotherRecord.rrdatas().contains(anotherTestingRecord)); } @@ -89,39 +93,39 @@ public void testValidTtl() { @Test public void testEqualsAndNotEquals() { - DnsRecord clone = RECORD.toBuilder().build(); - assertEquals(clone, RECORD); - clone = RECORD.toBuilder().add("another record").build(); + DnsRecord clone = record.toBuilder().build(); + assertEquals(clone, record); + clone = record.toBuilder().add("another record").build(); final String differentName = "totally different name"; - clone = RECORD.toBuilder().name(differentName).build(); - assertNotEquals(clone, RECORD); - clone = RECORD.toBuilder().ttl(RECORD.ttl() + 1).build(); - assertNotEquals(clone, RECORD); - clone = RECORD.toBuilder().type(DnsRecord.DnsRecordType.TXT).build(); - assertNotEquals(clone, RECORD); + clone = record.toBuilder().name(differentName).build(); + assertNotEquals(clone, record); + clone = record.toBuilder().ttl(record.ttl() + 1).build(); + assertNotEquals(clone, record); + clone = record.toBuilder().type(DnsRecord.DnsRecordType.TXT).build(); + assertNotEquals(clone, record); ManagedZoneInfo anotherMock = EasyMock.createMock(ManagedZoneInfo.class); EasyMock.expect(anotherMock.id()).andReturn(ZONE_ID + 1); EasyMock.expect(anotherMock.name()).andReturn(ZONE_NAME + "more text"); EasyMock.replay(anotherMock); - clone = RECORD.toBuilder().managedZone(anotherMock).build(); - assertNotEquals(clone, RECORD); + clone = record.toBuilder().managedZone(anotherMock).build(); + assertNotEquals(clone, record); } @Test public void testSameHashCodeOnEquals() { - int hash = RECORD.hashCode(); - DnsRecord clone = RECORD.toBuilder().build(); + int hash = record.hashCode(); + DnsRecord clone = record.toBuilder().build(); assertEquals(clone.hashCode(), hash); } @Test public void testDifferentHashCodeOnDifferent() { - int hash = RECORD.hashCode(); + int hash = record.hashCode(); final String differentName = "totally different name"; - DnsRecord clone = RECORD.toBuilder().name(differentName).build(); - assertNotEquals(differentName, RECORD.name()); + DnsRecord clone = record.toBuilder().name(differentName).build(); + assertNotEquals(differentName, record.name()); assertNotEquals(clone.hashCode(), hash); - DnsRecord anotherClone = RECORD.toBuilder().add("another record").build(); + DnsRecord anotherClone = record.toBuilder().add("another record").build(); assertNotEquals(anotherClone.hashCode(), hash); } From 
03d5d30313281209550730375df7a0bbe8f4d284 Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Fri, 15 Jan 2016 15:10:21 +0100 Subject: [PATCH 06/18] Refactor BaseServiceException --- .../java/com/google/gcloud/bigquery/Acl.java | 2 +- .../gcloud/bigquery/BigQueryException.java | 42 +++-- .../google/gcloud/spi/DefaultBigQueryRpc.java | 34 +--- .../bigquery/BigQueryExceptionTest.java | 115 +++++++++++++ .../gcloud/bigquery/BigQueryImplTest.java | 4 +- .../google/gcloud/BaseServiceException.java | 157 +++++++++++++++++- .../gcloud/BaseServiceExceptionTest.java | 134 +++++++++++++-- .../gcloud/datastore/DatastoreException.java | 145 +++++----------- .../gcloud/datastore/DatastoreImpl.java | 22 ++- .../gcloud/datastore/DatastoreOptions.java | 23 +-- .../com/google/gcloud/spi/DatastoreRpc.java | 84 +--------- .../gcloud/spi/DefaultDatastoreRpc.java | 71 +++----- .../datastore/DatastoreExceptionTest.java | 84 +++++++--- .../gcloud/datastore/DatastoreTest.java | 21 ++- .../ResourceManagerException.java | 45 +++-- .../gcloud/spi/DefaultResourceManagerRpc.java | 26 +-- .../ResourceManagerExceptionTest.java | 94 +++++++++++ .../ResourceManagerImplTest.java | 4 +- .../google/gcloud/spi/DefaultStorageRpc.java | 24 +-- .../gcloud/storage/BlobReadChannel.java | 2 +- .../gcloud/storage/StorageException.java | 47 ++++-- .../gcloud/storage/RemoteGcsHelperTest.java | 4 +- .../gcloud/storage/StorageExceptionTest.java | 125 ++++++++++++++ .../gcloud/storage/StorageImplTest.java | 4 +- 24 files changed, 873 insertions(+), 440 deletions(-) create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java create mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java index 2a042c108e00..c1fca9e3b350 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java @@ -104,7 +104,7 @@ static Entity fromPb(Access access) { } // Unreachable throw new BigQueryException(BigQueryException.UNKNOWN_CODE, - "Unrecognized access configuration", false); + "Unrecognized access configuration"); } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java index e92ffacd8f09..930d06d523ab 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java @@ -16,10 +16,14 @@ package com.google.gcloud.bigquery; +import com.google.common.collect.ImmutableSet; import com.google.gcloud.BaseServiceException; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryHelper.RetryInterruptedException; +import java.io.IOException; +import java.util.Set; + /** * BigQuery service exception. 
* @@ -28,20 +32,30 @@ */ public class BigQueryException extends BaseServiceException { - private static final long serialVersionUID = -5504832700512784654L; - public static final int UNKNOWN_CODE = -1; + // see: https://cloud.google.com/bigquery/troubleshooting-errors + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(500, null), + new Error(502, null), + new Error(503, null), + new Error(504, null)); + private static final long serialVersionUID = -5006625989225438209L; private final BigQueryError error; - public BigQueryException(int code, String message, boolean retryable) { - this(code, message, retryable, null); + public BigQueryException(int code, String message) { + this(code, message, null); } - public BigQueryException(int code, String message, boolean retryable, BigQueryError error) { - super(code, message, retryable); + public BigQueryException(int code, String message, BigQueryError error) { + super(code, message, error != null ? error.reason() : null, true); this.error = error; } + public BigQueryException(IOException exception) { + super(exception, true); + this.error = null; + } + /** * Returns the {@link BigQueryError} that caused this exception. Returns {@code null} if none * exists. @@ -50,6 +64,11 @@ public BigQueryError error() { return error; } + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; + } + /** * Translate RetryHelperException to the BigQueryException that caused the error. This method will * always throw an exception. @@ -57,13 +76,8 @@ public BigQueryError error() { * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException} * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - static BigQueryException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof BigQueryException) { - throw (BigQueryException) ex.getCause(); - } - if (ex instanceof RetryInterruptedException) { - RetryInterruptedException.propagate(); - } - throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), false); + public static BigQueryException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndThrow(ex); + throw new BigQueryException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java index 74fdeb74bd64..0a1dc046bf74 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java @@ -25,7 +25,6 @@ import static java.net.HttpURLConnection.HTTP_OK; import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpRequest; @@ -58,10 +57,8 @@ import com.google.api.services.bigquery.model.TableRow; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.gcloud.bigquery.BigQueryError; import com.google.gcloud.bigquery.BigQueryException; import com.google.gcloud.bigquery.BigQueryOptions; @@ -69,13 +66,10 @@ import java.math.BigInteger; import java.util.List; import java.util.Map; -import java.util.Set; public 
class DefaultBigQueryRpc implements BigQueryRpc { public static final String DEFAULT_PROJECTION = "full"; - // see: https://cloud.google.com/bigquery/troubleshooting-errors - private static final Set RETRYABLE_CODES = ImmutableSet.of(500, 502, 503, 504); private static final String BASE_RESUMABLE_URI = "https://www.googleapis.com/upload/bigquery/v2/projects/"; // see: https://cloud.google.com/bigquery/loading-data-post-request#resume-upload @@ -94,28 +88,7 @@ public DefaultBigQueryRpc(BigQueryOptions options) { } private static BigQueryException translate(IOException exception) { - BigQueryException translated; - if (exception instanceof GoogleJsonResponseException - && ((GoogleJsonResponseException) exception).getDetails() != null) { - translated = translate(((GoogleJsonResponseException) exception).getDetails()); - } else { - translated = - new BigQueryException(BigQueryException.UNKNOWN_CODE, exception.getMessage(), false); - } - translated.initCause(exception); - return translated; - } - - private static BigQueryException translate(GoogleJsonError exception) { - boolean retryable = RETRYABLE_CODES.contains(exception.getCode()); - BigQueryError bigqueryError = null; - if (exception.getErrors() != null && !exception.getErrors().isEmpty()) { - GoogleJsonError.ErrorInfo error = exception.getErrors().get(0); - bigqueryError = new BigQueryError(error.getReason(), error.getLocation(), error.getMessage(), - (String) error.get("debugInfo")); - } - return new BigQueryException(exception.getCode(), exception.getMessage(), retryable, - bigqueryError); + return new BigQueryException(exception); } @Override @@ -489,10 +462,7 @@ public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destO if (exception != null) { throw exception; } - GoogleJsonError error = new GoogleJsonError(); - error.setCode(code); - error.setMessage(message); - throw translate(error); + throw new BigQueryException(code, message); } } catch (IOException ex) { throw translate(ex); diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java new file mode 100644 index 000000000000..66e5289424e2 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java @@ -0,0 +1,115 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class BigQueryExceptionTest { + + @Test + public void testBigqueryException() { + BigQueryException exception = new BigQueryException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(502, "message"); + assertEquals(502, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(504, "message"); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(400, "message"); + assertEquals(400, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + BigQueryError error = new BigQueryError("reason", null, null); + exception = new BigQueryException(504, "message", error); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertEquals("reason", exception.reason()); + assertEquals(error, exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new BigQueryException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + BigQueryException cause = new BigQueryException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + BigQueryException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java index 402edfc4a42f..b54a989fb5e5 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java @@ -1022,7 +1022,7 @@ public void testWriter() { @Test public void testRetryableException() { EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) - .andThrow(new BigQueryException(500, "InternalError", true)) + .andThrow(new BigQueryException(500, "InternalError")) .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); @@ -1034,7 +1034,7 @@ public void testRetryableException() { public void testNonRetryableException() { String exceptionMessage = "Not Implemented"; EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) - .andThrow(new BigQueryException(501, exceptionMessage, false)); + .andThrow(new BigQueryException(501, exceptionMessage)); EasyMock.replay(bigqueryRpcMock); bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); thrown.expect(BigQueryException.class); diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java index cd0933426756..9f4bfdab994d 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -16,26 +16,126 @@ package com.google.gcloud; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.common.base.MoreObjects; + +import java.io.IOException; +import java.io.Serializable; +import java.net.SocketTimeoutException; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; + /** * Base class for all service exceptions. */ public class BaseServiceException extends RuntimeException { - private static final long serialVersionUID = 5028833760039966178L; + protected static final class Error implements Serializable { + + private static final long serialVersionUID = -4019600198652965721L; + + private final Integer code; + private final String reason; + + public Error(Integer code, String reason) { + this.code = code; + this.reason = reason; + } + + /** + * Returns the code associated with this exception. + */ + public Integer code() { + return code; + } + + /** + * Returns the reason that caused the exception. 
+ */ + public String reason() { + return reason; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("code", code).add("reason", reason).toString(); + } + + @Override + public int hashCode() { + return Objects.hash(code, reason); + } + } + + private static final long serialVersionUID = 759921776378760835L; + public static final int UNKNOWN_CODE = 0; private final int code; private final boolean retryable; + private final String reason; + private final boolean idempotent; - public BaseServiceException(int code, String message, boolean retryable) { - super(message); - this.code = code; - this.retryable = retryable; + public BaseServiceException(IOException exception, boolean idempotent) { + super(message(exception), exception); + if (exception instanceof GoogleJsonResponseException) { + Error error = error(((GoogleJsonResponseException) exception).getDetails()); + this.code = error.code; + this.reason = error.reason; + this.retryable = isRetryable(error); + } else { + this.code = UNKNOWN_CODE; + this.reason = null; + this.retryable = idempotent && isRetryable(exception); + } + this.idempotent = idempotent; + } + + public BaseServiceException(GoogleJsonError error, boolean idempotent) { + super(error.getMessage()); + this.code = error.getCode(); + this.reason = reason(error); + this.idempotent = idempotent; + this.retryable = idempotent && isRetryable(error); + } + + public BaseServiceException(int code, String message, String reason, boolean idempotent) { + this(code, message, reason, idempotent, null); } - public BaseServiceException(int code, String message, boolean retryable, Exception cause) { + public BaseServiceException(int code, String message, String reason, boolean idempotent, + Exception cause) { super(message, cause); this.code = code; - this.retryable = retryable; + this.reason = reason; + this.idempotent = idempotent; + this.retryable = idempotent && isRetryable(new Error(code, reason)); + } + + protected Set retryableErrors() { + return Collections.emptySet(); + } + + protected boolean isRetryable(GoogleJsonError error) { + return error != null && isRetryable(error(error)); + } + + protected boolean isRetryable(IOException exception) { + if (exception instanceof GoogleJsonResponseException) { + return isRetryable(((GoogleJsonResponseException) exception).getDetails()); + } + return exception instanceof SocketTimeoutException; + } + + protected boolean isRetryable(Error error) { + for (Error retryableError : retryableErrors()) { + if ((retryableError.code() == null || retryableError.code().equals(error.code())) + && (retryableError.reason() == null || retryableError.reason().equals(error.reason()))) { + return true; + } + } + return false; } /** @@ -45,10 +145,53 @@ public int code() { return code; } + /** + * Returns the reason that caused the exception. + */ + public String reason() { + return reason; + } + /** * Returns {@code true} when it is safe to retry the operation that caused this exception. */ public boolean retryable() { return retryable; } + + /** + * Returns {@code true} when the operation that caused this exception had no side effects. 
+ */ + public boolean idempotent() { + return idempotent; + } + + protected static String reason(GoogleJsonError error) { + if (error.getErrors() != null && !error.getErrors().isEmpty()) { + return error.getErrors().get(0).getReason(); + } + return null; + } + + protected static Error error(GoogleJsonError error) { + return new Error(error.getCode(), reason(error)); + } + + protected static String message(IOException exception) { + if (exception instanceof GoogleJsonResponseException) { + return ((GoogleJsonResponseException) exception).getDetails().getMessage(); + } + return exception.getMessage(); + } + + protected static BaseServiceException translateAndThrow( + RetryHelper.RetryHelperException ex) { + if (ex.getCause() instanceof BaseServiceException) { + throw (BaseServiceException) ex.getCause(); + } + if (ex instanceof RetryHelper.RetryInterruptedException) { + RetryHelper.RetryInterruptedException.propagate(); + } + return null; + } } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java index f30fd3abfb79..a6e22866ed9f 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java @@ -16,30 +16,142 @@ package com.google.gcloud; +import static com.google.gcloud.BaseServiceException.UNKNOWN_CODE; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.common.collect.ImmutableSet; import org.junit.Test; +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.util.Set; + /** * Tests for {@link BaseServiceException}. 
*/ public class BaseServiceExceptionTest { - private final int code = 1; - private final String message = "some message"; - private final boolean retryable = true; + private static final int CODE = 1; + private static final int CODE_NO_REASON = 2; + private static final String MESSAGE = "some message"; + private static final String REASON = "some reason"; + private static final boolean RETRYABLE = true; + private static final boolean IDEMPOTENT = true; + private static class CustomServiceException extends BaseServiceException { + + private static final long serialVersionUID = -195251309124875103L; + + public CustomServiceException(int code, String message, String reason, boolean idempotent) { + super(code, message, reason, idempotent); + } + + @Override + protected Set retryableErrors() { + return ImmutableSet.of(new Error(CODE, REASON), new Error(null, REASON), + new Error(CODE_NO_REASON, null)); + } + } @Test public void testBaseServiceException() { - BaseServiceException serviceException = new BaseServiceException(code, message, retryable); - assertEquals(serviceException.code(), code); - assertEquals(serviceException.getMessage(), message); - assertEquals(serviceException.getCause(), null); + BaseServiceException serviceException = new BaseServiceException(CODE, MESSAGE, REASON, + IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertNull(serviceException.getCause()); + + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertNull(serviceException.getCause()); Exception cause = new RuntimeException(); - serviceException = new BaseServiceException(code, message, retryable, cause); - assertEquals(serviceException.code(), code); - assertEquals(serviceException.getMessage(), message); - assertEquals(serviceException.getCause(), cause); + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT, cause); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertEquals(cause, serviceException.getCause()); + + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, false, cause); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertFalse(serviceException.idempotent()); + assertEquals(cause, serviceException.getCause()); + + IOException exception = new SocketTimeoutException(); + serviceException = new BaseServiceException(exception, true); + assertTrue(serviceException.retryable()); + assertTrue(serviceException.idempotent()); + assertEquals(exception, serviceException.getCause()); + + GoogleJsonError error = new GoogleJsonError(); + error.setCode(CODE); + error.setMessage(MESSAGE); + serviceException = new BaseServiceException(error, true); + assertEquals(CODE, serviceException.code()); + 
assertEquals(MESSAGE, serviceException.getMessage()); + assertFalse(serviceException.retryable()); + assertTrue(serviceException.idempotent()); + + serviceException = new CustomServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + + serviceException = new CustomServiceException(CODE_NO_REASON, MESSAGE, null, IDEMPOTENT); + assertEquals(CODE_NO_REASON, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertNull(serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + + serviceException = new CustomServiceException(UNKNOWN_CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(UNKNOWN_CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + BaseServiceException cause = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + RetryHelper.RetryHelperException exceptionMock = createMock(RetryHelper.RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + BaseServiceException ex = BaseServiceException.translateAndThrow(exceptionMock); + if (ex != null) { + throw ex; + } + } catch (BaseServiceException ex) { + assertEquals(CODE, ex.code()); + assertEquals(MESSAGE, ex.getMessage()); + assertFalse(ex.retryable()); + assertEquals(IDEMPOTENT, ex.idempotent()); + } finally { + verify(exceptionMock); + } } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java index ebef6b44f6b6..946fc9190fc3 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java @@ -16,141 +16,70 @@ package com.google.gcloud.datastore; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.gcloud.BaseServiceException; -import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; +import com.google.gcloud.RetryHelper.RetryInterruptedException; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; +import java.util.Set; +/** + * Datastore service exception. + * + * @see Google Cloud + * Datastore error codes + */ public class DatastoreException extends BaseServiceException { - private static final long serialVersionUID = -2336749234060754893L; - private static final ImmutableMap REASON_TO_ERROR; - private static final ImmutableMap HTTP_TO_ERROR; - - private final DatastoreError error; - - /** - * Represents Datastore errors. 
- * - * @see Google Cloud - * Datastore error codes - */ - public enum DatastoreError { - - ABORTED(Reason.ABORTED), - DEADLINE_EXCEEDED(Reason.DEADLINE_EXCEEDED), - UNAVAILABLE(Reason.UNAVAILABLE), - FAILED_PRECONDITION(Reason.FAILED_PRECONDITION), - INVALID_ARGUMENT(Reason.INVALID_ARGUMENT), - PERMISSION_DENIED(Reason.PERMISSION_DENIED), - UNAUTHORIZED(false, "Unauthorized", 401), - INTERNAL(Reason.INTERNAL), - RESOURCE_EXHAUSTED(Reason.RESOURCE_EXHAUSTED), - UNKNOWN(false, "Unknown failure", -1); - - private final boolean retryable; - private final String description; - private final int httpStatus; - - DatastoreError(Reason reason) { - this(reason.retryable(), reason.description(), reason.httpStatus()); - } - - DatastoreError(boolean retryable, String description, int httpStatus) { - this.retryable = retryable; - this.description = description; - this.httpStatus = httpStatus; - } + // see https://cloud.google.com/datastore/docs/concepts/errors#Error_Codes" + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(409, "ABORTED"), + new Error(403, "DEADLINE_EXCEEDED"), + new Error(503, "UNAVAILABLE")); + private static final long serialVersionUID = 2663750991205874435L; - String description() { - return description; - } - - int httpStatus() { - return httpStatus; - } - - boolean retryable() { - return retryable; - } - - DatastoreException translate(DatastoreRpcException exception, String message) { - return new DatastoreException(this, message, exception); - } - } - - static { - ImmutableMap.Builder builder = ImmutableMap.builder(); - Map httpCodes = new HashMap<>(); - for (DatastoreError error : DatastoreError.values()) { - builder.put(error.name(), error); - httpCodes.put(error.httpStatus(), error); - } - REASON_TO_ERROR = builder.build(); - HTTP_TO_ERROR = ImmutableMap.copyOf(httpCodes); + public DatastoreException(int code, String message, String reason, Exception cause) { + super(code, message, reason, true, cause); } - public DatastoreException(DatastoreError error, String message, Exception cause) { - super(error.httpStatus(), MoreObjects.firstNonNull(message, error.description), - error.retryable(), cause); - this.error = error; + public DatastoreException(int code, String message, String reason) { + super(code, message, reason, true); } - public DatastoreException(DatastoreError error, String message) { - this(error, message, null); + public DatastoreException(IOException exception) { + super(exception, true); } - /** - * Returns the DatastoreError associated with this exception. - */ - public DatastoreError datastoreError() { - return error; - } - - static DatastoreException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof DatastoreRpcException) { - return translateAndThrow((DatastoreRpcException) ex.getCause()); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new DatastoreException(DatastoreError.UNKNOWN, ex.getMessage(), ex); + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** - * Translate DatastoreRpcExceptions to DatastoreExceptions based on their - * HTTP error codes. This method will always throw a new DatastoreException. + * Translate RetryHelperException to the DatastoreException that caused the error. This method + * will always throw an exception. 
* - * @throws DatastoreException every time + * @throws DatastoreException when {@code ex} was caused by a {@code DatastoreException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - static DatastoreException translateAndThrow(DatastoreRpcException exception) { - String message = exception.getMessage(); - DatastoreError error = REASON_TO_ERROR.get(exception.reason()); - if (error == null) { - error = MoreObjects.firstNonNull( - HTTP_TO_ERROR.get(exception.httpStatus()), DatastoreError.UNKNOWN); - } - throw error.translate(exception, message); + public static DatastoreException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndThrow(ex); + throw new DatastoreException(UNKNOWN_CODE, ex.getMessage(), null); } /** - * Throw a DatastoreException with {@code FAILED_PRECONDITION} error and the {@code message} - * in a nested exception. + * Throw a DatastoreException with {@code FAILED_PRECONDITION} reason and the {@code message} in a + * nested exception. * * @throws DatastoreException every time */ static DatastoreException throwInvalidRequest(String massage, Object... params) { - throw new DatastoreException( - DatastoreError.FAILED_PRECONDITION, String.format(massage, params)); + throw new DatastoreException(UNKNOWN_CODE, String.format(massage, params), + "FAILED_PRECONDITION"); } static DatastoreException propagateUserException(Exception ex) { - throw new DatastoreException(DatastoreError.UNKNOWN, ex.getMessage(), ex); + throw new DatastoreException(BaseServiceException.UNKNOWN_CODE, ex.getMessage(), null, ex); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java index 43fd75396538..bfcba58f3f2f 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java @@ -29,7 +29,6 @@ import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryParams; import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; import com.google.protobuf.ByteString; import java.util.Arrays; @@ -42,7 +41,6 @@ import java.util.Set; import java.util.concurrent.Callable; - final class DatastoreImpl extends BaseService implements Datastore { @@ -58,15 +56,15 @@ public RetryResult afterEval(Exception exception, RetryResult retryResult) { @Override public RetryResult beforeEval(Exception exception) { - if (exception instanceof DatastoreRpcException) { - boolean retryable = ((DatastoreRpcException) exception).retryable(); + if (exception instanceof DatastoreException) { + boolean retryable = ((DatastoreException) exception).retryable(); return retryable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; } return Interceptor.RetryResult.CONTINUE_EVALUATION; } }; private static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class, DatastoreRpcException.class) + .abortOn(RuntimeException.class, DatastoreException.class) .interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); private final DatastoreRpc datastoreRpc; @@ -105,7 +103,7 @@ QueryResults run(DatastoreV1.ReadOptions readOptionsPb, Query query) { DatastoreV1.RunQueryResponse runQuery(final DatastoreV1.RunQueryRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.RunQueryResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.RunQueryResponse call() throws DatastoreException { return datastoreRpc.runQuery(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -139,7 +137,7 @@ public List allocateId(IncompleteKey... keys) { DatastoreV1.AllocateIdsResponse allocateIds(final DatastoreV1.AllocateIdsRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.AllocateIdsResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.AllocateIdsResponse call() throws DatastoreException { return datastoreRpc.allocateIds(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -176,7 +174,7 @@ public List add(FullEntity... entities) { if (completeEntity != null) { if (completeEntities.put(completeEntity.key(), completeEntity) != null) { throw DatastoreException.throwInvalidRequest( - "Duplicate entity with the key %s", entity.key()); + "Duplicate entity with the key %s", entity.key()); } mutationPb.addInsert(completeEntity.toPb()); } else { @@ -263,7 +261,7 @@ protected Entity computeNext() { DatastoreV1.LookupResponse lookup(final DatastoreV1.LookupRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.LookupResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.LookupResponse call() throws DatastoreException { return datastoreRpc.lookup(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -334,7 +332,7 @@ private DatastoreV1.CommitResponse commitMutation(DatastoreV1.Mutation.Builder m DatastoreV1.CommitResponse commit(final DatastoreV1.CommitRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.CommitResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.CommitResponse call() throws DatastoreException { return datastoreRpc.commit(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -352,7 +350,7 @@ DatastoreV1.BeginTransactionResponse beginTransaction( try { return RetryHelper.runWithRetries(new Callable() { @Override - public DatastoreV1.BeginTransactionResponse call() throws DatastoreRpcException { + public DatastoreV1.BeginTransactionResponse call() throws DatastoreException { return datastoreRpc.beginTransaction(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -370,7 +368,7 @@ void rollbackTransaction(ByteString transaction) { void rollback(final DatastoreV1.RollbackRequest requestPb) { try { RetryHelper.runWithRetries(new Callable() { - @Override public Void call() throws DatastoreRpcException { + @Override public Void call() throws DatastoreException { datastoreRpc.rollback(requestPb); return null; } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java 
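
Taken together, the DatastoreImpl hunks above give every RPC the same retry shape: the call is wrapped in RetryHelper.runWithRetries, the interceptor consults DatastoreException.retryable(), and a RetryHelperException is unwound back into the original exception. A consolidated sketch of that shape follows; the catch clause is inferred from the translateAndThrow contract introduced earlier in this patch rather than copied from a hunk shown here:

    DatastoreV1.LookupResponse lookup(final DatastoreV1.LookupRequest requestPb) {
      try {
        return RetryHelper.runWithRetries(new Callable<DatastoreV1.LookupResponse>() {
          @Override
          public DatastoreV1.LookupResponse call() throws DatastoreException {
            // may throw a DatastoreException; the interceptor retries it only if retryable()
            return datastoreRpc.lookup(requestPb);
          }
        }, retryParams, EXCEPTION_HANDLER);
      } catch (RetryHelper.RetryHelperException e) {
        // rethrows the DatastoreException that aborted the retries, or wraps e with UNKNOWN_CODE
        throw DatastoreException.translateAndThrow(e);
      }
    }
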
b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java index 227419d8acc8..2ec0f2be8f2b 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java @@ -25,7 +25,6 @@ import com.google.common.collect.Iterables; import com.google.gcloud.ServiceOptions; import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; import com.google.gcloud.spi.DatastoreRpcFactory; import com.google.gcloud.spi.DefaultDatastoreRpc; @@ -126,20 +125,16 @@ private DatastoreOptions normalize() { .addPathElement(DatastoreV1.Key.PathElement.newBuilder().setKind("__foo__").setName("bar")) .build(); requestPb.addKey(key); - try { - LookupResponse responsePb = rpc().lookup(requestPb.build()); - if (responsePb.getDeferredCount() > 0) { - key = responsePb.getDeferred(0); - } else { - Iterator combinedIter = - Iterables.concat(responsePb.getMissingList(), responsePb.getFoundList()).iterator(); - key = combinedIter.next().getEntity().getKey(); - } - builder.projectId(key.getPartitionId().getDatasetId()); - return new DatastoreOptions(builder); - } catch (DatastoreRpcException e) { - throw DatastoreException.translateAndThrow(e); + LookupResponse responsePb = rpc().lookup(requestPb.build()); + if (responsePb.getDeferredCount() > 0) { + key = responsePb.getDeferred(0); + } else { + Iterator combinedIter = + Iterables.concat(responsePb.getMissingList(), responsePb.getFoundList()).iterator(); + key = combinedIter.next().getEntity().getKey(); } + builder.projectId(key.getPartitionId().getDatasetId()); + return new DatastoreOptions(builder); } @Override diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java index 3e875ff2b8ba..fd916e0a1c87 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.gcloud.spi; import com.google.api.services.datastore.DatastoreV1.AllocateIdsRequest; @@ -27,92 +28,23 @@ import com.google.api.services.datastore.DatastoreV1.RollbackResponse; import com.google.api.services.datastore.DatastoreV1.RunQueryRequest; import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; +import com.google.gcloud.datastore.DatastoreException; /** * Provides access to the remote Datastore service. */ public interface DatastoreRpc { - public class DatastoreRpcException extends Exception { - - /** - * The reason for the exception. 
- * - * @see Google - * Cloud Datastore error codes - */ - public enum Reason { - - ABORTED(true, "Request aborted", 409), - DEADLINE_EXCEEDED(true, "Deadline exceeded", 403), - FAILED_PRECONDITION(false, "Invalid request", 412), - INTERNAL(false, "Server returned an error", 500), - INVALID_ARGUMENT(false, "Request parameter has an invalid value", 400), - PERMISSION_DENIED(false, "Unauthorized request", 403), - RESOURCE_EXHAUSTED(false, "Quota exceeded", 402), - UNAVAILABLE(true, "Could not reach service", 503); - - private final boolean retryable; - private final String description; - private final int httpStatus; - - private Reason(boolean retryable, String description, int httpStatus) { - this.retryable = retryable; - this.description = description; - this.httpStatus = httpStatus; - } - - public boolean retryable() { - return retryable; - } - - public String description() { - return description; - } - - public int httpStatus() { - return httpStatus; - } - } - - private final String reason; - private final int httpStatus; - private final boolean retryable; - - public DatastoreRpcException(Reason reason, Throwable cause) { - this(reason.name(), reason.httpStatus, reason.retryable, reason.description, cause); - } - - public DatastoreRpcException(String reason, int httpStatus, boolean retryable, String message, Throwable cause) { - super(message, cause); - this.reason = reason; - this.httpStatus = httpStatus; - this.retryable = retryable; - } - - public String reason() { - return reason; - } - - public int httpStatus() { - return httpStatus; - } - - public boolean retryable() { - return retryable; - } - } - - AllocateIdsResponse allocateIds(AllocateIdsRequest request) throws DatastoreRpcException; + AllocateIdsResponse allocateIds(AllocateIdsRequest request) throws DatastoreException; BeginTransactionResponse beginTransaction(BeginTransactionRequest request) - throws DatastoreRpcException; + throws DatastoreException; - CommitResponse commit(CommitRequest request) throws DatastoreRpcException; + CommitResponse commit(CommitRequest request) throws DatastoreException; - LookupResponse lookup(LookupRequest request) throws DatastoreRpcException; + LookupResponse lookup(LookupRequest request) throws DatastoreException; - RollbackResponse rollback(RollbackRequest request) throws DatastoreRpcException; + RollbackResponse rollback(RollbackRequest request) throws DatastoreException; - RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreRpcException; + RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreException; } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java index fa993c508a0b..ac00d94692de 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java @@ -29,47 +29,28 @@ import com.google.api.services.datastore.DatastoreV1.RunQueryRequest; import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; import com.google.api.services.datastore.client.Datastore; -import com.google.api.services.datastore.client.DatastoreException; import com.google.api.services.datastore.client.DatastoreFactory; import com.google.api.services.datastore.client.DatastoreOptions.Builder; import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; +import 
com.google.gcloud.datastore.DatastoreException; import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; +import java.io.IOException; import java.net.InetAddress; -import java.net.SocketTimeoutException; import java.net.URL; -import java.util.HashMap; -import java.util.Map; public class DefaultDatastoreRpc implements DatastoreRpc { private final Datastore client; - private static final ImmutableMap STR_TO_REASON; - private static final ImmutableMap HTTP_STATUS_TO_REASON; - - static { - ImmutableMap.Builder builder = ImmutableMap.builder(); - Map httpCodes = new HashMap<>(); - for (Reason reason : Reason.values()) { - builder.put(reason.name(), reason); - httpCodes.put(reason.httpStatus(), reason); - } - STR_TO_REASON = builder.build(); - HTTP_STATUS_TO_REASON = ImmutableMap.copyOf(httpCodes); - } - public DefaultDatastoreRpc(DatastoreOptions options) { String normalizedHost = normalizeHost(options.host()); client = DatastoreFactory.get().create( new Builder() - .transport(options.httpTransportFactory().create()) .dataset(options.projectId()) .host(normalizedHost) .initializer(options.httpRequestInitializer()) @@ -106,90 +87,82 @@ private static boolean includesScheme(String url) { return url.startsWith("http://") || url.startsWith("https://"); } - private static DatastoreRpcException translate(DatastoreException exception) { + private static DatastoreException translate( + com.google.api.services.datastore.client.DatastoreException exception) { String message = exception.getMessage(); - String reasonStr = ""; + int code = exception.getCode(); + String reason = ""; if (message != null) { try { JSONObject json = new JSONObject(new JSONTokener(message)); JSONObject error = json.getJSONObject("error").getJSONArray("errors").getJSONObject(0); - reasonStr = error.getString("reason"); + reason = error.getString("reason"); message = error.getString("message"); } catch (JSONException ignore) { // ignore - will be converted to unknown } } - Reason reason = STR_TO_REASON.get(reasonStr); if (reason == null) { - reason = HTTP_STATUS_TO_REASON.get(exception.getCode()); - } - if (reason != null) { - return new DatastoreRpcException(reason, exception); - } else { - boolean retryable = false; - reasonStr = "Unknown"; - if (exception.getCause() instanceof SocketTimeoutException) { - retryable = true; - reasonStr = "Request timeout"; + if (exception.getCause() instanceof IOException) { + return new DatastoreException((IOException) exception.getCause()); } - return new DatastoreRpcException(reasonStr, exception.getCode(), retryable, message, exception); } + return new DatastoreException(code, message, reason); } @Override public AllocateIdsResponse allocateIds(AllocateIdsRequest request) - throws DatastoreRpcException { + throws DatastoreException { try { return client.allocateIds(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) - throws DatastoreRpcException { + throws DatastoreException { try { return client.beginTransaction(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public CommitResponse commit(CommitRequest request) throws DatastoreRpcException { + 
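    // Note on translate() above: the low-level Datastore client embeds the JSON error body in the
    // exception message, so translate() re-parses it to recover a (reason, message) pair while the
    // HTTP code comes from exception.getCode(). The payload it navigates looks roughly like
    //
    //   {"error": {"errors": [{"reason": "ABORTED", "message": "too much contention"}]}}
    //
    // which, for an HTTP 409, yields new DatastoreException(409, "too much contention", "ABORTED");
    // a message that is not valid JSON simply leaves the reason empty. (The payload shape is an
    // assumption reconstructed from the JSON navigation above, not something stated in the patch.)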
public CommitResponse commit(CommitRequest request) throws DatastoreException { try { return client.commit(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public LookupResponse lookup(LookupRequest request) throws DatastoreRpcException { + public LookupResponse lookup(LookupRequest request) throws DatastoreException { try { return client.lookup(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public RollbackResponse rollback(RollbackRequest request) throws DatastoreRpcException { + public RollbackResponse rollback(RollbackRequest request) throws DatastoreException { try { return client.rollback(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreRpcException { + public RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreException { try { return client.runQuery(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } } - diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java index 019d69cb737b..4d62224172f9 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java @@ -16,42 +16,80 @@ package com.google.gcloud.datastore; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import com.google.gcloud.datastore.DatastoreException.DatastoreError; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper; import org.junit.Test; +import java.io.IOException; +import java.net.SocketTimeoutException; + public class DatastoreExceptionTest { @Test - public void testDatastoreError() throws Exception { - for (Reason reason : Reason.values()) { - DatastoreError error = DatastoreError.valueOf(reason.name()); - assertEquals(reason.retryable(), error.retryable()); - assertEquals(reason.description(), error.description()); - assertEquals(reason.httpStatus(), error.httpStatus()); - } + public void testDatastoreException() throws Exception { + DatastoreException exception = new DatastoreException(409, "message", "ABORTED"); + assertEquals(409, exception.code()); + assertEquals("ABORTED", exception.reason()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new DatastoreException(403, "message", "DEADLINE_EXCEEDED"); + assertEquals(403, exception.code()); + assertEquals("DEADLINE_EXCEEDED", exception.reason()); + 
assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new DatastoreException(503, "message", "UNAVAILABLE"); + assertEquals(503, exception.code()); + assertEquals("UNAVAILABLE", exception.reason()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new DatastoreException(500, "message", "INTERNAL"); + assertEquals(500, exception.code()); + assertEquals("INTERNAL", exception.reason()); + assertEquals("message", exception.getMessage()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new DatastoreException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); - DatastoreException exception = new DatastoreException(DatastoreError.ABORTED, "bla"); - assertEquals(DatastoreError.ABORTED, exception.datastoreError()); } @Test public void testTranslateAndThrow() throws Exception { - DatastoreRpcException toTranslate = null; // should be preserved as a cause - for (Reason reason : Reason.values()) { - try { - toTranslate = new DatastoreRpcException(reason, null); - DatastoreException.translateAndThrow(toTranslate); - fail("Exception expected"); - } catch (DatastoreException ex) { - assertEquals(reason.name(), ex.datastoreError().name()); - assertEquals(toTranslate, ex.getCause()); - } + DatastoreException cause = new DatastoreException(503, "message", "UNAVAILABLE"); + RetryHelper.RetryHelperException exceptionMock = createMock(RetryHelper.RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + DatastoreException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); } } @@ -61,7 +99,7 @@ public void testThrowInvalidRequest() throws Exception { DatastoreException.throwInvalidRequest("message %s %d", "a", 1); fail("Exception expected"); } catch (DatastoreException ex) { - assertEquals(DatastoreError.FAILED_PRECONDITION, ex.datastoreError()); + assertEquals("FAILED_PRECONDITION", ex.reason()); assertEquals("message a 1", ex.getMessage()); } } diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java index e6f84c76ad40..8cb88f9e7795 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java @@ -38,8 +38,6 @@ import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; import com.google.gcloud.datastore.testing.LocalGcdHelper; import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; import com.google.gcloud.spi.DatastoreRpcFactory; import org.easymock.EasyMock; @@ -201,7 +199,7 @@ public void testTransactionWithRead() { transaction.commit(); fail("Expecting a failure"); } catch (DatastoreException expected) { - assertEquals(DatastoreException.DatastoreError.ABORTED, expected.datastoreError()); + 
assertEquals("ABORTED", expected.reason()); } } @@ -229,7 +227,7 @@ public void testTransactionWithQuery() { transaction.commit(); fail("Expecting a failure"); } catch (DatastoreException expected) { - assertEquals(DatastoreException.DatastoreError.ABORTED, expected.datastoreError()); + assertEquals("ABORTED", expected.reason()); } } @@ -466,7 +464,7 @@ public void testRunStructuredQuery() { } @Test - public void testQueryPaginationWithLimit() throws DatastoreRpcException { + public void testQueryPaginationWithLimit() throws DatastoreException { DatastoreRpcFactory rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class); DatastoreRpc rpcMock = EasyMock.createStrictMock(DatastoreRpc.class); EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class))) @@ -639,7 +637,7 @@ public void testGetArrayNoDeferredResults() { assertFalse(result.hasNext()); } - public void testGetArrayDeferredResults() throws DatastoreRpcException { + public void testGetArrayDeferredResults() throws DatastoreException { Set requestedKeys = new HashSet<>(); requestedKeys.add(KEY1); requestedKeys.add(KEY2); @@ -654,7 +652,7 @@ public void testGetArrayDeferredResults() throws DatastoreRpcException { assertEquals(requestedKeys, keysOfFoundEntities); } - public void testFetchArrayDeferredResults() throws DatastoreRpcException { + public void testFetchArrayDeferredResults() throws DatastoreException { List foundEntities = createDatastoreForDeferredLookup().fetch(KEY1, KEY2, KEY3, KEY4, KEY5); assertEquals(foundEntities.get(0).key(), KEY1); @@ -665,7 +663,7 @@ public void testFetchArrayDeferredResults() throws DatastoreRpcException { assertEquals(foundEntities.size(), 5); } - private Datastore createDatastoreForDeferredLookup() throws DatastoreRpcException { + private Datastore createDatastoreForDeferredLookup() throws DatastoreException { List keysPb = new ArrayList<>(); keysPb.add(KEY1.toPb()); keysPb.add(KEY2.toPb()); @@ -821,7 +819,7 @@ public void testRetryableException() throws Exception { EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class))) .andReturn(rpcMock); EasyMock.expect(rpcMock.lookup(requestPb)) - .andThrow(new DatastoreRpc.DatastoreRpcException(Reason.UNAVAILABLE, null)) + .andThrow(new DatastoreException(503, "UNAVAILABLE", "UNAVAILABLE", null)) .andReturn(responsePb); EasyMock.replay(rpcFactoryMock, rpcMock); DatastoreOptions options = this.options.toBuilder() @@ -843,7 +841,8 @@ public void testNonRetryableException() throws Exception { EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class))) .andReturn(rpcMock); EasyMock.expect(rpcMock.lookup(requestPb)) - .andThrow(new DatastoreRpc.DatastoreRpcException(Reason.PERMISSION_DENIED, null)) + .andThrow( + new DatastoreException(DatastoreException.UNKNOWN_CODE, "denied", "PERMISSION_DENIED")) .times(1); EasyMock.replay(rpcFactoryMock, rpcMock); RetryParams retryParams = RetryParams.builder().retryMinAttempts(2).build(); @@ -853,7 +852,7 @@ public void testNonRetryableException() throws Exception { .build(); Datastore datastore = options.service(); thrown.expect(DatastoreException.class); - thrown.expectMessage(Reason.PERMISSION_DENIED.description()); + thrown.expectMessage("denied"); datastore.get(KEY1); EasyMock.verify(rpcFactoryMock, rpcMock); } diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java 
index 22b5e8bfed7c..3510f7728a8f 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java @@ -16,11 +16,14 @@ package com.google.gcloud.resourcemanager; +import com.google.common.collect.ImmutableSet; import com.google.gcloud.BaseServiceException; -import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryHelper.RetryInterruptedException; +import java.io.IOException; +import java.util.Set; + /** * Resource Manager service exception. * @@ -29,11 +32,32 @@ */ public class ResourceManagerException extends BaseServiceException { - private static final long serialVersionUID = 6841689911565501705L; - private static final int UNKNOWN_CODE = -1; + // see https://cloud.google.com/resource-manager/v1/errors/core_errors + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(503, null), + new Error(500, null), + new Error(429, null), + new Error(403, "concurrentLimitExceeded"), + new Error(403, "limitExceeded"), + new Error(403, "rateLimitExceeded"), + new Error(403, "rateLimitExceededUnreg"), + new Error(403, "servingLimitExceeded"), + new Error(403, "userRateLimitExceeded"), + new Error(403, "userRateLimitExceededUnreg"), + new Error(403, "variableTermLimitExceeded")); + private static final long serialVersionUID = -9207194488966554136L; + + public ResourceManagerException(int code, String message) { + super(code, message, null, true); + } + + public ResourceManagerException(IOException exception) { + super(exception, true); + } - public ResourceManagerException(int code, String message, boolean retryable) { - super(code, message, retryable); + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** @@ -44,13 +68,8 @@ public ResourceManagerException(int code, String message, boolean retryable) { * ResourceManagerException} * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - static ResourceManagerException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof ResourceManagerException) { - throw (ResourceManagerException) ex.getCause(); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new ResourceManagerException(UNKNOWN_CODE, ex.getMessage(), false); + public static ResourceManagerException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndThrow(ex); + throw new ResourceManagerException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java index ec95207c2e7b..61c622fa0c33 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java @@ -7,30 +7,20 @@ import static java.net.HttpURLConnection.HTTP_FORBIDDEN; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpTransport; import 
com.google.api.client.json.jackson.JacksonFactory; import com.google.api.services.cloudresourcemanager.Cloudresourcemanager; import com.google.api.services.cloudresourcemanager.model.ListProjectsResponse; import com.google.api.services.cloudresourcemanager.model.Project; -import com.google.common.collect.ImmutableSet; import com.google.gcloud.resourcemanager.ResourceManagerException; import com.google.gcloud.resourcemanager.ResourceManagerOptions; import java.io.IOException; import java.util.Map; -import java.util.Set; public class DefaultResourceManagerRpc implements ResourceManagerRpc { - // see https://cloud.google.com/resource-manager/v1/errors/core_errors - private static final Set RETRYABLE_CODES = ImmutableSet.of(503, 500, 429); - private static final Set RETRYABLE_REASONS = ImmutableSet.of("concurrentLimitExceeded", - "limitExceeded", "rateLimitExceeded", "rateLimitExceededUnreg", "servingLimitExceeded", - "userRateLimitExceeded", "userRateLimitExceededUnreg", "variableTermLimitExceeded"); - private final Cloudresourcemanager resourceManager; public DefaultResourceManagerRpc(ResourceManagerOptions options) { @@ -44,21 +34,7 @@ public DefaultResourceManagerRpc(ResourceManagerOptions options) { } private static ResourceManagerException translate(IOException exception) { - ResourceManagerException translated; - if (exception instanceof GoogleJsonResponseException) { - translated = translate(((GoogleJsonResponseException) exception).getDetails()); - } else { - translated = new ResourceManagerException(0, exception.getMessage(), false); - } - translated.initCause(exception); - return translated; - } - - private static ResourceManagerException translate(GoogleJsonError exception) { - boolean retryable = - RETRYABLE_CODES.contains(exception.getCode()) || (!exception.getErrors().isEmpty() - && RETRYABLE_REASONS.contains(exception.getErrors().get(0).getReason())); - return new ResourceManagerException(exception.getCode(), exception.getMessage(), retryable); + return new ResourceManagerException(exception); } @Override diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java new file mode 100644 index 000000000000..388f38f31c35 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class ResourceManagerExceptionTest { + + @Test + public void testResourceManagerException() { + ResourceManagerException exception = new ResourceManagerException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(429, "message"); + assertEquals(429, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(403, "message"); + assertEquals(403, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new ResourceManagerException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + ResourceManagerException cause = new ResourceManagerException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + ResourceManagerException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java index fedd10eacdc6..7d1e00496463 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java @@ -273,7 +273,7 @@ public void testRetryableException() { .build() .service(); EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) - .andThrow(new ResourceManagerException(500, "Internal Error", true)) + .andThrow(new ResourceManagerException(500, "Internal Error")) .andReturn(PARTIAL_PROJECT.toPb()); 
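    // A brief note on the Resource Manager table above (illustrative, not part of the patch):
    // the public ResourceManagerException(int, String) constructor passes a null reason, so only
    // the code-only entries (503, 500, 429) can match for it, e.g.
    //   new ResourceManagerException(503, "try again later").retryable()  -> true
    //   new ResourceManagerException(403, "quota exceeded").retryable()   -> false
    // A 403 is retried only when it arrives as a GoogleJsonResponseException whose first error
    // carries one of the listed reasons (such as "rateLimitExceeded") via the IOException constructor.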
EasyMock.replay(resourceManagerRpcMock); ProjectInfo returnedProject = resourceManagerMock.get(PARTIAL_PROJECT.projectId()); @@ -293,7 +293,7 @@ public void testNonRetryableException() { .service(); EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) .andThrow(new ResourceManagerException( - 403, "Project " + PARTIAL_PROJECT.projectId() + " not found.", false)) + 403, "Project " + PARTIAL_PROJECT.projectId() + " not found.")) .once(); EasyMock.replay(resourceManagerRpcMock); thrown.expect(ResourceManagerException.class); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java index 29fdc651af9f..dc84a1de5559 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java @@ -34,7 +34,6 @@ import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpHeaders; @@ -59,7 +58,6 @@ import com.google.api.services.storage.model.StorageObject; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gcloud.storage.StorageException; @@ -68,12 +66,10 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; public class DefaultStorageRpc implements StorageRpc { @@ -81,8 +77,6 @@ public class DefaultStorageRpc implements StorageRpc { private final StorageOptions options; private final Storage storage; - // see: https://cloud.google.com/storage/docs/concepts-techniques#practices - private static final Set RETRYABLE_CODES = ImmutableSet.of(504, 503, 502, 500, 429, 408); private static final long MEGABYTE = 1024L * 1024L; private static final int MAX_BATCH_DELETES = 100; @@ -97,25 +91,11 @@ public DefaultStorageRpc(StorageOptions options) { } private static StorageException translate(IOException exception) { - StorageException translated; - if (exception instanceof GoogleJsonResponseException - && ((GoogleJsonResponseException) exception).getDetails() != null) { - translated = translate(((GoogleJsonResponseException) exception).getDetails()); - } else { - boolean retryable = false; - if (exception instanceof SocketTimeoutException) { - retryable = true; - } - translated = new StorageException(0, exception.getMessage(), retryable); - } - translated.initCause(exception); - return translated; + return new StorageException(exception); } private static StorageException translate(GoogleJsonError exception) { - boolean retryable = RETRYABLE_CODES.contains(exception.getCode()) - || "InternalError".equals(exception.getMessage()); - return new StorageException(exception.getCode(), exception.getMessage(), retryable); + return new StorageException(exception); } @Override diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java 
b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java index 984f5d1f72e9..121f2eb63589 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java @@ -129,7 +129,7 @@ public Tuple call() { if (lastEtag != null && !Objects.equals(result.x(), lastEtag)) { StringBuilder messageBuilder = new StringBuilder(); messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); - throw new StorageException(0, messageBuilder.toString(), false); + throw new StorageException(0, messageBuilder.toString()); } lastEtag = result.x(); buffer = result.y(); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java index c1075ae28c8b..e724d8ac6850 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java @@ -16,11 +16,15 @@ package com.google.gcloud.storage; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.common.collect.ImmutableSet; import com.google.gcloud.BaseServiceException; -import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryHelper.RetryInterruptedException; +import java.io.IOException; +import java.util.Set; + /** * Storage service exception. * @@ -29,11 +33,33 @@ */ public class StorageException extends BaseServiceException { - private static final long serialVersionUID = 8088235105953640145L; - private static final int UNKNOWN_CODE = -1; + // see: https://cloud.google.com/storage/docs/concepts-techniques#practices + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(504, null), + new Error(503, null), + new Error(502, null), + new Error(500, null), + new Error(429, null), + new Error(408, null), + new Error(null, "internalError")); + + private static final long serialVersionUID = -4168430271327813063L; + + public StorageException(int code, String message) { + super(code, message, null, true); + } + + public StorageException(IOException exception) { + super(exception, true); + } + + public StorageException(GoogleJsonError error) { + super(error, true); + } - public StorageException(int code, String message, boolean retryable) { - super(code, message, retryable); + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** @@ -43,13 +69,8 @@ public StorageException(int code, String message, boolean retryable) { * @throws StorageException when {@code ex} was caused by a {@code StorageException} * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - static StorageException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof StorageException) { - throw (StorageException) ex.getCause(); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new StorageException(UNKNOWN_CODE, ex.getMessage(), false); + public static StorageException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndThrow(ex); + throw new StorageException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java 
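
The Storage table above is the clearest illustration of the null-as-wildcard convention: most entries are code-only, and the reason-only Error(null, "internalError") entry retries any response whose first error reason is "internalError" regardless of HTTP code. A couple of assert-style examples of the resulting behavior (mirroring StorageExceptionTest below; not part of the patch):

    // code-only entries: Error(500, null) matches any reason, so plain 5xx/429/408 errors retry
    assert new StorageException(500, "backend error").retryable();
    assert !new StorageException(400, "bad request").retryable();

    // the GoogleJsonError constructor behaves the same way, taking code and first-error reason
    GoogleJsonError error = new GoogleJsonError();
    error.setCode(503);
    error.setMessage("Service Unavailable");
    assert new StorageException(error).retryable();
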
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java index 05b7f5f6fd8c..d06f004fe84c 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java @@ -69,8 +69,8 @@ public class RemoteGcsHelperTest { private static final List BLOB_LIST = ImmutableList.of( BlobInfo.builder(BUCKET_NAME, "n1").build(), BlobInfo.builder(BUCKET_NAME, "n2").build()); - private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, "", true); - private static final StorageException FATAL_EXCEPTION = new StorageException(500, "", false); + private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, ""); + private static final StorageException FATAL_EXCEPTION = new StorageException(500, ""); private static final Page BLOB_PAGE = new Page() { @Override diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java new file mode 100644 index 000000000000..cf1d4b394e57 --- /dev/null +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java @@ -0,0 +1,125 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.storage; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class StorageExceptionTest { + + @Test + public void testStorageException() { + StorageException exception = new StorageException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(502, "message"); + assertEquals(502, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(504, "message"); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(429, "message"); + assertEquals(429, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(408, "message"); + assertEquals(408, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(400, "message"); + assertEquals(400, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new StorageException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + + GoogleJsonError error = new GoogleJsonError(); + error.setCode(503); + error.setMessage("message"); + exception = new StorageException(error); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + StorageException cause = new StorageException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + StorageException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) 
{ + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java index 0e1f1a0b2f52..b8da0580cd4a 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java @@ -1266,7 +1266,7 @@ public Tuple apply(StorageObject f) { public void testRetryableException() { BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1); EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS)) - .andThrow(new StorageException(500, "InternalError", true)) + .andThrow(new StorageException(500, "internalError")) .andReturn(BLOB_INFO1.toPb()); EasyMock.replay(storageRpcMock); storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); @@ -1279,7 +1279,7 @@ public void testNonRetryableException() { BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1); String exceptionMessage = "Not Implemented"; EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS)) - .andThrow(new StorageException(501, exceptionMessage, false)); + .andThrow(new StorageException(501, exceptionMessage)); EasyMock.replay(storageRpcMock); storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); thrown.expect(StorageException.class); From acf260ce07f7fdd0729682d74c15c5c88abf7c84 Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Fri, 15 Jan 2016 18:20:26 +0100 Subject: [PATCH 07/18] Move exception handler and interceptor to BaseService class --- .../google/gcloud/bigquery/BigQueryImpl.java | 23 --------------- .../java/com/google/gcloud/BaseService.java | 25 +++++++++++++++++ .../gcloud/datastore/DatastoreImpl.java | 28 +------------------ .../resourcemanager/ResourceManagerImpl.java | 25 ----------------- .../google/gcloud/storage/StorageImpl.java | 22 --------------- 5 files changed, 26 insertions(+), 97 deletions(-) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java index 3a1cc658bef3..ad55056474fb 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -34,8 +34,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.Page; import com.google.gcloud.PageImpl; import com.google.gcloud.PageImpl.NextPageFetcher; @@ -49,27 +47,6 @@ final class BigQueryImpl extends BaseService implements BigQuery { - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { - - private static final long serialVersionUID = -7478333733015750774L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof BigQueryException) { - boolean retriable = ((BigQueryException) exception).retryable(); - return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class).interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); - private static class DatasetPageFetcher implements NextPageFetcher { private static final long serialVersionUID = -3057564042439021278L; diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java index c028eaede331..d9e6f2db7c95 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java @@ -16,6 +16,8 @@ package com.google.gcloud; +import com.google.gcloud.ExceptionHandler.Interceptor; + /** * Base class for service objects. * @@ -24,6 +26,29 @@ public abstract class BaseService> implements Service { + public static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { + + private static final long serialVersionUID = -8429573486870467828L; + + @Override + public RetryResult afterEval(Exception exception, RetryResult retryResult) { + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + + @Override + public RetryResult beforeEval(Exception exception) { + if (exception instanceof BaseServiceException) { + boolean retriable = ((BaseServiceException) exception).retryable(); + return retriable ? Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; + } + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + }; + public static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() + .abortOn(RuntimeException.class) + .interceptor(EXCEPTION_HANDLER_INTERCEPTOR) + .build(); + private final OptionsT options; protected BaseService(OptionsT options) { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java index bfcba58f3f2f..92d18ed4787c 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java @@ -23,8 +23,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryParams; @@ -41,31 +39,7 @@ import java.util.Set; import java.util.concurrent.Callable; -final class DatastoreImpl extends BaseService - implements Datastore { - - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = - new Interceptor() { - - private static final long serialVersionUID = 6911242958397733203L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof DatastoreException) { - boolean retryable = ((DatastoreException) exception).retryable(); - return retryable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - private static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class, DatastoreException.class) - .interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); +final class DatastoreImpl extends BaseService implements Datastore { private final DatastoreRpc datastoreRpc; private final RetryParams retryParams; diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java index 2a0e09d9fb31..22f2b350d2f3 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java @@ -25,8 +25,6 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.Page; import com.google.gcloud.PageImpl; import com.google.gcloud.PageImpl.NextPageFetcher; @@ -40,29 +38,6 @@ final class ResourceManagerImpl extends BaseService implements ResourceManager { - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { - - private static final long serialVersionUID = 2091576149969931704L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof ResourceManagerException) { - boolean retriable = ((ResourceManagerException) exception).retryable(); - return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class) - .interceptor(EXCEPTION_HANDLER_INTERCEPTOR) - .build(); - private final ResourceManagerRpc resourceManagerRpc; ResourceManagerImpl(ResourceManagerOptions options) { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index a6c851d0f638..a4b6c56e5ede 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -44,8 +44,6 @@ import com.google.common.primitives.Ints; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.Page; import com.google.gcloud.PageImpl; import com.google.gcloud.PageImpl.NextPageFetcher; @@ -76,26 +74,6 @@ final class StorageImpl extends BaseService implements Storage { - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { - - private static final long serialVersionUID = -7758580330857881124L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof StorageException) { - boolean retriable = ((StorageException) exception).retryable(); - return retriable ? Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class).interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); private static final byte[] EMPTY_BYTE_ARRAY = {}; private static final String EMPTY_BYTE_ARRAY_MD5 = "1B2M2Y8AsgTpgAmY7PhCfg=="; private static final String EMPTY_BYTE_ARRAY_CRC32C = "AAAAAA=="; From 67e5dfce1f7d6903fc75a67d472c85ebb900258e Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Wed, 20 Jan 2016 07:40:53 +0100 Subject: [PATCH 08/18] Rename translateAndThrow in BaseServiceException and make it package scope in other exceptions --- .../com/google/gcloud/bigquery/BigQueryException.java | 4 ++-- .../main/java/com/google/gcloud/BaseServiceException.java | 4 +--- .../java/com/google/gcloud/BaseServiceExceptionTest.java | 8 +++----- .../com/google/gcloud/datastore/DatastoreException.java | 4 ++-- .../gcloud/resourcemanager/ResourceManagerException.java | 4 ++-- .../java/com/google/gcloud/storage/StorageException.java | 4 ++-- 6 files changed, 12 insertions(+), 16 deletions(-) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java index 930d06d523ab..b0cca68e3e0a 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java @@ -76,8 +76,8 @@ protected Set retryableErrors() { * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException} * @throws RetryInterruptedException when 
{@code ex} is a {@code RetryInterruptedException} */ - public static BigQueryException translateAndThrow(RetryHelperException ex) { - BaseServiceException.translateAndThrow(ex); + static BaseServiceException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); throw new BigQueryException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java index 9f4bfdab994d..351ad6cd188a 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -184,14 +184,12 @@ protected static String message(IOException exception) { return exception.getMessage(); } - protected static BaseServiceException translateAndThrow( - RetryHelper.RetryHelperException ex) { + protected static void translateAndPropagateIfPossible(RetryHelper.RetryHelperException ex) { if (ex.getCause() instanceof BaseServiceException) { throw (BaseServiceException) ex.getCause(); } if (ex instanceof RetryHelper.RetryInterruptedException) { RetryHelper.RetryInterruptedException.propagate(); } - return null; } } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java index a6e22866ed9f..e3c6abb7d1ee 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java @@ -137,14 +137,12 @@ public void testBaseServiceException() { @Test public void testTranslateAndThrow() throws Exception { BaseServiceException cause = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); - RetryHelper.RetryHelperException exceptionMock = createMock(RetryHelper.RetryHelperException.class); + RetryHelper.RetryHelperException exceptionMock = + createMock(RetryHelper.RetryHelperException.class); expect(exceptionMock.getCause()).andReturn(cause).times(2); replay(exceptionMock); try { - BaseServiceException ex = BaseServiceException.translateAndThrow(exceptionMock); - if (ex != null) { - throw ex; - } + BaseServiceException.translateAndPropagateIfPossible(exceptionMock); } catch (BaseServiceException ex) { assertEquals(CODE, ex.code()); assertEquals(MESSAGE, ex.getMessage()); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java index 946fc9190fc3..a7e6785b8a8c 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java @@ -63,8 +63,8 @@ protected Set retryableErrors() { * @throws DatastoreException when {@code ex} was caused by a {@code DatastoreException} * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - public static DatastoreException translateAndThrow(RetryHelperException ex) { - BaseServiceException.translateAndThrow(ex); + static DatastoreException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); throw new DatastoreException(UNKNOWN_CODE, ex.getMessage(), null); } diff --git 
a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java index 3510f7728a8f..32a2998791c9 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java @@ -68,8 +68,8 @@ protected Set retryableErrors() { * ResourceManagerException} * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - public static ResourceManagerException translateAndThrow(RetryHelperException ex) { - BaseServiceException.translateAndThrow(ex); + static ResourceManagerException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); throw new ResourceManagerException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java index e724d8ac6850..0c952c9a65d6 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java @@ -69,8 +69,8 @@ protected Set retryableErrors() { * @throws StorageException when {@code ex} was caused by a {@code StorageException} * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - public static StorageException translateAndThrow(RetryHelperException ex) { - BaseServiceException.translateAndThrow(ex); + static StorageException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); throw new StorageException(UNKNOWN_CODE, ex.getMessage()); } } From 34e6806becf14f1b9bf0aeac1f7cc11d2aad95b1 Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Wed, 20 Jan 2016 07:55:39 +0100 Subject: [PATCH 09/18] Add throwable cause to DatastoreException --- .../src/main/java/com/google/gcloud/BaseServiceException.java | 2 +- .../java/com/google/gcloud/datastore/DatastoreException.java | 2 +- .../main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java index 351ad6cd188a..b91090f94e82 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -105,7 +105,7 @@ public BaseServiceException(int code, String message, String reason, boolean ide } public BaseServiceException(int code, String message, String reason, boolean idempotent, - Exception cause) { + Throwable cause) { super(message, cause); this.code = code; this.reason = reason; diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java index a7e6785b8a8c..ecad69ac635b 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java @@ -39,7 +39,7 @@ public class DatastoreException extends BaseServiceException 
{ new Error(503, "UNAVAILABLE")); private static final long serialVersionUID = 2663750991205874435L; - public DatastoreException(int code, String message, String reason, Exception cause) { + public DatastoreException(int code, String message, String reason, Throwable cause) { super(code, message, reason, true, cause); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java index ac00d94692de..c82ff9689f68 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java @@ -107,7 +107,7 @@ private static DatastoreException translate( return new DatastoreException((IOException) exception.getCause()); } } - return new DatastoreException(code, message, reason); + return new DatastoreException(code, message, reason, exception); } @Override From 3c81f1679e7f6880d9f3c4a9b89a5831c90fb02f Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Wed, 20 Jan 2016 09:28:40 +0100 Subject: [PATCH 10/18] Move isRetryable method to BaseServiceException.Error --- .../google/gcloud/BaseServiceException.java | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java index b91090f94e82..0222a0d2258c 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -58,6 +58,16 @@ public String reason() { return reason; } + boolean isRetryable(Set retryableErrors) { + for (Error retryableError : retryableErrors) { + if ((retryableError.code() == null || retryableError.code().equals(this.code())) + && (retryableError.reason() == null || retryableError.reason().equals(this.reason()))) { + return true; + } + } + return false; + } + @Override public String toString() { return MoreObjects.toStringHelper(this).add("code", code).add("reason", reason).toString(); @@ -83,7 +93,7 @@ public BaseServiceException(IOException exception, boolean idempotent) { Error error = error(((GoogleJsonResponseException) exception).getDetails()); this.code = error.code; this.reason = error.reason; - this.retryable = isRetryable(error); + this.retryable = error.isRetryable(retryableErrors()); } else { this.code = UNKNOWN_CODE; this.reason = null; @@ -110,7 +120,7 @@ public BaseServiceException(int code, String message, String reason, boolean ide this.code = code; this.reason = reason; this.idempotent = idempotent; - this.retryable = idempotent && isRetryable(new Error(code, reason)); + this.retryable = idempotent && new Error(code, reason).isRetryable(retryableErrors()); } protected Set retryableErrors() { @@ -118,7 +128,7 @@ protected Set retryableErrors() { } protected boolean isRetryable(GoogleJsonError error) { - return error != null && isRetryable(error(error)); + return error != null && error(error).isRetryable(retryableErrors()); } protected boolean isRetryable(IOException exception) { @@ -128,16 +138,6 @@ protected boolean isRetryable(IOException exception) { return exception instanceof SocketTimeoutException; } - protected boolean isRetryable(Error error) { - for (Error retryableError : retryableErrors()) { - if ((retryableError.code() == null || retryableError.code().equals(error.code())) - && 
(retryableError.reason() == null || retryableError.reason().equals(error.reason()))) { - return true; - } - } - return false; - } - /** * Returns the code associated with this exception. */ From 1fc0e3275e352706734dc935c6d4bab77976fb1c Mon Sep 17 00:00:00 2001 From: Martin Derka Date: Wed, 20 Jan 2016 17:55:43 -0800 Subject: [PATCH 11/18] Implemented comments by @aozarov. Also removed incomplete ManagedZoneInfo.java. --- .../java/com/google/gcloud/dns/DnsRecord.java | 170 ++++++++++-------- .../google/gcloud/dns/ManagedZoneInfo.java | 44 ----- .../com/google/gcloud/dns/DnsRecordTest.java | 68 ++----- 3 files changed, 105 insertions(+), 177 deletions(-) delete mode 100644 gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java index 91278fa2a1e7..9cc21acfa0a5 100644 --- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.gcloud.dns; import static com.google.common.base.Preconditions.checkArgument; @@ -20,6 +21,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import java.io.Serializable; @@ -30,7 +32,10 @@ /** * A class that represents Google Cloud DNS record set. * - *
<p>A unit of data that will be returned by the DNS servers. + * <p>
A DnsRecord is the unit of data that will be returned by the DNS servers upon a DNS request + * for a specific domain. The DnsRecord holds the current state of the DNS records that make up a + * managed zone. You can read the records but you do not modify them directly. Rather, you edit + * the records in a managed zone by creating a {@link ChangeRequest}. * * @see Google Cloud DNS * documentation @@ -42,26 +47,6 @@ public class DnsRecord implements Serializable { private final List rrdatas; private final Integer ttl; private final DnsRecordType type; - private final String zoneName; - private final Long zoneId; - - private DnsRecord() { - this.name = null; - this.rrdatas = null; - this.ttl = null; - this.type = null; - this.zoneName = null; - this.zoneId = null; - } - - DnsRecord(Builder builder) { - this.name = builder.name; - this.rrdatas = ImmutableList.copyOf(builder.rrdatas); - this.ttl = builder.ttl; - this.type = builder.type; - this.zoneName = builder.zoneName; - this.zoneId = builder.zoneId; - } /** * Enum for the DNS record types supported by Cloud DNS. @@ -73,16 +58,51 @@ private DnsRecord() { * supported record types */ public enum DnsRecordType { + /** + * Address record, which is used to map host names to their IPv4 address. + */ A, + /** + * IPv6 Address record, which is used to map host names to their IPv6 address. + */ AAAA, + /** + * Canonical name record, which is used to alias names. + */ CNAME, + /** + * Mail exchange record, which is used in routing requests to mail servers. + */ MX, + /** + * Naming authority pointer record, defined by RFC3403. + */ NAPTR, + /** + * Name server record, which delegates a DNS zone to an authoritative server. + */ NS, + /** + * Pointer record, which is often used for reverse DNS lookups. + */ PTR, + /** + * Start of authority record, which specifies authoritative information about a DNS zone. + */ SOA, + /** + * Sender policy framework record, which is used in email validation systems. + */ SPF, + /** + * Service locator record, which is used by some voice over IP, instant messaging protocols and + * other applications. + */ SRV, + /** + * Text record, which can contain arbitrary text and can also be used to define machine readable + * data such as security or abuse prevention information. + */ TXT } @@ -92,8 +112,6 @@ public static class Builder { private String name; private Integer ttl; private DnsRecordType type; - private String zoneName; - private Long zoneId; private Builder() { } @@ -102,12 +120,10 @@ private Builder() { * Creates a builder and pre-populates attributes with the values from the provided DnsRecord * instance. */ - public Builder(DnsRecord record) { + private Builder(DnsRecord record) { this.name = record.name; this.ttl = record.ttl; this.type = record.type; - this.zoneId = record.zoneId; - this.zoneName = record.zoneName; this.rrdatas.addAll(record.rrdatas); } @@ -118,11 +134,46 @@ public Builder(DnsRecord record) { * @see Google * DNS documentation . */ - public Builder add(String record) { + public Builder addRecord(String record) { this.rrdatas.add(checkNotNull(record)); return this; } + /** + * Removes a record from the set. An exact match is required. + */ + public Builder removerRecord(String record) { + this.rrdatas.remove(checkNotNull(record)); + return this; + } + + /** + * Removes a record on the given index from the set. + */ + public Builder removerRecord(int index) { + checkArgument(index >= 0 && index < this.rrdatas.size(), "The index is out of bounds. 
An " + + "integer between 0 and " + (this.rrdatas.size() - 1) + " is required. The provided " + + "value was " + index + "."); + this.rrdatas.remove(index); + return this; + } + + /** + * Removes all the records. + */ + public Builder clearRecords() { + this.rrdatas.clear(); + return this; + } + + /** + * Replaces the current records with the provided list of records. + */ + public Builder records(List records) { + this.rrdatas = Lists.newLinkedList(checkNotNull(records)); + return this; + } + /** * Sets name for this DNS record set. For example, www.example.com. */ @@ -157,26 +208,13 @@ public Builder type(DnsRecordType type) { public DnsRecord build() { return new DnsRecord(this); } + } - /** - * Sets references to the managed zone that this DNS record belongs to. - * - * todo(mderka): consider if this method is needed; may not be possible when listing records - */ - Builder managedZone(ManagedZoneInfo parent) { - checkNotNull(parent); - this.zoneId = parent.id(); - this.zoneName = parent.name(); - return this; - } - - /** - * Sets name reference to the managed zone that this DNS record belongs to. - */ - Builder managedZone(String managedZoneName) { - this.zoneName = checkNotNull(managedZoneName); - return this; - } + DnsRecord(Builder builder) { + this.name = builder.name; + this.rrdatas = ImmutableList.copyOf(builder.rrdatas); + this.ttl = builder.ttl; + this.type = builder.type; } /** @@ -187,7 +225,7 @@ public Builder toBuilder() { } /** - * Creates an empty builder + * Creates an empty builder. */ public static Builder builder() { return new Builder(); @@ -203,13 +241,12 @@ public String name() { /** * Returns a list of DNS record stored in this record set. */ - public List rrdatas() { + public List records() { return rrdatas; } /** - * Returns the number of seconds that this DnsResource can be cached by resolvers. This number is - * provided by the user. + * Returns the number of seconds that this DnsResource can be cached by resolvers. */ public Integer ttl() { return ttl; @@ -222,44 +259,21 @@ public DnsRecordType type() { return type; } - /** - * Returns name of the managed zone that this record belongs to. The name of the managed zone is - * provided by the user when the managed zone is created. It is unique within a project. If this - * DNS record is not associated with a managed zone, this returns null. - */ - public String zoneName() { - return zoneName; - } - - /** - * Returns id of the managed zone that this record belongs to. - * - *
<p>
The id of the managed zone is determined by the server when the managed zone is created. It - * is a read only value. If this DNS record is not associated with a managed zone, or if the id of - * the managed zone was not loaded from the cloud service, this returns null. - */ - public Long zoneId() { - return zoneId; - } - @Override public int hashCode() { - return Objects.hash(name, rrdatas, ttl, type, zoneName, zoneId); + return Objects.hash(name, rrdatas, ttl, type); } @Override public boolean equals(Object obj) { - return (obj instanceof DnsRecord) && Objects.equals(this.toPb(), ((DnsRecord) obj).toPb()) - && this.zoneId().equals(((DnsRecord) obj).zoneId()) - && this.zoneName().equals(((DnsRecord) obj).zoneName()); - + return (obj instanceof DnsRecord) && Objects.equals(this.toPb(), ((DnsRecord) obj).toPb()); } com.google.api.services.dns.model.ResourceRecordSet toPb() { com.google.api.services.dns.model.ResourceRecordSet pb = new com.google.api.services.dns.model.ResourceRecordSet(); pb.setName(this.name()); - pb.setRrdatas(this.rrdatas()); + pb.setRrdatas(this.records()); pb.setTtl(this.ttl()); pb.setType(this.type() == null ? null : this.type().name()); return pb; @@ -269,11 +283,9 @@ com.google.api.services.dns.model.ResourceRecordSet toPb() { public String toString() { return MoreObjects.toStringHelper(this) .add("name", name()) - .add("rrdatas", rrdatas()) + .add("rrdatas", records()) .add("ttl", ttl()) .add("type", type()) - .add("zoneName", zoneName()) - .add("zoneId", zoneId()) .toString(); } diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java deleted file mode 100644 index d5ed8351dc34..000000000000 --- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/ManagedZoneInfo.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2016 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.gcloud.dns; - -/** - * todo(mderka): Implement. - * todo(mderka): Add documentation. 
- */ -public class ManagedZoneInfo { - - private final String name; - private final Long id; - - public String name() { - throw new UnsupportedOperationException("Not implemented yet."); - // todo(mderka): Implement - } - - public Long id() { - return id; - // todo(mderka): Implement - } - - private ManagedZoneInfo() { - name = null; - id = null; - throw new UnsupportedOperationException("Not implemented yet"); - // todo(mderka): Implement - } - -} diff --git a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java index ee9e6e58d61d..4c03306ffb02 100644 --- a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java +++ b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java @@ -20,63 +20,40 @@ import static org.junit.Assert.fail; import static org.junit.Assert.assertNotEquals; -import org.junit.BeforeClass; import org.junit.Test; -import org.easymock.EasyMock; - public class DnsRecordTest { private static final String NAME = "example.com."; private static final Integer TTL = 3600; private static final DnsRecord.DnsRecordType TYPE = DnsRecord.DnsRecordType.AAAA; - private static final Long ZONE_ID = 12L; - private static final String ZONE_NAME = "name"; - // the following is initialized in @BeforeClass setUp() - private static DnsRecord record; - private static ManagedZoneInfo managedZoneInfoMock; - - @BeforeClass - public static void setUp() { - managedZoneInfoMock = EasyMock.createMock(ManagedZoneInfo.class); - EasyMock.expect(managedZoneInfoMock.id()).andReturn(ZONE_ID); - EasyMock.expect(managedZoneInfoMock.name()).andReturn(ZONE_NAME); - EasyMock.replay(managedZoneInfoMock); - record = DnsRecord.builder() - .name(NAME) - .ttl(TTL) - .managedZone(managedZoneInfoMock) - .build(); - } + private static final DnsRecord record = DnsRecord.builder() + .name(NAME) + .ttl(TTL) + .type(TYPE) + .build(); @Test public void testDefaultDnsRecord() { DnsRecord record = DnsRecord.builder().build(); - assertEquals(0, record.rrdatas().size()); + assertEquals(0, record.records().size()); } @Test public void testBuilder() { - assertEquals(NAME, record.name()); assertEquals(TTL, record.ttl()); - - assertEquals(ZONE_ID, record.zoneId()); // this was never assigned - assertEquals(ZONE_NAME, record.zoneName()); - assertEquals(0, record.rrdatas().size()); + assertEquals(0, record.records().size()); // verify that one can add records to the record set String testingRecord = "Testing record"; String anotherTestingRecord = "Another record 123"; - String differentName = ZONE_NAME + "something"; DnsRecord anotherRecord = record.toBuilder() - .add(testingRecord) - .add(anotherTestingRecord) - .managedZone(differentName) + .addRecord(testingRecord) + .addRecord(anotherTestingRecord) .build(); - assertEquals(2, anotherRecord.rrdatas().size()); - assertEquals(differentName, anotherRecord.zoneName()); - assertTrue(anotherRecord.rrdatas().contains(testingRecord)); - assertTrue(anotherRecord.rrdatas().contains(anotherTestingRecord)); + assertEquals(2, anotherRecord.records().size()); + assertTrue(anotherRecord.records().contains(testingRecord)); + assertTrue(anotherRecord.records().contains(anotherTestingRecord)); } @Test @@ -95,7 +72,8 @@ public void testValidTtl() { public void testEqualsAndNotEquals() { DnsRecord clone = record.toBuilder().build(); assertEquals(clone, record); - clone = record.toBuilder().add("another record").build(); + clone = record.toBuilder().addRecord("another record").build(); 
+ assertNotEquals(clone, record); final String differentName = "totally different name"; clone = record.toBuilder().name(differentName).build(); assertNotEquals(clone, record); @@ -103,12 +81,6 @@ public void testEqualsAndNotEquals() { assertNotEquals(clone, record); clone = record.toBuilder().type(DnsRecord.DnsRecordType.TXT).build(); assertNotEquals(clone, record); - ManagedZoneInfo anotherMock = EasyMock.createMock(ManagedZoneInfo.class); - EasyMock.expect(anotherMock.id()).andReturn(ZONE_ID + 1); - EasyMock.expect(anotherMock.name()).andReturn(ZONE_NAME + "more text"); - EasyMock.replay(anotherMock); - clone = record.toBuilder().managedZone(anotherMock).build(); - assertNotEquals(clone, record); } @Test @@ -117,16 +89,4 @@ public void testSameHashCodeOnEquals() { DnsRecord clone = record.toBuilder().build(); assertEquals(clone.hashCode(), hash); } - - @Test - public void testDifferentHashCodeOnDifferent() { - int hash = record.hashCode(); - final String differentName = "totally different name"; - DnsRecord clone = record.toBuilder().name(differentName).build(); - assertNotEquals(differentName, record.name()); - assertNotEquals(clone.hashCode(), hash); - DnsRecord anotherClone = record.toBuilder().add("another record").build(); - assertNotEquals(anotherClone.hashCode(), hash); - } - } From 3026e77428e35d47f2e68541849082710394a8c8 Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Thu, 21 Jan 2016 09:32:43 +0100 Subject: [PATCH 12/18] Rename Storage.apply to Storage.submit --- .../main/java/com/google/gcloud/storage/Bucket.java | 2 +- .../main/java/com/google/gcloud/storage/Storage.java | 2 +- .../java/com/google/gcloud/storage/StorageImpl.java | 8 ++++---- .../java/com/google/gcloud/storage/BucketTest.java | 2 +- .../java/com/google/gcloud/storage/ITStorageTest.java | 10 +++++----- .../com/google/gcloud/storage/StorageImplTest.java | 2 +- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java index d0e823492ee3..3acd3f5d79b9 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java @@ -319,7 +319,7 @@ public List get(String blobName1, String blobName2, String... blobNames) { batch.get(info.name(), name); } List blobs = new ArrayList<>(blobNames.length); - BatchResponse response = storage.apply(batch.build()); + BatchResponse response = storage.submit(batch.build()); for (BatchResponse.Result result : response.gets()) { BlobInfo blobInfo = result.get(); blobs.add(blobInfo != null ? new Blob(storage, blobInfo) : null); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index f8c90ff42930..272c5fdef223 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -1412,7 +1412,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * @return the batch response * @throws StorageException upon failure */ - BatchResponse apply(BatchRequest batchRequest); + BatchResponse submit(BatchRequest batchRequest); /** * Return a channel for reading the blob's content. The blob's latest generation is read. 
If the diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index a4b6c56e5ede..b6a833f26ab4 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -441,7 +441,7 @@ public byte[] call() { } @Override - public BatchResponse apply(BatchRequest batchRequest) { + public BatchResponse submit(BatchRequest batchRequest) { List>> toDelete = Lists.newArrayListWithCapacity(batchRequest.toDelete().size()); for (Map.Entry> entry : batchRequest.toDelete().entrySet()) { @@ -592,7 +592,7 @@ public List get(BlobId... blobIds) { for (BlobId blob : blobIds) { requestBuilder.get(blob); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.gets(), null)); } @@ -602,7 +602,7 @@ public List update(BlobInfo... blobInfos) { for (BlobInfo blobInfo : blobInfos) { requestBuilder.update(blobInfo); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.updates(), null)); } @@ -612,7 +612,7 @@ public List delete(BlobId... blobIds) { for (BlobId blob : blobIds) { requestBuilder.delete(blob); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.deletes(), Boolean.FALSE)); } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java index e67e7aff17dc..4e253033c6f2 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java @@ -175,7 +175,7 @@ public void testGetAll() throws Exception { } BatchResponse response = new BatchResponse(Collections.>emptyList(), Collections.>emptyList(), batchResultList); - expect(storage.apply(capture(capturedBatchRequest))).andReturn(response); + expect(storage.submit(capture(capturedBatchRequest))).andReturn(response); replay(storage); List blobs = bucket.get("n1", "n2", "n3"); Set blobInfoSet = capturedBatchRequest.getValue().toGet().keySet(); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java index 614ceee7b61e..63b9d739b686 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java @@ -584,7 +584,7 @@ public void testBatchRequest() { .update(updatedBlob1) .update(updatedBlob2) .build(); - BatchResponse updateResponse = storage.apply(updateRequest); + BatchResponse updateResponse = storage.submit(updateRequest); assertEquals(2, updateResponse.updates().size()); assertEquals(0, updateResponse.deletes().size()); assertEquals(0, updateResponse.gets().size()); @@ -602,7 +602,7 @@ public void testBatchRequest() { .get(BUCKET, sourceBlobName1) .get(BUCKET, sourceBlobName2) .build(); - BatchResponse getResponse = storage.apply(getRequest); + BatchResponse getResponse = storage.submit(getRequest); assertEquals(2, 
getResponse.gets().size()); assertEquals(0, getResponse.deletes().size()); assertEquals(0, getResponse.updates().size()); @@ -616,7 +616,7 @@ public void testBatchRequest() { .delete(BUCKET, sourceBlobName1) .delete(BUCKET, sourceBlobName2) .build(); - BatchResponse deleteResponse = storage.apply(deleteRequest); + BatchResponse deleteResponse = storage.submit(deleteRequest); assertEquals(2, deleteResponse.deletes().size()); assertEquals(0, deleteResponse.gets().size()); assertEquals(0, deleteResponse.updates().size()); @@ -646,7 +646,7 @@ public void testBatchRequestManyDeletes() { .get(BUCKET, sourceBlobName1) .update(updatedBlob2) .build(); - BatchResponse response = storage.apply(updateRequest); + BatchResponse response = storage.submit(updateRequest); assertEquals(2 * MAX_BATCH_DELETES, response.deletes().size()); assertEquals(1, response.updates().size()); assertEquals(1, response.gets().size()); @@ -685,7 +685,7 @@ public void testBatchRequestFail() { .get(BUCKET, blobName, Storage.BlobGetOption.generationMatch(-1L)) .get(BlobId.of(BUCKET, blobName, -1L)) .build(); - BatchResponse batchResponse = storage.apply(batchRequest); + BatchResponse batchResponse = storage.submit(batchRequest); assertEquals(1, batchResponse.updates().size()); assertEquals(2, batchResponse.deletes().size()); assertEquals(2, batchResponse.gets().size()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java index b8da0580cd4a..f32a51507857 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java @@ -975,7 +975,7 @@ public Tuple apply(StorageObject f) { EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res); EasyMock.replay(storageRpcMock); storage = options.service(); - BatchResponse batchResponse = storage.apply(req); + BatchResponse batchResponse = storage.submit(req); // Verify captured StorageRpc.BatchRequest List>> capturedToDelete = From 2e38f0219904e8d52bbd10de15610d5814f5e30b Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Thu, 21 Jan 2016 11:09:32 +0100 Subject: [PATCH 13/18] Add code to initialize BigQueryError in BigQueryException --- .../google/gcloud/bigquery/BigQueryException.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java index b0cca68e3e0a..d07843583bee 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java @@ -16,6 +16,8 @@ package com.google.gcloud.bigquery; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.common.collect.ImmutableSet; import com.google.gcloud.BaseServiceException; import com.google.gcloud.RetryHelper.RetryHelperException; @@ -53,7 +55,16 @@ public BigQueryException(int code, String message, BigQueryError error) { public BigQueryException(IOException exception) { super(exception, true); - this.error = null; + BigQueryError bigqueryError = null; + if (exception instanceof GoogleJsonResponseException) { + GoogleJsonError error = 
((GoogleJsonResponseException) exception).getDetails(); + if (error != null && error.getErrors() != null && !error.getErrors().isEmpty()) { + GoogleJsonError.ErrorInfo errorInfo = error.getErrors().get(0); + bigqueryError = new BigQueryError(errorInfo.getReason(), errorInfo.getLocation(), + errorInfo.getMessage(), (String) error.get("debugInfo")); + } + } + this.error = bigqueryError; } /** From 3521bf59218bc7a0c59d53566546af3cbe1d8b52 Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Thu, 21 Jan 2016 11:59:29 +0100 Subject: [PATCH 14/18] Add equals and hashCode to BaseTableInfo subclasses --- .../google/gcloud/bigquery/ExternalTableInfo.java | 13 +++++++++++++ .../java/com/google/gcloud/bigquery/TableInfo.java | 10 ++++++++++ .../java/com/google/gcloud/bigquery/ViewInfo.java | 11 +++++++++++ 3 files changed, 34 insertions(+) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java index 177f8a7db2b8..21ccb3fc1642 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java @@ -21,6 +21,8 @@ import com.google.api.services.bigquery.model.Table; import com.google.common.base.MoreObjects.ToStringHelper; +import java.util.Objects; + /** * Google BigQuery External Table information. BigQuery's external tables are tables whose data * reside outside of BigQuery but can be queried as normal BigQuery tables. External tables are @@ -103,6 +105,17 @@ ToStringHelper toStringHelper() { return super.toStringHelper().add("configuration", configuration); } + @Override + public boolean equals(Object obj) { + return obj instanceof ExternalTableInfo + && Objects.equals(toPb(), ((ExternalTableInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), configuration); + } + @Override Table toPb() { Table tablePb = super.toPb(); diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java index 05fb6908a51b..54258abc6ddd 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java @@ -213,6 +213,16 @@ ToStringHelper toStringHelper() { .add("streamingBuffer", streamingBuffer); } + @Override + public boolean equals(Object obj) { + return obj instanceof TableInfo && Objects.equals(toPb(), ((TableInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), location, streamingBuffer); + } + @Override Table toPb() { Table tablePb = super.toPb(); diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java index 771a7a679c11..9af9e9d7a08e 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import java.util.List; +import java.util.Objects; /** * Google BigQuery View Table information. 
BigQuery's views are logical views, not materialized @@ -143,6 +144,16 @@ ToStringHelper toStringHelper() { .add("userDefinedFunctions", userDefinedFunctions); } + @Override + public boolean equals(Object obj) { + return obj instanceof ViewInfo && Objects.equals(toPb(), ((ViewInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), query, userDefinedFunctions); + } + @Override Table toPb() { Table tablePb = super.toPb(); From 01662be5e9f6bf11496d84b909676f97ca0401b9 Mon Sep 17 00:00:00 2001 From: Martin Derka Date: Thu, 21 Jan 2016 10:00:41 -0800 Subject: [PATCH 15/18] Implements comments by @ajkannan --- gcloud-java-dns/pom.xml | 12 +++--- .../java/com/google/gcloud/dns/DnsRecord.java | 37 ++++++++++++------- .../com/google/gcloud/dns/DnsRecordTest.java | 36 ++++++++++++++---- pom.xml | 1 + 4 files changed, 59 insertions(+), 27 deletions(-) diff --git a/gcloud-java-dns/pom.xml b/gcloud-java-dns/pom.xml index 55d720bc0a36..5f04f261d500 100644 --- a/gcloud-java-dns/pom.xml +++ b/gcloud-java-dns/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 com.google.gcloud gcloud-java-dns @@ -28,10 +30,10 @@ v1-rev7-1.21.0 compile - - com.google.guava - guava-jdk5 - + + com.google.guava + guava-jdk5 + com.google.api-client google-api-client diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java index 9cc21acfa0a5..f73c880f22f3 100644 --- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java +++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java @@ -24,18 +24,17 @@ import com.google.common.collect.Lists; import java.io.Serializable; - import java.util.LinkedList; import java.util.List; import java.util.Objects; /** - * A class that represents Google Cloud DNS record set. + * A class that represents a Google Cloud DNS record set. * - *
<p>A DnsRecord is the unit of data that will be returned by the DNS servers upon a DNS request - * for a specific domain. The DnsRecord holds the current state of the DNS records that make up a - * managed zone. You can read the records but you do not modify them directly. Rather, you edit - * the records in a managed zone by creating a {@link ChangeRequest}. + * <p>
A {@code DnsRecord} is the unit of data that will be returned by the DNS servers upon a DNS + * request for a specific domain. The {@code DnsRecord} holds the current state of the DNS records + * that make up a managed zone. You can read the records but you do not modify them directly. + * Rather, you edit the records in a managed zone by creating a ChangeRequest. * * @see Google Cloud DNS * documentation @@ -117,8 +116,8 @@ private Builder() { } /** - * Creates a builder and pre-populates attributes with the values from the provided DnsRecord - * instance. + * Creates a builder and pre-populates attributes with the values from the provided {@code + * DnsRecord} instance. */ private Builder(DnsRecord record) { this.name = record.name; @@ -142,7 +141,7 @@ public Builder addRecord(String record) { /** * Removes a record from the set. An exact match is required. */ - public Builder removerRecord(String record) { + public Builder removeRecord(String record) { this.rrdatas.remove(checkNotNull(record)); return this; } @@ -150,7 +149,7 @@ public Builder removerRecord(String record) { /** * Removes a record on the given index from the set. */ - public Builder removerRecord(int index) { + public Builder removeRecord(int index) { checkArgument(index >= 0 && index < this.rrdatas.size(), "The index is out of bounds. An " + "integer between 0 and " + (this.rrdatas.size() - 1) + " is required. The provided " + "value was " + index + "."); @@ -225,10 +224,10 @@ public Builder toBuilder() { } /** - * Creates an empty builder. + * Creates a builder for {@code DnsRecord} with mandatorily set name and type of the record. */ - public static Builder builder() { - return new Builder(); + public static Builder builder(String name, DnsRecordType type) { + return new Builder().name(name).type(type); } /** @@ -279,6 +278,17 @@ com.google.api.services.dns.model.ResourceRecordSet toPb() { return pb; } + static DnsRecord fromPb(com.google.api.services.dns.model.ResourceRecordSet pb) { + Builder b = builder(pb.getName(), DnsRecordType.valueOf(pb.getType())); + if (pb.getRrdatas() != null) { + b.records(pb.getRrdatas()); + } + if (pb.getTtl() != null) { + b.ttl(pb.getTtl()); + } + return b.build(); + } + @Override public String toString() { return MoreObjects.toStringHelper(this) @@ -288,5 +298,4 @@ public String toString() { .add("type", type()) .toString(); } - } diff --git a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java index 4c03306ffb02..e5b283a20acc 100644 --- a/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java +++ b/gcloud-java-dns/src/test/java/com/google/gcloud/dns/DnsRecordTest.java @@ -27,15 +27,13 @@ public class DnsRecordTest { private static final String NAME = "example.com."; private static final Integer TTL = 3600; private static final DnsRecord.DnsRecordType TYPE = DnsRecord.DnsRecordType.AAAA; - private static final DnsRecord record = DnsRecord.builder() - .name(NAME) + private static final DnsRecord record = DnsRecord.builder(NAME, TYPE) .ttl(TTL) - .type(TYPE) .build(); @Test public void testDefaultDnsRecord() { - DnsRecord record = DnsRecord.builder().build(); + DnsRecord record = DnsRecord.builder(NAME, TYPE).build(); assertEquals(0, record.records().size()); } @@ -59,13 +57,13 @@ public void testBuilder() { @Test public void testValidTtl() { try { - DnsRecord.builder().ttl(-1); + DnsRecord.builder(NAME, TYPE).ttl(-1); fail("A negative value is not acceptable for ttl."); } catch 
(IllegalArgumentException e) { // expected } - DnsRecord.builder().ttl(0); - DnsRecord.builder().ttl(Integer.MAX_VALUE); + DnsRecord.builder(NAME, TYPE).ttl(0); + DnsRecord.builder(NAME, TYPE).ttl(Integer.MAX_VALUE); } @Test @@ -89,4 +87,26 @@ public void testSameHashCodeOnEquals() { DnsRecord clone = record.toBuilder().build(); assertEquals(clone.hashCode(), hash); } -} + + @Test + public void testToAndFromPb() { + assertEquals(record, DnsRecord.fromPb(record.toPb())); + DnsRecord partial = DnsRecord.builder(NAME, TYPE).build(); + assertEquals(partial, DnsRecord.fromPb(partial.toPb())); + partial = DnsRecord.builder(NAME, TYPE).addRecord("test").build(); + assertEquals(partial, DnsRecord.fromPb(partial.toPb())); + partial = DnsRecord.builder(NAME, TYPE).ttl(15).build(); + assertEquals(partial, DnsRecord.fromPb(partial.toPb())); + } + + @Test + public void testToBuilder() { + assertEquals(record, record.toBuilder().build()); + DnsRecord partial = DnsRecord.builder(NAME, TYPE).build(); + assertEquals(partial, partial.toBuilder().build()); + partial = DnsRecord.builder(NAME, TYPE).addRecord("test").build(); + assertEquals(partial, partial.toBuilder().build()); + partial = DnsRecord.builder(NAME, TYPE).ttl(15).build(); + assertEquals(partial, partial.toBuilder().build()); + } +} \ No newline at end of file diff --git a/pom.xml b/pom.xml index cd5c19720cd8..7a435cc1dbae 100644 --- a/pom.xml +++ b/pom.xml @@ -70,6 +70,7 @@ gcloud-java-bigquery gcloud-java-core gcloud-java-datastore + gcloud-java-dns gcloud-java-examples gcloud-java-resourcemanager gcloud-java-storage From e27b5b3d81c52c1a77e9e2c8a5b6f05a8544516e Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Thu, 21 Jan 2016 20:35:31 +0100 Subject: [PATCH 16/18] Minor fixes to bigquery - Add defaultDataset to QueryJobInfo and QueryRequest that takes a string - Rename jobComplete to jobCompleted in QueryResult - Use FileChannel.transferTo in bigquery example to upload file --- .../com/google/gcloud/bigquery/BigQuery.java | 2 +- .../google/gcloud/bigquery/BigQueryImpl.java | 6 ++--- .../google/gcloud/bigquery/QueryJobInfo.java | 8 +++++++ .../google/gcloud/bigquery/QueryRequest.java | 17 +++++++++----- .../google/gcloud/bigquery/QueryResponse.java | 22 +++++++++---------- .../gcloud/bigquery/BigQueryImplTest.java | 8 +++---- .../gcloud/bigquery/ITBigQueryTest.java | 8 +++---- .../gcloud/bigquery/QueryResponseTest.java | 10 ++++----- .../gcloud/bigquery/SerializationTest.java | 2 +- .../gcloud/examples/BigQueryExample.java | 14 ++++++------ 10 files changed, 56 insertions(+), 41 deletions(-) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java index aa516c31fb54..d3c712229348 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -443,7 +443,7 @@ public static QueryResultsOption startIndex(long startIndex) { /** * Returns an option that sets how long to wait for the query to complete, in milliseconds, * before returning. Default is 10 seconds. If the timeout passes before the job completes, - * {@link QueryResponse#jobComplete()} will be {@code false}. + * {@link QueryResponse#jobCompleted()} will be {@code false}. 
*/ public static QueryResultsOption maxWaitTime(long maxWaitTime) { checkArgument(maxWaitTime >= 0); diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java index ad55056474fb..18368b6fa4f7 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -514,10 +514,10 @@ public com.google.api.services.bigquery.model.QueryResponse call() { QueryResponse.Builder builder = QueryResponse.builder(); JobId completeJobId = JobId.fromPb(results.getJobReference()); builder.jobId(completeJobId); - builder.jobComplete(results.getJobComplete()); + builder.jobCompleted(results.getJobComplete()); List rowsPb = results.getRows(); if (results.getJobComplete()) { - builder.jobComplete(true); + builder.jobCompleted(true); QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, results.getPageToken(), options(), ImmutableMap.of()); resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); @@ -561,7 +561,7 @@ public GetQueryResultsResponse call() { JobId completeJobId = JobId.fromPb(results.getJobReference()); builder.jobId(completeJobId); builder.etag(results.getEtag()); - builder.jobComplete(results.getJobComplete()); + builder.jobCompleted(results.getJobComplete()); List rowsPb = results.getRows(); if (results.getJobComplete()) { QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java index e11e8d6aa8ad..ad76d229bf2d 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java @@ -225,6 +225,14 @@ public Builder defaultDataset(DatasetId defaultDataset) { return self(); } + /** + * Sets the default dataset. This dataset is used for all unqualified table names used in the + * query. + */ + public Builder defaultDataset(String defaultDataset) { + return defaultDataset(DatasetId.of(defaultDataset)); + } + /** * Sets a priority for the query. If not specified the priority is assumed to be * {@link Priority#INTERACTIVE}. diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java index 0c0cf3de761d..64fbb3e931fc 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java @@ -29,10 +29,10 @@ * a temporary table that is deleted approximately 24 hours after the query is run. The query is run * through a BigQuery Job whose identity can be accessed via {@link QueryResponse#jobId()}. If the * query does not complete within the provided {@link Builder#maxWaitTime(Long)}, the response - * returned by {@link BigQuery#query(QueryRequest)} will have {@link QueryResponse#jobComplete()} + * returned by {@link BigQuery#query(QueryRequest)} will have {@link QueryResponse#jobCompleted()} * set to {@code false} and {@link QueryResponse#result()} set to {@code null}. 
To obtain query * results you can use {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} until - * {@link QueryResponse#jobComplete()} returns {@code true}. + * {@link QueryResponse#jobCompleted()} returns {@code true}. * *

  * <p>Example usage of a query request:
  * <pre> {@code
@@ -43,7 +43,7 @@
  *      .maxResults(1000L)
  *      .build();
  *    QueryResponse response = bigquery.query(request);
- *    while (!response.jobComplete()) {
+ *    while (!response.jobCompleted()) {
  *      Thread.sleep(1000);
  *      response = bigquery.getQueryResults(response.jobId());
  *    }
@@ -109,11 +109,18 @@ public Builder defaultDataset(DatasetId defaultDataset) {
       return this;
     }
 
+    /**
+     * Sets the default dataset to assume for any unqualified table names in the query.
+     */
+    public Builder defaultDataset(String defaultDataset) {
+      return defaultDataset(DatasetId.of(defaultDataset));
+    }
+
     /**
      * Sets how long to wait for the query to complete, in milliseconds, before the request times
      * out and returns. Note that this is only a timeout for the request, not the query. If the
      * query takes longer to run than the timeout value, the call returns without any results and
-     * with the {@link QueryResponse#jobComplete()} set to {@code false}. If not set, a wait time of
+     * with the {@link QueryResponse#jobCompleted()} set to {@code false}. If not set, a wait time of
      * 10000 milliseconds (10 seconds) is used.
      */
     public Builder maxWaitTime(Long maxWaitTime) {
@@ -182,7 +189,7 @@ public DatasetId defaultDataset() {
    * Returns how long to wait for the query to complete, in milliseconds, before the request times
    * out and returns. Note that this is only a timeout for the request, not the query. If the
    * query takes longer to run than the timeout value, the call returns without any results and
-   * with the {@link QueryResponse#jobComplete()} set to {@code false}. You can call
+   * with the {@link QueryResponse#jobCompleted()} set to {@code false}. You can call
    * {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} to wait for the query
    * to complete and read the results. If not set, a wait time of 10000 milliseconds (10 seconds)
    * is used.
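
Note for readers: a minimal sketch of how the new String overload reads at a call site, combined with the jobCompleted() polling shown in the class docs above. The QueryRequest.builder(String) factory and the dataset/query names are assumptions for illustration, not taken from this patch.

    // Assumed factory: QueryRequest.builder(String) creates a builder for the given query text.
    QueryRequest request = QueryRequest.builder("SELECT field FROM table")
        .defaultDataset("my_dataset")   // new String overload; resolves unqualified table names
        .maxWaitTime(60000L)
        .maxResults(1000L)
        .build();
    QueryResponse response = bigquery.query(request);
    while (!response.jobCompleted()) {
      Thread.sleep(1000);
      response = bigquery.getQueryResults(response.jobId());
    }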
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java
index 8ef8351d9e1a..77386747754f 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java
@@ -31,7 +31,7 @@
  * <p>Example usage of a query response:
  * <pre> {@code
  *    QueryResponse response = bigquery.query(request);
- *    while (!response.jobComplete()) {
+ *    while (!response.jobCompleted()) {
  *      Thread.sleep(1000);
  *      response = bigquery.getQueryResults(response.jobId());
  *    }
@@ -56,7 +56,7 @@ public class QueryResponse implements Serializable {
   private final QueryResult result;
   private final String etag;
   private final JobId jobId;
-  private final boolean jobComplete;
+  private final boolean jobCompleted;
   private final List<BigQueryError> executionErrors;
 
   static final class Builder {
@@ -64,7 +64,7 @@ static final class Builder {
     private QueryResult result;
     private String etag;
     private JobId jobId;
-    private boolean jobComplete;
+    private boolean jobCompleted;
     private List<BigQueryError> executionErrors;
 
     private Builder() {}
@@ -84,8 +84,8 @@ Builder jobId(JobId jobId) {
       return this;
     }
 
-    Builder jobComplete(boolean jobComplete) {
-      this.jobComplete = jobComplete;
+    Builder jobCompleted(boolean jobCompleted) {
+      this.jobCompleted = jobCompleted;
       return this;
     }
 
@@ -103,13 +103,13 @@ private QueryResponse(Builder builder) {
     this.result = builder.result;
     this.etag = builder.etag;
     this.jobId = builder.jobId;
-    this.jobComplete = builder.jobComplete;
+    this.jobCompleted = builder.jobCompleted;
     this.executionErrors = builder.executionErrors != null ? builder.executionErrors
       : ImmutableList.of();
   }
 
   /**
-   * Returns the result of the query. Returns {@code null} if {@link #jobComplete()} is {@code
+   * Returns the result of the query. Returns {@code null} if {@link #jobCompleted()} is {@code
    * false}.
    */
   public QueryResult result() {
@@ -137,8 +137,8 @@ public JobId jobId() {
    * {@link #result()} returns {@code null}. This method can be used to check if query execution
    * completed and results are available.
    */
-  public boolean jobComplete() {
-    return jobComplete;
+  public boolean jobCompleted() {
+    return jobCompleted;
   }
 
   /**
@@ -164,7 +164,7 @@ public String toString() {
         .add("result", result)
         .add("etag", etag)
         .add("jobId", jobId)
-        .add("jobComplete", jobComplete)
+        .add("jobCompleted", jobCompleted)
         .add("executionErrors", executionErrors)
         .toString();
   }
@@ -183,7 +183,7 @@ public boolean equals(Object obj) {
       return false;
     }
     QueryResponse response = (QueryResponse) obj;
-    return jobComplete == response.jobComplete
+    return jobCompleted == response.jobCompleted
         && Objects.equals(etag, response.etag)
         && Objects.equals(result, response.result)
         && Objects.equals(jobId, response.jobId)
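
A short hedged sketch of how the renamed accessor pairs with the existing error accessors once a job finishes; hasErrors() and executionErrors() are exercised in the tests below, and the handling here is illustrative only.

    QueryResponse response = bigquery.getQueryResults(jobId);
    if (response.jobCompleted()) {
      if (response.hasErrors()) {
        // executionErrors() lists the errors reported by the service for this job
        System.out.println("Query failed: " + response.executionErrors());
      } else {
        QueryResult result = response.result();  // null until jobCompleted() is true
        // consume result
      }
    }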
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
index b54a989fb5e5..021c356320da 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
@@ -902,7 +902,7 @@ public void testQueryRequest() {
     assertNull(response.etag());
     assertNull(response.result());
     assertEquals(queryJob, response.jobId());
-    assertEquals(false, response.jobComplete());
+    assertEquals(false, response.jobCompleted());
     assertEquals(ImmutableList.of(), response.executionErrors());
     assertFalse(response.hasErrors());
     assertEquals(null, response.result());
@@ -926,7 +926,7 @@ public void testQueryRequestCompleted() {
     QueryResponse response = bigquery.query(QUERY_REQUEST);
     assertNull(response.etag());
     assertEquals(queryJob, response.jobId());
-    assertEquals(true, response.jobComplete());
+    assertEquals(true, response.jobCompleted());
     assertEquals(false, response.result().cacheHit());
     assertEquals(ImmutableList.of(), response.executionErrors());
     assertFalse(response.hasErrors());
@@ -959,7 +959,7 @@ public void testGetQueryResults() {
     QueryResponse response = bigquery.getQueryResults(queryJob);
     assertEquals("etag", response.etag());
     assertEquals(queryJob, response.jobId());
-    assertEquals(true, response.jobComplete());
+    assertEquals(true, response.jobCompleted());
     assertEquals(false, response.result().cacheHit());
     assertEquals(ImmutableList.of(), response.executionErrors());
     assertFalse(response.hasErrors());
@@ -993,7 +993,7 @@ public void testGetQueryResultsWithOptions() {
         QUERY_RESULTS_OPTION_INDEX, QUERY_RESULTS_OPTION_MAX_RESULTS,
         QUERY_RESULTS_OPTION_PAGE_TOKEN);
     assertEquals(queryJob, response.jobId());
-    assertEquals(true, response.jobComplete());
+    assertEquals(true, response.jobCompleted());
     assertEquals(false, response.result().cacheHit());
     assertEquals(ImmutableList.of(), response.executionErrors());
     assertFalse(response.hasErrors());
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
index 528df30d0a61..f672815bcb7a 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
@@ -320,7 +320,7 @@ public void testCreateExternalTable() throws InterruptedException {
         .maxResults(1000L)
         .build();
     QueryResponse response = bigquery.query(request);
-    while (!response.jobComplete()) {
+    while (!response.jobCompleted()) {
       response = bigquery.getQueryResults(response.jobId());
       Thread.sleep(1000);
     }
@@ -382,7 +382,7 @@ public void testCreateViewTable() throws InterruptedException {
         .maxResults(1000L)
         .build();
     QueryResponse response = bigquery.query(request);
-    while (!response.jobComplete()) {
+    while (!response.jobCompleted()) {
       response = bigquery.getQueryResults(response.jobId());
       Thread.sleep(1000);
     }
@@ -627,7 +627,7 @@ public void testQuery() throws InterruptedException {
         .maxResults(1000L)
         .build();
     QueryResponse response = bigquery.query(request);
-    while (!response.jobComplete()) {
+    while (!response.jobCompleted()) {
       Thread.sleep(1000);
       response = bigquery.getQueryResults(response.jobId());
     }
@@ -786,7 +786,7 @@ public void testQueryJob() throws InterruptedException {
     assertNull(remoteJob.status().error());
 
     QueryResponse response = bigquery.getQueryResults(remoteJob.jobId());
-    while (!response.jobComplete()) {
+    while (!response.jobCompleted()) {
       Thread.sleep(1000);
       response = bigquery.getQueryResults(response.jobId());
     }
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java
index 3ecae9b76e18..08e885c8b3aa 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java
@@ -64,7 +64,7 @@ public QueryResult nextPage() {
   private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder()
       .etag(ETAG)
       .jobId(JOB_ID)
-      .jobComplete(JOB_COMPLETE)
+      .jobCompleted(JOB_COMPLETE)
       .executionErrors(ERRORS)
       .result(QUERY_RESULT)
       .build();
@@ -74,18 +74,18 @@ public void testBuilder() {
     assertEquals(ETAG, QUERY_RESPONSE.etag());
     assertEquals(QUERY_RESULT, QUERY_RESPONSE.result());
     assertEquals(JOB_ID, QUERY_RESPONSE.jobId());
-    assertEquals(JOB_COMPLETE, QUERY_RESPONSE.jobComplete());
+    assertEquals(JOB_COMPLETE, QUERY_RESPONSE.jobCompleted());
     assertEquals(ERRORS, QUERY_RESPONSE.executionErrors());
     assertTrue(QUERY_RESPONSE.hasErrors());
   }
 
   @Test
   public void testBuilderIncomplete() {
-    QueryResponse queryResponse = QueryResponse.builder().jobComplete(false).build();
+    QueryResponse queryResponse = QueryResponse.builder().jobCompleted(false).build();
     assertNull(queryResponse.etag());
     assertNull(queryResponse.result());
     assertNull(queryResponse.jobId());
-    assertFalse(queryResponse.jobComplete());
+    assertFalse(queryResponse.jobCompleted());
     assertEquals(ImmutableList.of(), queryResponse.executionErrors());
     assertFalse(queryResponse.hasErrors());
   }
@@ -100,7 +100,7 @@ private void compareQueryResponse(QueryResponse expected, QueryResponse value) {
     assertEquals(expected.etag(), value.etag());
     assertEquals(expected.result(), value.result());
     assertEquals(expected.jobId(), value.jobId());
-    assertEquals(expected.jobComplete(), value.jobComplete());
+    assertEquals(expected.jobCompleted(), value.jobCompleted());
     assertEquals(expected.executionErrors(), value.executionErrors());
     assertEquals(expected.hasErrors(), value.hasErrors());
   }
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java
index d407ac1630e3..6068f2332866 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java
@@ -213,7 +213,7 @@ public class SerializationTest {
   private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder()
       .etag(ETAG)
       .jobId(JOB_ID)
-      .jobComplete(true)
+      .jobCompleted(true)
       .result(QUERY_RESULT)
       .build();
 
diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
index 2f8a768f3669..895502e3d6d5 100644
--- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
+++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
@@ -44,7 +44,6 @@
 import com.google.gcloud.bigquery.ViewInfo;
 import com.google.gcloud.spi.BigQueryRpc.Tuple;
 
-import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -99,6 +98,7 @@
  */
 public class BigQueryExample {
 
+  private static final int CHUNK_SIZE = 8 * 256 * 1024;
   private static final Map<String, BigQueryAction> CREATE_ACTIONS = new HashMap<>();
   private static final Map<String, BigQueryAction> INFO_ACTIONS = new HashMap<>();
   private static final Map<String, BigQueryAction> LIST_ACTIONS = new HashMap<>();
@@ -627,7 +627,7 @@ private static class QueryAction extends BigQueryAction<QueryRequest> {
     void run(BigQuery bigquery, QueryRequest queryRequest) throws Exception {
       System.out.println("Running query");
       QueryResponse queryResponse = bigquery.query(queryRequest);
-      while (!queryResponse.jobComplete()) {
+      while (!queryResponse.jobCompleted()) {
         System.out.println("Waiting for query job " + queryResponse.jobId() + " to complete");
         Thread.sleep(1000L);
         queryResponse = bigquery.getQueryResults(queryResponse.jobId());
@@ -676,12 +676,12 @@ private static class LoadFileAction extends BigQueryAction<Tuple<LoadConfiguration, String>> {
     void run(BigQuery bigquery, Tuple<LoadConfiguration, String> configuration) throws Exception {
       System.out.println("Running insert");
       try (FileChannel fileChannel = FileChannel.open(Paths.get(configuration.y()))) {
-        ByteBuffer buffer = ByteBuffer.allocate(256 * 1024);
         WriteChannel writeChannel = bigquery.writer(configuration.x());
-        while (fileChannel.read(buffer) > 0) {
-          buffer.flip();
-          writeChannel.write(buffer);
-          buffer.clear();
+        long position = 0;
+        long written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel);
+        while (written > 0) {
+          position += written;
+          written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel);
         }
         writeChannel.close();
       }

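For reference, the transferTo-based upload distilled into a standalone helper; a sketch only, assuming the LoadConfiguration is built elsewhere and that the channel is closed via try-with-resources rather than the explicit close() used in the example. It is meant to live alongside the example class, which already imports FileChannel and Paths.

    // Streams a local file into BigQuery through the resumable write channel.
    // WriteChannel is a WritableByteChannel, so FileChannel.transferTo can write to it directly.
    static void uploadFile(BigQuery bigquery, LoadConfiguration configuration, String path)
        throws Exception {
      final int chunkSize = 8 * 256 * 1024;  // mirrors the CHUNK_SIZE constant added above
      try (FileChannel source = FileChannel.open(Paths.get(path));
          WriteChannel sink = bigquery.writer(configuration)) {
        long position = 0;
        long written = source.transferTo(position, chunkSize, sink);
        while (written > 0) {
          position += written;
          written = source.transferTo(position, chunkSize, sink);
        }
      }
    }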
From 555cc711ba3b939b792ee232ecff814e1ab10f12 Mon Sep 17 00:00:00 2001
From: Ajay Kannan 
Date: Thu, 21 Jan 2016 13:06:03 -0800
Subject: [PATCH 17/18] Run coveralls for PRs in branches

---
 utilities/after_success.sh | 58 ++++++++++++++++++++------------------
 1 file changed, 30 insertions(+), 28 deletions(-)

diff --git a/utilities/after_success.sh b/utilities/after_success.sh
index 05ab5fb373d6..73f5fa95dec3 100755
--- a/utilities/after_success.sh
+++ b/utilities/after_success.sh
@@ -8,36 +8,38 @@ echo "Travis branch:       " ${TRAVIS_BRANCH}
 echo "Travis pull request: " ${TRAVIS_PULL_REQUEST}
 echo "Travis JDK version:  " ${TRAVIS_JDK_VERSION}
 
-if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" -a "${TRAVIS_BRANCH}" == "master" ]; then
+if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" ]; then
     mvn clean cobertura:cobertura coveralls:report
-    if [ "${TRAVIS_PULL_REQUEST}" == "false" ]; then 
-      SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)')
-      if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then
-          # Deploy site if not a SNAPSHOT
-          git config --global user.name "travis-ci"
-          git config --global user.email "travis@travis-ci.org"
-          git clone --branch gh-pages --single-branch https://github.com/GoogleCloudPlatform/gcloud-java/ tmp_gh-pages
-          mkdir -p tmp_gh-pages/$SITE_VERSION
-          mvn site -DskipTests=true
-          mvn site:stage -DtopSiteURL=http://googlecloudplatform.github.io/gcloud-java/site/${SITE_VERSION}/
-          cd tmp_gh-pages
-          cp -r ../target/staging/$SITE_VERSION/* $SITE_VERSION/
-          sed -i "s/{{SITE_VERSION}}/$SITE_VERSION/g" ${SITE_VERSION}/index.html # Update "Quickstart with Maven" to reflect version change
-          git add $SITE_VERSION
-          echo "" > index.html
-          git add index.html
-          echo "" > apidocs/index.html
-          git add apidocs/index.html
-          git commit -m "Added a new site for version $SITE_VERSION and updated the root directory's redirect."
-          git config --global push.default simple
-          git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" > /dev/null 2>&1
+    if [ "${TRAVIS_PULL_REQUEST}" == "false" -a "${TRAVIS_BRANCH}" == "master" ]; then
+        SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)')
+        if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then
+            # Deploy site if not a SNAPSHOT
+            git config --global user.name "travis-ci"
+            git config --global user.email "travis@travis-ci.org"
+            git clone --branch gh-pages --single-branch https://github.com/GoogleCloudPlatform/gcloud-java/ tmp_gh-pages
+            mkdir -p tmp_gh-pages/$SITE_VERSION
+            mvn site -DskipTests=true
+            mvn site:stage -DtopSiteURL=http://googlecloudplatform.github.io/gcloud-java/site/${SITE_VERSION}/
+            cd tmp_gh-pages
+            cp -r ../target/staging/$SITE_VERSION/* $SITE_VERSION/
+            sed -i "s/{{SITE_VERSION}}/$SITE_VERSION/g" ${SITE_VERSION}/index.html # Update "Quickstart with Maven" to reflect version change
+            git add $SITE_VERSION
+            echo "" > index.html
+            git add index.html
+            echo "" > apidocs/index.html
+            git add apidocs/index.html
+            git commit -m "Added a new site for version $SITE_VERSION and updated the root directory's redirect."
+            git config --global push.default simple
+            git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" > /dev/null 2>&1
 
-          cd ..
-          utilities/update_docs_version.sh # Update version in READMEs
-          mvn clean deploy --settings ~/.m2/settings.xml -P sign-deploy
-      else
-          mvn clean deploy -DskipTests=true -Dgpg.skip=true --settings ~/.m2/settings.xml
-      fi
+            cd ..
+            utilities/update_docs_version.sh # Update version in READMEs
+            mvn clean deploy --settings ~/.m2/settings.xml -P sign-deploy
+        else
+            mvn clean deploy -DskipTests=true -Dgpg.skip=true --settings ~/.m2/settings.xml
+        fi
+    else
+        echo "Not deploying artifacts. This is only done with non-pull-request commits to master branch with Oracle Java 7 builds."
     fi
 else
     echo "Not deploying artifacts. This is only done with non-pull-request commits to master branch with Oracle Java 7 builds."

From 1c657158857a343c2ad4c761cb287d535f396704 Mon Sep 17 00:00:00 2001
From: Martin Derka 
Date: Thu, 21 Jan 2016 13:44:40 -0800
Subject: [PATCH 18/18] Removed the method for removing a record by index.

---
 .../main/java/com/google/gcloud/dns/DnsRecord.java    | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java
index f73c880f22f3..41a8569d3937 100644
--- a/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java
+++ b/gcloud-java-dns/src/main/java/com/google/gcloud/dns/DnsRecord.java
@@ -146,17 +146,6 @@ public Builder removeRecord(String record) {
       return this;
     }
 
-    /**
-     * Removes a record on the given index from the set.
-     */
-    public Builder removeRecord(int index) {
-      checkArgument(index >= 0 && index < this.rrdatas.size(), "The index is out of bounds. An " +
-              "integer between 0 and " + (this.rrdatas.size() - 1) + " is required. The provided " +
-              "value was " + index + ".");
-      this.rrdatas.remove(index);
-      return this;
-    }
-
     /**
      * Removes all the records.
      */
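
With index-based removal gone, callers drop a record by its exact value instead; a minimal sketch using only methods kept by this patch (the names, TTL, and addresses are illustrative).

    DnsRecord record = DnsRecord.builder("www.example.com.", DnsRecord.DnsRecordType.AAAA)
        .ttl(3600)
        .addRecord("2001:db8::1")
        .addRecord("2001:db8::2")
        .build();
    // removeRecord(String) requires an exact match of the record value.
    DnsRecord updated = record.toBuilder()
        .removeRecord("2001:db8::2")
        .build();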