An alternate pull request for PR #559 with code coverage set up properly #569

Closed. Wants to merge 26 commits.

Commits (26)
b32e4b9  Add support for BigQuery resumable uploads via a write channel (mziccard, Jan 11, 2016)
397b2c1  Merge pull request #540 from mziccard/bigquery-insert-writer (aozarov, Jan 19, 2016)
2bf4925  Initial project for Google Cloud DNS in gcloud-java (mderka, Jan 16, 2016)
fe4e137  Added DnsRecord as a part of the basic data model. (mderka, Jan 16, 2016)
f652277  Implemented comments by @mziccard (mderka, Jan 19, 2016)
b29945f  Second round of comments from @mziccard (mderka, Jan 20, 2016)
03d5d30  Refactor BaseServiceException (mziccard, Jan 15, 2016)
acf260c  Move exception handler and interceptor to BaseService class (mziccard, Jan 15, 2016)
67e5dfc  Rename translateAndThrow in BaseServiceException and make it package … (mziccard, Jan 20, 2016)
34e6806  Add throwable cause to DatastoreException (mziccard, Jan 20, 2016)
3c81f16  Move isRetryable method to BaseServiceException.Error (mziccard, Jan 20, 2016)
1fc0e32  Implemented comments by @aozarov. Also removed incomplete (mderka, Jan 21, 2016)
add5924  Merge pull request #554 from mziccard/refactor-exception (mziccard, Jan 21, 2016)
3026e77  Rename Storage.apply to Storage.submit (mziccard, Jan 21, 2016)
2e38f02  Add code to initialize BigQueryError in BigQueryException (mziccard, Jan 21, 2016)
2e8363f  Merge pull request #563 from mziccard/populate-bigquery-error (mziccard, Jan 21, 2016)
3521bf5  Add equals and hashCode to BaseTableInfo subclasses (mziccard, Jan 21, 2016)
483b4d8  Merge pull request #565 from mziccard/bigquery-table-hash (ajkannan, Jan 21, 2016)
9fba603  Merge pull request #562 from mziccard/rename-apply (ajkannan, Jan 21, 2016)
01662be  Implements comments by @ajkannan (mderka, Jan 21, 2016)
e27b5b3  Minor fixes to bigquery (mziccard, Jan 21, 2016)
555cc71  Run coveralls for PRs in branches (Jan 21, 2016)
c027e47  Merge pull request #567 from mziccard/minor-bigquery-fixes (aozarov, Jan 21, 2016)
0c3e935  Merge pull request #568 from ajkannan/run-coveralls-in-branches (aozarov, Jan 21, 2016)
1c65715  Removed the method for removing a record by index. (mderka, Jan 21, 2016)
6ce692e  Merge branch 'gcloud-dns' into dns-temp (mderka, Jan 21, 2016)

Changes from all commits
@@ -104,7 +104,7 @@ static Entity fromPb(Access access) {
       }
       // Unreachable
       throw new BigQueryException(BigQueryException.UNKNOWN_CODE,
-          "Unrecognized access configuration", false);
+          "Unrecognized access configuration");
     }
   }
 
@@ -443,7 +443,7 @@ public static QueryResultsOption startIndex(long startIndex) {
     /**
      * Returns an option that sets how long to wait for the query to complete, in milliseconds,
      * before returning. Default is 10 seconds. If the timeout passes before the job completes,
-     * {@link QueryResponse#jobComplete()} will be {@code false}.
+     * {@link QueryResponse#jobCompleted()} will be {@code false}.
      */
     public static QueryResultsOption maxWaitTime(long maxWaitTime) {
       checkArgument(maxWaitTime >= 0);
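
For illustration, a minimal sketch (not part of the diff) of how the renamed QueryResponse#jobCompleted() accessor and the maxWaitTime option might be used together. The BigQuery instance, the JobId, and the one-second client-side polling interval are assumptions.

import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.JobId;
import com.google.gcloud.bigquery.QueryResponse;

class QueryWaitSketch {
  // Ask the service to wait up to 60s for the job, then poll client-side until it completes.
  static QueryResponse waitForQuery(BigQuery bigquery, JobId jobId) throws InterruptedException {
    QueryResponse response =
        bigquery.getQueryResults(jobId, BigQuery.QueryResultsOption.maxWaitTime(60000L));
    while (!response.jobCompleted()) {
      Thread.sleep(1000);
      response = bigquery.getQueryResults(jobId);
    }
    return response;
  }
}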

@@ -662,4 +662,12 @@ Page<List<FieldValue>> listTableData(TableId tableId, TableDataListOption... opt
    * @throws BigQueryException upon failure
    */
   QueryResponse getQueryResults(JobId job, QueryResultsOption... options) throws BigQueryException;
+
+  /**
+   * Returns a channel to write data to be inserted into a BigQuery table. Data format and other
+   * options can be configured using the {@link LoadConfiguration} parameter.
+   *
+   * @throws BigQueryException upon failure
+   */
+  TableDataWriteChannel writer(LoadConfiguration loadConfiguration);
 }
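
A hedged usage sketch of the new writer API (not part of the diff). The LoadConfiguration.builder(TableId) and FormatOptions.json() factories are assumptions about the rest of the library at the time; only writer(LoadConfiguration), TableDataWriteChannel, and the destinationTable builder method are taken from this PR.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.FormatOptions;
import com.google.gcloud.bigquery.LoadConfiguration;
import com.google.gcloud.bigquery.TableDataWriteChannel;
import com.google.gcloud.bigquery.TableId;

class TableWriterSketch {
  // Streams newline-delimited JSON rows into a table through the new write channel.
  static void streamJsonRows(BigQuery bigquery, TableId table, String newlineDelimitedJson)
      throws IOException {
    LoadConfiguration configuration = LoadConfiguration.builder(table)
        .formatOptions(FormatOptions.json()) // assumed factories; adjust to the actual builder
        .build();
    TableDataWriteChannel channel = bigquery.writer(configuration);
    try {
      channel.write(ByteBuffer.wrap(newlineDelimitedJson.getBytes(StandardCharsets.UTF_8)));
    } finally {
      channel.close(); // closing the channel triggers the resumable upload to complete
    }
  }
}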
@@ -16,10 +16,16 @@
 
 package com.google.gcloud.bigquery;
 
+import com.google.api.client.googleapis.json.GoogleJsonError;
+import com.google.api.client.googleapis.json.GoogleJsonResponseException;
+import com.google.common.collect.ImmutableSet;
 import com.google.gcloud.BaseServiceException;
 import com.google.gcloud.RetryHelper.RetryHelperException;
 import com.google.gcloud.RetryHelper.RetryInterruptedException;
 
+import java.io.IOException;
+import java.util.Set;
+
 /**
  * BigQuery service exception.
  *
@@ -28,20 +34,39 @@
  */
 public class BigQueryException extends BaseServiceException {
 
-  private static final long serialVersionUID = -5504832700512784654L;
   public static final int UNKNOWN_CODE = -1;
+  // see: https://cloud.google.com/bigquery/troubleshooting-errors
+  private static final Set<Error> RETRYABLE_ERRORS = ImmutableSet.of(
+      new Error(500, null),
+      new Error(502, null),
+      new Error(503, null),
+      new Error(504, null));
+  private static final long serialVersionUID = -5006625989225438209L;
 
   private final BigQueryError error;
 
-  public BigQueryException(int code, String message, boolean retryable) {
-    this(code, message, retryable, null);
+  public BigQueryException(int code, String message) {
+    this(code, message, null);
   }
 
-  public BigQueryException(int code, String message, boolean retryable, BigQueryError error) {
-    super(code, message, retryable);
+  public BigQueryException(int code, String message, BigQueryError error) {
+    super(code, message, error != null ? error.reason() : null, true);
     this.error = error;
   }
 
+  public BigQueryException(IOException exception) {
+    super(exception, true);
+    BigQueryError bigqueryError = null;
+    if (exception instanceof GoogleJsonResponseException) {
+      GoogleJsonError error = ((GoogleJsonResponseException) exception).getDetails();
+      if (error != null && error.getErrors() != null && !error.getErrors().isEmpty()) {
+        GoogleJsonError.ErrorInfo errorInfo = error.getErrors().get(0);
+        bigqueryError = new BigQueryError(errorInfo.getReason(), errorInfo.getLocation(),
+            errorInfo.getMessage(), (String) error.get("debugInfo"));
+      }
+    }
+    this.error = bigqueryError;
+  }
 
   /**
    * Returns the {@link BigQueryError} that caused this exception. Returns {@code null} if none
    * exists.
@@ -50,20 +75,20 @@ public BigQueryError error() {
     return error;
   }
 
+  @Override
+  protected Set<Error> retryableErrors() {
+    return RETRYABLE_ERRORS;
+  }
+
   /**
    * Translate RetryHelperException to the BigQueryException that caused the error. This method will
    * always throw an exception.
    *
    * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException}
    * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException}
    */
-  static BigQueryException translateAndThrow(RetryHelperException ex) {
-    if (ex.getCause() instanceof BigQueryException) {
-      throw (BigQueryException) ex.getCause();
-    }
-    if (ex instanceof RetryInterruptedException) {
-      RetryInterruptedException.propagate();
-    }
-    throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), false);
+  static BaseServiceException translateAndThrow(RetryHelperException ex) {
+    BaseServiceException.translateAndPropagateIfPossible(ex);
+    throw new BigQueryException(UNKNOWN_CODE, ex.getMessage());
   }
 }
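
For illustration, a sketch (not part of the diff) of how calling code might inspect the richer exception after this refactor, now that retryability is derived from the HTTP code (500/502/503/504) inside the exception rather than being passed by callers. The code(), retryable(), location(), and message() accessors are assumed to exist on BaseServiceException and BigQueryError as elsewhere in the library; error() and reason() appear in the diff.

import com.google.gcloud.bigquery.BigQueryError;
import com.google.gcloud.bigquery.BigQueryException;

class BigQueryErrorSketch {
  // Summarizes a BigQueryException, preferring the structured BigQueryError when present.
  static String describe(BigQueryException e) {
    BigQueryError error = e.error();
    if (error != null) {
      return error.reason() + " at " + error.location() + ": " + error.message();
    }
    return "HTTP " + e.code() + (e.retryable() ? " (retryable)" : " (not retryable)");
  }
}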
@@ -34,8 +34,6 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.gcloud.BaseService;
-import com.google.gcloud.ExceptionHandler;
-import com.google.gcloud.ExceptionHandler.Interceptor;
 import com.google.gcloud.Page;
 import com.google.gcloud.PageImpl;
 import com.google.gcloud.PageImpl.NextPageFetcher;
@@ -49,27 +47,6 @@
 
 final class BigQueryImpl extends BaseService<BigQueryOptions> implements BigQuery {
 
-  private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() {
-
-    private static final long serialVersionUID = -7478333733015750774L;
-
-    @Override
-    public RetryResult afterEval(Exception exception, RetryResult retryResult) {
-      return Interceptor.RetryResult.CONTINUE_EVALUATION;
-    }
-
-    @Override
-    public RetryResult beforeEval(Exception exception) {
-      if (exception instanceof BigQueryException) {
-        boolean retriable = ((BigQueryException) exception).retryable();
-        return retriable ? Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY;
-      }
-      return Interceptor.RetryResult.CONTINUE_EVALUATION;
-    }
-  };
-  static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder()
-      .abortOn(RuntimeException.class).interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build();
-
   private static class DatasetPageFetcher implements NextPageFetcher<DatasetInfo> {
 
     private static final long serialVersionUID = -3057564042439021278L;
@@ -537,10 +514,10 @@ public com.google.api.services.bigquery.model.QueryResponse call() {
     QueryResponse.Builder builder = QueryResponse.builder();
     JobId completeJobId = JobId.fromPb(results.getJobReference());
     builder.jobId(completeJobId);
-    builder.jobComplete(results.getJobComplete());
+    builder.jobCompleted(results.getJobComplete());
     List<TableRow> rowsPb = results.getRows();
     if (results.getJobComplete()) {
-      builder.jobComplete(true);
+      builder.jobCompleted(true);
       QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb,
           results.getPageToken(), options(), ImmutableMap.<BigQueryRpc.Option, Object>of());
       resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed());
@@ -584,7 +561,7 @@ public GetQueryResultsResponse call() {
     JobId completeJobId = JobId.fromPb(results.getJobReference());
     builder.jobId(completeJobId);
     builder.etag(results.getEtag());
-    builder.jobComplete(results.getJobComplete());
+    builder.jobCompleted(results.getJobComplete());
     List<TableRow> rowsPb = results.getRows();
     if (results.getJobComplete()) {
       QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb,
@@ -619,6 +596,10 @@ private static QueryResult.Builder transformQueryResults(JobId jobId, List<Table
         .results(transformTableData(rowsPb));
   }
 
+  public TableDataWriteChannel writer(LoadConfiguration loadConfiguration) {
+    return new TableDataWriteChannel(options(), setProjectId(loadConfiguration));
+  }
+
   private Map<BigQueryRpc.Option, ?> optionMap(Option... options) {
     Map<BigQueryRpc.Option, Object> optionMap = Maps.newEnumMap(BigQueryRpc.Option.class);
     for (Option option : options) {
@@ -698,8 +679,7 @@ public TableId apply(TableId tableId) {
     if (job instanceof LoadJobInfo) {
       LoadJobInfo loadJob = (LoadJobInfo) job;
       LoadJobInfo.Builder loadBuilder = loadJob.toBuilder();
-      loadBuilder.destinationTable(setProjectId(loadJob.destinationTable()));
-      return loadBuilder.build();
+      return loadBuilder.configuration(setProjectId(loadJob.configuration())).build();
     }
     return job;
   }
@@ -711,4 +691,10 @@ private QueryRequest setProjectId(QueryRequest request) {
     }
     return builder.build();
   }
+
+  private LoadConfiguration setProjectId(LoadConfiguration configuration) {
+    LoadConfiguration.Builder builder = configuration.toBuilder();
+    builder.destinationTable(setProjectId(configuration.destinationTable()));
+    return builder.build();
+  }
 }
@@ -95,8 +95,8 @@ public Builder destinationTable(TableId destinationTable) {
     /**
      * Sets whether the job is allowed to create new tables.
      *
-     * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.link">
-     *     Jobs: Link Configuration</a>
+     * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition">
+     *     Create Disposition</a>
      */
     public Builder createDisposition(CreateDisposition createDisposition) {
       this.createDisposition = createDisposition;
@@ -106,8 +106,8 @@ public Builder createDisposition(CreateDisposition createDisposition) {
     /**
      * Sets the action that should occur if the destination table already exists.
      *
-     * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.link">
-     *     Jobs: Link Configuration</a>
+     * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition">
+     *     Write Disposition</a>
      */
     public Builder writeDisposition(WriteDisposition writeDisposition) {
       this.writeDisposition = writeDisposition;
@@ -145,8 +145,8 @@ public TableId destinationTable() {
   /**
    * Returns whether the job is allowed to create new tables.
    *
-   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy">
-   *     Jobs: Copy Configuration</a>
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition">
+   *     Create Disposition</a>
    */
   public CreateDisposition createDisposition() {
     return this.createDisposition;
@@ -155,8 +155,8 @@ public CreateDisposition createDisposition() {
   /**
    * Returns the action that should occur if the destination table already exists.
    *
-   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy">
-   *     Jobs: Copy Configuration</a>
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition">
+   *     Write Disposition</a>
   */
   public WriteDisposition writeDisposition() {
     return writeDisposition;
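
The file name is not visible in this view, so the following sketch is hypothetical: it assumes the builder above belongs to a copy-job configuration class (called CopyJobInfo here purely for illustration, since the corrected @see links point at configuration.copy) and that the usual BigQuery disposition constants exist. Only the createDisposition(...), writeDisposition(...), and destinationTable(...) builder methods are taken from the diff.

// Hypothetical illustration: CopyJobInfo, its builder(destination, source) factory, and the
// enum constants are assumed names; the builder methods themselves appear in the diff above.
CopyJobInfo copyJob = CopyJobInfo.builder(destinationTable, sourceTable)
    .createDisposition(CreateDisposition.CREATE_IF_NEEDED) // create the table if it is missing
    .writeDisposition(WriteDisposition.WRITE_TRUNCATE)     // replace any existing rows
    .build();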
@@ -21,6 +21,8 @@
 import com.google.api.services.bigquery.model.Table;
 import com.google.common.base.MoreObjects.ToStringHelper;
 
+import java.util.Objects;
+
 /**
  * Google BigQuery External Table information. BigQuery's external tables are tables whose data
  * reside outside of BigQuery but can be queried as normal BigQuery tables. External tables are
@@ -103,6 +105,17 @@ ToStringHelper toStringHelper() {
     return super.toStringHelper().add("configuration", configuration);
   }
 
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof ExternalTableInfo
+        && Objects.equals(toPb(), ((ExternalTableInfo) obj).toPb());
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(super.hashCode(), configuration);
+  }
+
   @Override
   Table toPb() {
     Table tablePb = super.toPb();