Skip to content

Commit

Permalink
docs(samples): fix checkstyle errors (#682)
Browse files Browse the repository at this point in the history
Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
- [X] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/java-bigquery/issues/new/choose) before writing your code!  That way we can discuss the change, evaluate designs, and agree on the general idea
- [X] Ensure the tests and linter pass
- [X] Code coverage does not decrease (if any source code was changed)
- [X] Appropriate docs were updated (if necessary)

Fixes #681
  • Loading branch information
Praful Makani authored Aug 19, 2020
1 parent 5acb756 commit fe5abc0
Show file tree
Hide file tree
Showing 49 changed files with 347 additions and 365 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,8 @@ public static void authorizedViewTutorial(
// [START bigquery_authorized_view_tutorial]
// [START bigquery_avt_create_source_dataset]
// Create a source dataset to store your table.
Dataset sourceDataset = bigquery.create(DatasetInfo.of(sourceDatasetId));
final Dataset sourceDataset = bigquery.create(DatasetInfo.of(sourceDatasetId));
// [END bigquery_avt_create_source_dataset]

// [START bigquery_avt_create_source_table]
// Populate a source table
String tableQuery =
Expand All @@ -72,32 +71,27 @@ public static void authorizedViewTutorial(
.build();
bigquery.query(queryConfig);
// [END bigquery_avt_create_source_table]

// [START bigquery_avt_create_shared_dataset]
// Create a separate dataset to store your view
Dataset sharedDataset = bigquery.create(DatasetInfo.of(sharedDatasetId));
// [END bigquery_avt_create_shared_dataset]

// [START bigquery_avt_create_view]
// Create the view in the new dataset
String viewQuery =
String.format(
"SELECT commit, author.name as author, committer.name as committer, repo_name FROM %s.%s.%s",
"SELECT commit, author.name as author, "
+ "committer.name as committer, repo_name FROM %s.%s.%s",
projectId, sourceDatasetId, sourceTableId);

ViewDefinition viewDefinition = ViewDefinition.of(viewQuery);

Table view =
bigquery.create(TableInfo.of(TableId.of(sharedDatasetId, sharedViewId), viewDefinition));
// [END bigquery_avt_create_view]

// [START bigquery_avt_shared_dataset_access]
// Assign access controls to the dataset containing the view
List<Acl> viewAcl = new ArrayList<>(sharedDataset.getAcl());
viewAcl.add(Acl.of(new Acl.Group("[email protected]"), Acl.Role.READER));
sharedDataset.toBuilder().setAcl(viewAcl).build().update();
// [END bigquery_avt_shared_dataset_access]

// [START bigquery_avt_source_dataset_access]
// Authorize the view to access the source dataset
List<Acl> srcAcl = new ArrayList<>(sourceDataset.getAcl());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import com.google.cloud.bigquery.TableId;

// Sample to copy a CMEK-encrypted table
public class CopyTableCMEK {
public class CopyTableCmek {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
Expand All @@ -38,11 +38,11 @@ public static void main(String[] args) {
String kmsKeyName = "MY_KMS_KEY_NAME";
EncryptionConfiguration encryption =
EncryptionConfiguration.newBuilder().setKmsKeyName(kmsKeyName).build();
copyTableCMEK(
copyTableCmek(
sourceDatasetName, sourceTableId, destinationDatasetName, destinationTableId, encryption);
}

public static void copyTableCMEK(
public static void copyTableCmek(
String sourceDatasetName,
String sourceTableId,
String destinationDatasetName,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ public static void main(String[] args) {
+ "learn_rate=0.4, "
+ "learn_rate_strategy='constant' "
+ ") AS ( "
+ " SELECT 'a' AS f1, 2.0 AS label "
+ "SELECT 'a' AS f1, 2.0 AS label "
+ "UNION ALL "
+ "SELECT 'b' AS f1, 3.8 AS label "
+ ")";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import com.google.cloud.bigquery.QueryJobConfiguration;

// Sample to create a routine using DDL
public class CreateRoutineDDL {
public class CreateRoutineDdl {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
Expand All @@ -41,11 +41,12 @@ public static void main(String[] args) {
+ "."
+ routineId
+ "`"
+ "( arr ARRAY<STRUCT<name STRING, val INT64>>) AS ( (SELECT SUM(IF(elem.name = \"foo\",elem.val,null)) FROM UNNEST(arr) AS elem))";
createRoutineDDL(sql);
+ "( arr ARRAY<STRUCT<name STRING, val INT64>>) AS "
+ "( (SELECT SUM(IF(elem.name = \"foo\",elem.val,null)) FROM UNNEST(arr) AS elem))";
createRoutineDdl(sql);
}

public static void createRoutineDDL(String sql) {
public static void createRoutineDdl(String sql) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import com.google.cloud.bigquery.TableInfo;

// Sample to create a CMEK-encrypted table
public class CreateTableCMEK {
public class CreateTableCmek {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
Expand All @@ -44,10 +44,10 @@ public static void main(String[] args) {
// i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}
EncryptionConfiguration encryption =
EncryptionConfiguration.newBuilder().setKmsKeyName(kmsKeyName).build();
createTableCMEK(datasetName, tableName, schema, encryption);
createTableCmek(datasetName, tableName, schema, encryption);
}

public static void createTableCMEK(
public static void createTableCmek(
String datasetName, String tableName, Schema schema, EncryptionConfiguration configuration) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import com.google.cloud.bigquery.QueryJobConfiguration;

// Sample to create a view using DDL
public class DDLCreateView {
public class DdlCreateView {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,21 +47,17 @@ public static void grantViewAccess(String srcDatasetId, String viewDatasetId, St
Dataset srcDataset = bigquery.getDataset(DatasetId.of(srcDatasetId));
Dataset viewDataset = bigquery.getDataset(DatasetId.of(viewDatasetId));
Table view = viewDataset.get(viewId);

// First, we'll add a group to the ACL for the dataset containing the view. This will allow
// users within that group to query the view, but they must have direct access to any tables
// referenced by the view.
List<Acl> viewAcl = new ArrayList<>();
viewAcl.addAll(viewDataset.getAcl());
List<Acl> viewAcl = new ArrayList<>(viewDataset.getAcl());
viewAcl.add(Acl.of(new Acl.Group("[email protected]"), Acl.Role.READER));
viewDataset.toBuilder().setAcl(viewAcl).build().update();

// Now, we'll authorize a specific view against a source dataset, delegating access
// enforcement. Once this has been completed, members of the group previously added to the
// view dataset's ACL no longer require access to the source dataset to successfully query the
// view
List<Acl> srcAcl = new ArrayList<>();
srcAcl.addAll(srcDataset.getAcl());
List<Acl> srcAcl = new ArrayList<>(srcDataset.getAcl());
srcAcl.add(Acl.of(new Acl.View(view.getTableId())));
srcDataset.toBuilder().setAcl(srcAcl).build().update();
System.out.println("Grant view access successfully");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,56 +41,56 @@ public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";

// Inserting data types
Field name = Field.of("name", StandardSQLTypeName.STRING);
Field age = Field.of("age", StandardSQLTypeName.INT64);
Field school =
Field.newBuilder("school", StandardSQLTypeName.BYTES).setMode(Field.Mode.REPEATED).build();
Field location = Field.of("location", StandardSQLTypeName.GEOGRAPHY);
Field measurements =
Field.newBuilder("measurements", StandardSQLTypeName.FLOAT64)
.setMode(Field.Mode.REPEATED)
.build();
Field day = Field.of("day", StandardSQLTypeName.DATE);
Field firstTime = Field.of("firstTime", StandardSQLTypeName.DATETIME);
Field secondTime = Field.of("secondTime", StandardSQLTypeName.TIME);
Field thirdTime = Field.of("thirdTime", StandardSQLTypeName.TIMESTAMP);
Field datesTime =
Field.of("datesTime", StandardSQLTypeName.STRUCT, day, firstTime, secondTime, thirdTime);
Schema schema = Schema.of(name, age, school, location, measurements, datesTime);

// Inserting Sample data
Map<String, Object> datesTimeContent = new HashMap<>();
datesTimeContent.put("day", "2019-1-12");
datesTimeContent.put("firstTime", "2019-02-17 11:24:00.000");
datesTimeContent.put("secondTime", "14:00:00");
datesTimeContent.put("thirdTime", "2020-04-27T18:07:25.356Z");

Map<String, Object> rowContent = new HashMap<>();
rowContent.put("name", "Tom");
rowContent.put("age", 30);
rowContent.put("school", "Test University".getBytes());
rowContent.put("location", "POINT(1 2)");
rowContent.put("measurements", new Float[] {50.05f, 100.5f});
rowContent.put("datesTime", datesTimeContent);

insertingDataTypes(datasetName, tableName, schema, rowContent);
insertingDataTypes(datasetName, tableName);
}

public static void insertingDataTypes(
String datasetName, String tableName, Schema schema, Map<String, Object> rowContent) {
public static void insertingDataTypes(String datasetName, String tableName) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

// Inserting data types
Field name = Field.of("name", StandardSQLTypeName.STRING);
Field age = Field.of("age", StandardSQLTypeName.INT64);
Field school =
Field.newBuilder("school", StandardSQLTypeName.BYTES)
.setMode(Field.Mode.REPEATED)
.build();
Field location = Field.of("location", StandardSQLTypeName.GEOGRAPHY);
Field measurements =
Field.newBuilder("measurements", StandardSQLTypeName.FLOAT64)
.setMode(Field.Mode.REPEATED)
.build();
Field day = Field.of("day", StandardSQLTypeName.DATE);
Field firstTime = Field.of("firstTime", StandardSQLTypeName.DATETIME);
Field secondTime = Field.of("secondTime", StandardSQLTypeName.TIME);
Field thirdTime = Field.of("thirdTime", StandardSQLTypeName.TIMESTAMP);
Field datesTime =
Field.of("datesTime", StandardSQLTypeName.STRUCT, day, firstTime, secondTime, thirdTime);
Schema schema = Schema.of(name, age, school, location, measurements, datesTime);

TableId tableId = TableId.of(datasetName, tableName);
TableDefinition tableDefinition = StandardTableDefinition.of(schema);
TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();

bigquery.create(tableInfo);

// Inserting Sample data
Map<String, Object> datesTimeContent = new HashMap<>();
datesTimeContent.put("day", "2019-1-12");
datesTimeContent.put("firstTime", "2019-02-17 11:24:00.000");
datesTimeContent.put("secondTime", "14:00:00");
datesTimeContent.put("thirdTime", "2020-04-27T18:07:25.356Z");

Map<String, Object> rowContent = new HashMap<>();
rowContent.put("name", "Tom");
rowContent.put("age", 30);
rowContent.put("school", "Test University".getBytes());
rowContent.put("location", "POINT(1 2)");
rowContent.put("measurements", new Float[] {50.05f, 100.5f});
rowContent.put("datesTime", datesTimeContent);

InsertAllResponse response =
bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow(rowContent).build());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,17 @@
import com.google.cloud.bigquery.TableId;

// Sample to load Avro data from Cloud Storage into a new BigQuery table
public class LoadAvroFromGCS {
public class LoadAvroFromGcs {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.avro";
loadAvroFromGCS(datasetName, tableName, sourceUri);
loadAvroFromGcs(datasetName, tableName, sourceUri);
}

public static void loadAvroFromGCS(String datasetName, String tableName, String sourceUri) {
public static void loadAvroFromGcs(String datasetName, String tableName, String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,17 @@
import com.google.cloud.bigquery.TableId;

// Sample to overwrite the BigQuery table data by loading an Avro file from GCS
public class LoadAvroFromGCSTruncate {
public class LoadAvroFromGcsTruncate {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.avro";
loadAvroFromGCSTruncate(datasetName, tableName, sourceUri);
loadAvroFromGcsTruncate(datasetName, tableName, sourceUri);
}

public static void loadAvroFromGCSTruncate(
public static void loadAvroFromGcsTruncate(
String datasetName, String tableName, String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,17 @@
import com.google.cloud.bigquery.TableId;

// Sample to load CSV data with autodetect schema from Cloud Storage into a new BigQuery table
public class LoadCsvFromGCSAutodetect {
public class LoadCsvFromGcsAutodetect {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv";
loadCsvFromGCSAutodetect(datasetName, tableName, sourceUri);
loadCsvFromGcsAutodetect(datasetName, tableName, sourceUri);
}

public static void loadCsvFromGCSAutodetect(
public static void loadCsvFromGcsAutodetect(
String datasetName, String tableName, String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import com.google.cloud.bigquery.TableId;

// Sample to load JSON data from Cloud Storage into a new BigQuery table
public class LoadJsonFromGCS {
public class LoadJsonFromGcs {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
Expand All @@ -41,10 +41,10 @@ public static void main(String[] args) {
Schema.of(
Field.of("name", StandardSQLTypeName.STRING),
Field.of("post_abbr", StandardSQLTypeName.STRING));
loadJsonFromGCS(datasetName, tableName, sourceUri, schema);
loadJsonFromGcs(datasetName, tableName, sourceUri, schema);
}

public static void loadJsonFromGCS(
public static void loadJsonFromGcs(
String datasetName, String tableName, String sourceUri, Schema schema) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,17 @@
import com.google.cloud.bigquery.TableId;

// Sample to load JSON data with autodetect schema from Cloud Storage into a new BigQuery table
public class LoadJsonFromGCSAutodetect {
public class LoadJsonFromGcsAutodetect {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
loadJsonFromGCSAutodetect(datasetName, tableName, sourceUri);
loadJsonFromGcsAutodetect(datasetName, tableName, sourceUri);
}

public static void loadJsonFromGCSAutodetect(
public static void loadJsonFromGcsAutodetect(
String datasetName, String tableName, String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
import com.google.cloud.bigquery.TableId;

// Sample to load JSON data from Cloud Storage into a new BigQuery table encrypted with a customer-managed encryption key (CMEK)
public class LoadJsonFromGCSCMEK {
public class LoadJsonFromGcsCmek {

public static void main(String[] args) {
// TODO(developer): Replace these variables before running the sample.
Expand All @@ -39,10 +39,10 @@ public static void main(String[] args) {
// i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}
EncryptionConfiguration encryption =
EncryptionConfiguration.newBuilder().setKmsKeyName(kmsKeyName).build();
loadJsonFromGCSCMEK(datasetName, tableName, sourceUri, encryption);
loadJsonFromGcsCmek(datasetName, tableName, sourceUri, encryption);
}

public static void loadJsonFromGCSCMEK(
public static void loadJsonFromGcsCmek(
String datasetName, String tableName, String sourceUri, EncryptionConfiguration encryption) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down
Loading

0 comments on commit fe5abc0

Please sign in to comment.