Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/scorecard.yml
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These file changes were added automatically to the PR by a bot... I'm not sure if I can revert them or if this is expected behavior; it seems they are just updating some dependency versions.

Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ jobs:
persist-credentials: false

- name: "Run analysis"
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
with:
results_file: results.sarif
results_format: sarif
Expand All @@ -59,7 +59,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: SARIF file
path: results.sarif
Expand Down
2 changes: 1 addition & 1 deletion benchmark/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.13.0</version>
<version>3.14.0</version>
<configuration>
<annotationProcessorPaths>
<path>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,19 @@

package com.google.cloud.bigquery;

import com.google.cloud.bigquery.FieldValue.Attribute;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
Expand Down Expand Up @@ -114,6 +119,77 @@ private class BigQueryResultSet extends AbstractJdbcResultSet {
// curTup.isLast(). Ref: https://github.com/googleapis/java-bigquery/issues/2033
private boolean wasNull = false;

private class BigQueryArrayResult implements java.sql.Array {
  final List<Object> array;

  /**
   * Wraps a raw query result value as a read-only {@code java.sql.Array}.
   *
   * @param array the raw value: an {@code Object[]} (Read API / Arrow), a {@link FieldValueList}
   *     (REST API), or an existing {@code List}
   * @throws IllegalArgumentException if {@code array} is null or none of the supported container
   *     types (fail fast instead of a delayed ClassCastException at access time)
   */
  @SuppressWarnings("unchecked")
  public BigQueryArrayResult(Object array) {
    if (array instanceof Object[]) {
      this.array = new ArrayList<>(Arrays.asList((Object[]) array));
    } else if (array instanceof FieldValueList) {
      this.array = new ArrayList<>((FieldValueList) array);
    } else if (array instanceof List) {
      this.array = (List<Object>) array;
    } else {
      throw new IllegalArgumentException(
          "Unsupported array value type: "
              + (array == null ? "null" : array.getClass().getName()));
    }
  }

  @Override
  public String getBaseTypeName() throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public int getBaseType() throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  /**
   * Returns the underlying values as a {@code List}. NOTE(review): the JDBC contract for {@code
   * Array.getArray()} nominally returns a Java array ({@code Object[]}); callers of this driver
   * receive a {@code List} instead — confirm this is the intended public contract.
   */
  @Override
  public Object getArray() throws SQLException {
    return array;
  }

  @Override
  public Object getArray(java.util.Map<String, Class<?>> map) throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public Object getArray(long index, int count) throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public Object getArray(long index, int count, java.util.Map<String, Class<?>> map)
      throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public ResultSet getResultSet() throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public ResultSet getResultSet(java.util.Map<String, Class<?>> map) throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public ResultSet getResultSet(long index, int count) throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  @Override
  public ResultSet getResultSet(long index, int count, java.util.Map<String, Class<?>> map)
      throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }

  // No server-side resources are held; throwing here means callers that defensively call
  // free() will fail. Kept as-is for behavior compatibility — consider making this a no-op.
  @Override
  public void free() throws SQLException {
    throw new SQLFeatureNotSupportedException();
  }
}

@Override
/*Advances the result set to the next row, returning false if no such row exists. Potentially blocking operation*/
public boolean next() throws SQLException {
Expand Down Expand Up @@ -160,6 +236,54 @@ private Object getCurrentValueForReadApiData(String fieldName) throws SQLExcepti
return curRow.get(fieldName);
}

/**
 * Returns the value of the named column as a {@code java.sql.Array}, or {@code null} (with
 * {@code wasNull} set) when the value is SQL NULL or the column is not REPEATED.
 *
 * @param fieldName the column label; must not be null
 * @throws SQLException if {@code fieldName} is null
 * @throws BigQuerySQLException if the cursor has not been positioned on a row
 */
@Override
public java.sql.Array getArray(String fieldName) throws SQLException {
  if (fieldName == null) {
    throw new SQLException("fieldName can't be null");
  }
  if (cursor == null) {
    throw new BigQuerySQLException(NULL_CURSOR_MSG);
  }
  if (!(cursor instanceof FieldValueList)) {
    // Data received from the Read API (Arrow).
    Object readApiValue = getCurrentValueForReadApiData(fieldName);
    if (readApiValue == null) {
      wasNull = true;
      return null;
    }
    wasNull = false;
    return new BigQueryArrayResult(readApiValue);
  }
  // Data received from the REST API (json).
  FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName);
  if (fieldValue == null || fieldValue.getValue() == null) {
    wasNull = true;
    return null;
  }
  if (!fieldValue.getAttribute().equals(Attribute.REPEATED)) {
    // Not an array-typed column: treat as SQL NULL rather than wrapping a scalar.
    wasNull = true;
    return null;
  }
  wasNull = false;
  return new BigQueryArrayResult(fieldValue.getValue());
}

/**
 * Returns the value of the column at {@code columnIndex} as a {@code java.sql.Array}, or
 * {@code null} (with {@code wasNull} set) when the value is SQL NULL or the column is not
 * REPEATED.
 *
 * @param columnIndex zero-based column index into the row
 * @throws BigQuerySQLException if the cursor has not been positioned on a row
 */
@Override
public java.sql.Array getArray(int columnIndex) throws SQLException {
  if (cursor == null) {
    // Consistent with getArray(String): an unpositioned cursor is an error, not a null array.
    throw new BigQuerySQLException(NULL_CURSOR_MSG);
  } else if (cursor instanceof FieldValueList) {
    FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex);
    if (fieldValue == null || fieldValue.getValue() == null) {
      wasNull = true;
      return null;
    }
    if (!fieldValue.getAttribute().equals(Attribute.REPEATED)) {
      // Mirror getArray(String): a non-REPEATED column yields SQL NULL, not a wrapped scalar.
      wasNull = true;
      return null;
    }
    wasNull = false;
    return new BigQueryArrayResult(fieldValue.getValue());
  } else { // Data received from Read API (Arrow): resolve the name and delegate.
    return getArray(schemaFieldList.get(columnIndex).getName());
  }
}

@Override
public Object getObject(String fieldName) throws SQLException {
if (fieldName == null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@
import java.sql.Timestamp;
import java.time.LocalTime;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
Expand Down Expand Up @@ -58,7 +60,13 @@ public class BigQueryResultImplTest {
.setMode(Field.Mode.NULLABLE)
.build(),
Field.newBuilder("time", StandardSQLTypeName.TIME).setMode(Field.Mode.NULLABLE).build(),
Field.newBuilder("date", StandardSQLTypeName.DATE).setMode(Field.Mode.NULLABLE).build());
Field.newBuilder("date", StandardSQLTypeName.DATE).setMode(Field.Mode.NULLABLE).build(),
Field.newBuilder("intArray", StandardSQLTypeName.INT64)
.setMode(Field.Mode.REPEATED)
.build(),
Field.newBuilder("stringArray", StandardSQLTypeName.STRING)
.setMode(Field.Mode.REPEATED)
.build());

private static final FieldList FIELD_LIST_SCHEMA =
FieldList.of(
Expand All @@ -69,7 +77,9 @@ public class BigQueryResultImplTest {
Field.of("bytes", LegacySQLTypeName.BYTES),
Field.of("timestamp", LegacySQLTypeName.TIMESTAMP),
Field.of("time", LegacySQLTypeName.TIME),
Field.of("date", LegacySQLTypeName.DATE));
Field.of("date", LegacySQLTypeName.DATE),
Field.of("intArray", LegacySQLTypeName.INTEGER),
Field.of("stringArray", LegacySQLTypeName.STRING));

private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD};
private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES);
Expand All @@ -79,6 +89,11 @@ public class BigQueryResultImplTest {
private static final String DATE = "2020-01-21";
private static final int DATE_INT = 0;
private static final Date EXPECTED_DATE = java.sql.Date.valueOf(DATE);
private static final ArrayList<Integer> EXPECTED_INT_ARRAY =
new ArrayList<>(Arrays.asList(0, 1, 2, 3, 4));
private static final String[] STRING_ARRAY = {"str1", "str2", "str3"};
private static final ArrayList<String> EXPECTED_STRING_ARRAY =
new ArrayList<>(Arrays.asList(STRING_ARRAY));
private static final int BUFFER_SIZE = 10;

@Test
Expand All @@ -97,7 +112,9 @@ public void testResultSetFieldValueList() throws InterruptedException, SQLExcept
Long.toString(EXPECTED_TIMESTAMP.getTime() / 1000),
false), // getTime is in milliseconds.
FieldValue.of(Attribute.PRIMITIVE, TIME),
FieldValue.of(Attribute.PRIMITIVE, DATE)),
FieldValue.of(Attribute.PRIMITIVE, DATE),
FieldValue.of(Attribute.REPEATED, EXPECTED_INT_ARRAY),
FieldValue.of(Attribute.REPEATED, STRING_ARRAY)),
FIELD_LIST_SCHEMA);
buffer.put(fieldValues);

Expand All @@ -111,7 +128,9 @@ public void testResultSetFieldValueList() throws InterruptedException, SQLExcept
FieldValue.of(Attribute.PRIMITIVE, null),
FieldValue.of(Attribute.PRIMITIVE, null),
FieldValue.of(Attribute.PRIMITIVE, null),
FieldValue.of(Attribute.PRIMITIVE, null)),
FieldValue.of(Attribute.PRIMITIVE, null),
FieldValue.of(Attribute.REPEATED, null),
FieldValue.of(Attribute.REPEATED, null)),
FIELD_LIST_SCHEMA);
buffer.put(nullValues);

Expand Down Expand Up @@ -143,6 +162,10 @@ public void testResultSetFieldValueList() throws InterruptedException, SQLExcept
assertThat(resultSet.wasNull()).isFalse();
assertThat(resultSet.getDate("date").getTime()).isEqualTo(EXPECTED_DATE.getTime());
assertThat(resultSet.wasNull()).isFalse();
assertThat(resultSet.getArray("intArray").getArray()).isEqualTo(EXPECTED_INT_ARRAY);
assertThat(resultSet.wasNull()).isFalse();
assertThat(resultSet.getArray("stringArray").getArray()).isEqualTo(EXPECTED_STRING_ARRAY);
assertThat(resultSet.wasNull()).isFalse();

assertThat(resultSet.next()).isTrue();
assertThat(resultSet.getObject("string")).isNull();
Expand All @@ -167,6 +190,10 @@ public void testResultSetFieldValueList() throws InterruptedException, SQLExcept
assertThat(resultSet.wasNull()).isTrue();
assertThat(resultSet.getDate("date")).isNull();
assertThat(resultSet.wasNull()).isTrue();
assertThat(resultSet.getArray("intArray")).isNull();
assertThat(resultSet.wasNull()).isTrue();
assertThat(resultSet.getArray("stringArray")).isNull();
assertThat(resultSet.wasNull()).isTrue();

assertThat(resultSet.next()).isFalse();
}
Expand All @@ -184,6 +211,7 @@ public void testResultSetReadApi() throws InterruptedException, SQLException {
rowValues.put("timestamp", EXPECTED_TIMESTAMP.getTime() * 1000);
rowValues.put("time", EXPECTED_TIME.getTime() * 1000);
rowValues.put("date", DATE_INT);
rowValues.put("intArray", EXPECTED_INT_ARRAY);
buffer.put(new BigQueryResultImpl.Row(rowValues));

Map<String, Object> nullValues = new HashMap<>();
Expand All @@ -195,6 +223,7 @@ public void testResultSetReadApi() throws InterruptedException, SQLException {
nullValues.put("timestamp", null);
nullValues.put("time", null);
nullValues.put("date", null);
nullValues.put("intArray", null);
buffer.put(new BigQueryResultImpl.Row(nullValues));

buffer.put(new BigQueryResultImpl.Row(null, true)); // End of buffer marker.
Expand Down Expand Up @@ -227,6 +256,8 @@ public void testResultSetReadApi() throws InterruptedException, SQLException {
// JVM default timezone which causes flakes in non-UTC zones.
assertThat(resultSet.getDate("date")).isNotNull();
assertThat(resultSet.wasNull()).isFalse();
assertThat(resultSet.getArray("intArray")).isNotNull();
assertThat(resultSet.wasNull()).isFalse();

assertThat(resultSet.next()).isTrue();
assertThat(resultSet.getObject("string")).isNull();
Expand All @@ -251,6 +282,8 @@ public void testResultSetReadApi() throws InterruptedException, SQLException {
assertThat(resultSet.wasNull()).isTrue();
assertThat(resultSet.getDate("date")).isNull();
assertThat(resultSet.wasNull()).isTrue();
assertThat(resultSet.getArray("intArray")).isNull();
assertThat(resultSet.wasNull()).isTrue();

assertThat(resultSet.next()).isFalse();
}
Expand Down
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Both of the modified integration tests exercise the non-Read-API path. Can you check whether we have an existing integration test that exercises the Read API path, and augment it to validate getArray there as well?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've added a new IT test to test the ReadAPI path. Turns out that path had not been tested yet, and so the expected responses for getDate() and getTime() were inaccurate. The ReadAPI path returns the Date and Time objects in local format instead of UTC format.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As discussed offline, it looks more like my local machine had a different local time than that of the VM running the kokoro, resulting in different dates and time objects. This has been resolved.

Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,8 @@ public class ITBigQueryTest {
ImmutableMap.of(
"example-label1", "example-value1",
"example-label2", "example-value2");
private static final List<Integer> INTEGER_ARRAY_FIELD_LIST =
new ArrayList<>(Arrays.asList(1, 2, 3, 4));
private static final Field TIMESTAMP_FIELD_SCHEMA =
Field.newBuilder("TimestampField", LegacySQLTypeName.TIMESTAMP)
.setMode(Field.Mode.NULLABLE)
Expand Down Expand Up @@ -3925,6 +3927,10 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException {
(com.google.cloud.bigquery.FieldValueList) rs.getObject("IntegerArrayField");
assertEquals(4, integerArrayFieldValue.size()); // Array has 4 elements
assertEquals(3, (integerArrayFieldValue.get(2).getNumericValue()).intValue());
List<FieldValue> integerArrayFieldValueList =
(List<FieldValue>) rs.getArray("IntegerArrayField").getArray();
assertEquals(4, integerArrayFieldValueList.size());
assertEquals(3, integerArrayFieldValueList.get(2).getNumericValue().intValue());

assertFalse(rs.next()); // no 3rd row in the table
}
Expand Down Expand Up @@ -4278,6 +4284,19 @@ public void testExecuteSelectSinglePageTableRowColInd() throws SQLException {
(integerArrayFieldValue.get(2).getNumericValue()).intValue(),
(integerArrayFieldValueColInd.get(2).getNumericValue()).intValue());
}

List<FieldValue> integerArrayFieldValueList =
(List<FieldValue>) rs.getArray("IntegerArrayField").getArray();
List<FieldValue> integerArrayFieldValueListColInd =
(List<FieldValue>) rs.getArray(14).getArray();
assertEquals(
integerArrayFieldValueList.size(),
integerArrayFieldValueListColInd.size()); // Array has 4 elements
if (integerArrayFieldValueList.size() == 4) { // as we are picking the third index
assertEquals(
(integerArrayFieldValueList.get(2).getNumericValue()).intValue(),
(integerArrayFieldValueListColInd.get(2).getNumericValue()).intValue());
}
}
}

Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<github.global.server>github</github.global.server>
<site.installationModule>google-cloud-bigquery-parent</site.installationModule>
<google-api-services-bigquery.version>v2-rev20250128-2.0.0</google-api-services-bigquery.version>
<google-api-services-bigquery.version>v2-rev20250216-2.0.0</google-api-services-bigquery.version>
</properties>

<dependencyManagement>
Expand Down