Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions docs/src/main/sphinx/connector/kudu.rst
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@ Compatibility
The Kudu connector is compatible with all Apache Kudu versions starting from 1.0.

If the connector uses features that are not available on the target server, an error is returned.
Apache Kudu 1.8.0 is currently used for testing.

Apache Kudu 1.10.0 and 1.14.0 are currently used for testing, but any intermediate or newer
versions are expected to work.

Configuration
-------------
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,62 +13,35 @@
*/
package io.trino.plugin.kudu;

import io.trino.testing.AbstractTestDistributedQueries;
import io.trino.testing.BaseConnectorTest;
import io.trino.testing.MaterializedResult;
import io.trino.testing.QueryRunner;
import io.trino.testing.TestingConnectorBehavior;
import io.trino.testing.sql.TestTable;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;

import java.util.Optional;

import static io.trino.plugin.kudu.KuduQueryRunnerFactory.createKuduQueryRunnerTpch;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.testing.MaterializedResult.resultBuilder;
import static io.trino.testing.assertions.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;

public class TestKuduDistributedQueries
extends AbstractTestDistributedQueries
public abstract class BaseKuduConnectorTest
extends BaseConnectorTest
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If you are using BaseConnectorTest then you should also remove AbstractKuduIntegrationSmokeTest as smoke tests are already in BaseConnectorTest.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@kokosing Took a look at removing this. I am a bit hesitant because each of the integration smoke tests implementations have a separate schema inference -- disabled, empty, or standard -- which is given to as an input parameter to a QueryRunner. This seems intentional to me, but I am not familiar with the Kudu connector tests. Just trying to add support for testing the latest. Let me know your thoughts.

I've rebased this PR to the latest master.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

each of the integration smoke tests implementations have a separate schema inference -- disabled, empty, or standard
So you can have a dedicated instance of BaseKuduConnectorTest for each case. Or you can create a BaseKuduConnectorSmokeTest (that extends BaseConnectorSmokeTest) and use it for each case.

{
private TestingKuduServer kuduServer;

@Override
protected QueryRunner createQueryRunner()
throws Exception
{
// Start a containerized Kudu server for the whole test class; the instance
// is stored in a field so destroy() can shut it down after the suite runs.
kuduServer = new TestingKuduServer();
// Optional.of("") selects the empty schema-emulation prefix — TODO confirm
// against KuduQueryRunnerFactory which inference mode this maps to.
return createKuduQueryRunnerTpch(kuduServer, Optional.of(""), REQUIRED_TPCH_TABLES);
}

@AfterClass(alwaysRun = true)
public void destroy()
{
    // alwaysRun = true means this executes even when createQueryRunner()
    // failed before assigning kuduServer; guard against the resulting NPE
    // so the original test failure is not masked by a teardown exception.
    if (kuduServer != null) {
        kuduServer.close();
        kuduServer = null;
    }
}

@Override
protected boolean supportsViews()
{
// Kudu has no view support; skip view-related tests in the base class.
return false;
}

@Override
protected boolean supportsArrays()
{
// The Kudu connector does not support the ARRAY type.
return false;
}

@Override
protected boolean supportsCommentOnTable()
{
// COMMENT ON TABLE is not supported by the Kudu connector.
return false;
}

@Override
protected boolean supportsCommentOnColumn()
protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
{
return false;
switch (connectorBehavior) {
case SUPPORTS_COMMENT_ON_TABLE:
case SUPPORTS_COMMENT_ON_COLUMN:
case SUPPORTS_CREATE_VIEW:
case SUPPORTS_ARRAY:
return false;
default:
return super.hasBehavior(connectorBehavior);
}
}

@Override
Expand Down Expand Up @@ -155,6 +128,50 @@ public void testColumnName(String columnName)
throw new SkipException("TODO");
}

@Override
@Test
public void testDescribeTable()
{
    // Kudu reports per-column storage attributes in DESCRIBE's "Extra" column;
    // every TPC-H orders column carries the same default attributes.
    String extra = "nullable, encoding=auto, compression=default";
    String[][] columns = {
            {"orderkey", "bigint"},
            {"custkey", "bigint"},
            {"orderstatus", "varchar"},
            {"totalprice", "double"},
            {"orderdate", "varchar"},
            {"orderpriority", "varchar"},
            {"clerk", "varchar"},
            {"shippriority", "integer"},
            {"comment", "varchar"},
    };
    MaterializedResult.Builder expectedColumns = MaterializedResult.resultBuilder(getQueryRunner().getDefaultSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR);
    for (String[] column : columns) {
        expectedColumns.row(column[0], column[1], extra, "");
    }
    MaterializedResult actualColumns = computeActual("DESCRIBE orders");
    assertEquals(actualColumns, expectedColumns.build());
}

@Override
@Test
public void testShowCreateTable()
{
// Verifies the Kudu-specific SHOW CREATE TABLE output: per-column
// `nullable` properties plus table-level partitioning/replication
// properties. The catalog and schema names vary per runner, so they are
// matched with \w+ while the rest of the DDL is matched literally (\Q).
// NOTE(review): the row_uuid partition columns presumably come from the
// empty-prefix schema emulation used by this runner — confirm against
// KuduQueryRunnerFactory.
assertThat((String) computeActual("SHOW CREATE TABLE orders").getOnlyValue())
.matches("CREATE TABLE \\w+\\.\\w+\\.orders \\Q(\n" +
" orderkey bigint WITH ( nullable = true ),\n" +
" custkey bigint WITH ( nullable = true ),\n" +
" orderstatus varchar WITH ( nullable = true ),\n" +
" totalprice double WITH ( nullable = true ),\n" +
" orderdate varchar WITH ( nullable = true ),\n" +
" orderpriority varchar WITH ( nullable = true ),\n" +
" clerk varchar WITH ( nullable = true ),\n" +
" shippriority integer WITH ( nullable = true ),\n" +
" comment varchar WITH ( nullable = true )\n" +
")\n" +
"WITH (\n" +
" number_of_replicas = 3,\n" +
" partition_by_hash_buckets = 2,\n" +
" partition_by_hash_columns = ARRAY['row_uuid'],\n" +
" partition_by_range_columns = ARRAY['row_uuid'],\n" +
" range_partitions = '[{\"lower\":null,\"upper\":null}]'\n" +
")");
}

@Override
protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
{
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.kudu;

import io.trino.testing.QueryRunner;
import io.trino.tpch.TpchTable;

import java.util.Optional;

import static io.trino.plugin.kudu.KuduQueryRunnerFactory.createKuduQueryRunnerTpch;

public class TestKuduConnectorTest
        extends BaseKuduConnectorTest
{
    @Override
    protected QueryRunner createQueryRunner()
            throws Exception
    {
        // Server lifetime is tied to the test class via closeAfterClass.
        TestingKuduServer server = closeAfterClass(new TestingKuduServer());
        return createKuduQueryRunnerTpch(server, Optional.of(""), TpchTable.getTables());
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.kudu;

import io.trino.testing.QueryRunner;
import io.trino.tpch.TpchTable;

import java.util.Optional;

import static io.trino.plugin.kudu.KuduQueryRunnerFactory.createKuduQueryRunnerTpch;

public class TestKuduLatestConnectorTest
extends BaseKuduConnectorTest
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should this be a ConnectorSmokeTest only?

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've been a bit out of the loop on the changes from Distributed/Smoke tests to these BaseConnectorTest and BaseConnectorSmokeTest. I'm okay keeping the "latest" tests as a smoke test if that has been the pattern that has been adopted for testing the later versions of the software. Makes sense since the expectation is that newer versions should work and we don't need to go through the full breadth of testing and a quick smoke test is sufficient.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

that's my thinking as well

{
@Override
protected QueryRunner createQueryRunner()
throws Exception
{
// Runs the suite against the newest tested Kudu release; the default
// constructor (elsewhere) uses the 1.10.0 baseline. closeAfterClass
// ties the container's lifetime to this test class.
TestingKuduServer kuduServer = closeAfterClass(new TestingKuduServer("1.14.0"));
return createKuduQueryRunnerTpch(kuduServer, Optional.of(""), TpchTable.getTables());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,15 @@ public class TestingKuduServer
private final List<GenericContainer<?>> tServers;

public TestingKuduServer()
{
// Default to Kudu 1.10.0, the baseline version exercised by the main
// test suite; callers testing newer releases pass an explicit version.
this("1.10.0");
}

public TestingKuduServer(String kuduVersion)
{
Network network = Network.newNetwork();
ImmutableList.Builder<GenericContainer<?>> tServersBuilder = ImmutableList.builder();
this.master = new GenericContainer<>("apache/kudu:1.10.0")
this.master = new GenericContainer<>("apache/kudu:" + kuduVersion)
.withExposedPorts(KUDU_MASTER_PORT)
.withCommand("master")
.withNetwork(network)
Expand Down