Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,17 @@ public void testCatalogWithCacheEnabled()
.put("hive.pushdown-filter-enabled", "true")
.build();

getQueryRunner().createCatalog("hivecached", "hive", hiveProperties);
try {
getQueryRunner().createCatalog("hivecached", "hive", hiveProperties);
}
catch (IllegalArgumentException e) {
if (e.getMessage().contains("A catalog already exists")) {
System.out.println("Catalog 'hivecached' already exists, skipping creation");
}
else {
throw e;
}
}

Session actualSession = Session.builder(getSession())
.setCatalog("hivecached")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import java.nio.file.Paths;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import static com.facebook.airlift.log.Level.WARN;
import static com.facebook.presto.nativeworker.NativeQueryRunnerUtils.getNativeWorkerHiveProperties;
Expand Down Expand Up @@ -106,6 +107,39 @@ public static PrestoSparkQueryRunner createHiveRunner()
return queryRunner;
}

/**
 * Similar to createHiveRunner(), but also adds the specified additional catalogs and their
 * corresponding properties. This method exists because unlike Java, native execution does not
 * allow adding catalogs in the tests after the process starts, so any tests that need
 * additional catalogs must register them upon runner creation.
 *
 * @param additionalCatalogs map from catalog name to that catalog's connector properties;
 *         each value map must contain a "connector.name" entry naming the connector to use
 * @return a runner configured with the default "hive" catalog plus every entry of
 *         {@code additionalCatalogs}, registered on both the native and the Java side
 * @throws IllegalArgumentException if a catalog's property map lacks "connector.name"
 */
public static PrestoSparkQueryRunner createHiveRunner(Map<String, Map<String, String>> additionalCatalogs)
{
    // Register connectors on the native side to make them available during execution.
    ImmutableMap.Builder<String, Map<String, String>> catalogBuilder = ImmutableMap.builder();
    catalogBuilder.put("hive", ImmutableMap.of("connector.name", "hive"))
            .putAll(additionalCatalogs);
    PrestoSparkQueryRunner queryRunner = createRunner("hive", new NativeExecutionModule(
            Optional.of(catalogBuilder.build())));

    // Register connectors on the Java side to make them visible during planning.
    additionalCatalogs.forEach((catalogName, properties) -> {
        String connectorName = properties.get("connector.name");
        if (connectorName == null) {
            // Fail fast with a clear message instead of passing null through to createCatalog.
            throw new IllegalArgumentException(
                    "Missing required property 'connector.name' for catalog: " + catalogName);
        }
        // Strip "connector.name" from the properties handed to the Java-side catalog config,
        // as it is not applicable there (it is already fed in as the connector name argument).
        Map<String, String> javaProperties = properties.entrySet().stream()
                .filter(entry -> !entry.getKey().equals("connector.name"))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        queryRunner.createCatalog(catalogName, connectorName, javaProperties);
    });

    PrestoNativeQueryRunnerUtils.setupJsonFunctionNamespaceManager(queryRunner,
            "external_functions.json", "json");

    return queryRunner;
}

private static PrestoSparkQueryRunner createRunner(String defaultCatalog, NativeExecutionModule nativeExecutionModule)
{
// Increases log level to reduce log spamming while running test.
Expand All @@ -122,10 +156,10 @@ private static PrestoSparkQueryRunner createRunner(String defaultCatalog, Native
public static PrestoSparkQueryRunner createTpchRunner()
{
return createRunner(
"tpchstandard",
new NativeExecutionModule(
Optional.of(
ImmutableMap.of("hive", ImmutableMap.of("connector.name", "tpch")))));
"tpchstandard",
new NativeExecutionModule(
Optional.of(
ImmutableMap.of("tpchstandard", ImmutableMap.of("connector.name", "tpch")))));
}

public static PrestoSparkQueryRunner createRunner(String defaultCatalog, Optional<Path> baseDir, Map<String, String> additionalConfigProperties, Map<String, String> additionalSparkProperties, ImmutableList<Module> nativeModules)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import com.facebook.presto.scalar.sql.SqlInvokedFunctionsPlugin;
import com.facebook.presto.testing.ExpectedQueryRunner;
import com.facebook.presto.testing.QueryRunner;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Ignore;

import java.util.ArrayList;
Expand All @@ -32,7 +33,12 @@ public class TestPrestoSparkNativeGeneralQueries
@Override
protected QueryRunner createQueryRunner()
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

suggestion (testing): Restoring previously ignored tests could improve coverage.

Since the runner now supports multiple catalogs, please re-enable 'testCatalogWithCacheEnabled' and confirm its assertions to ensure the recent changes are properly tested.

Suggested implementation:

    @Override
    public void testCatalogWithCacheEnabled() {
        // Ensure the runner supports multiple catalogs and cache-enabled catalog works
        PrestoSparkNativeQueryRunner runner = PrestoSparkNativeQueryRunnerUtils.createHiveRunner(
            ImmutableMap.of(
                "hivecached", ImmutableMap.of("connector.name", "hive"),
                "hive", ImmutableMap.of("connector.name", "hive")
            )
        );
        // Example assertion: query both catalogs and check results
        assertQuery(runner, "SELECT * FROM hivecached.default.nation", "SELECT * FROM hive.default.nation");
        // Add more assertions as needed to confirm cache behavior
    }

If the assertQuery method or the runner setup requires additional configuration for cache validation, you may need to add more specific assertions or setup steps. Also, ensure that the test data exists in both catalogs for the assertion to pass.

{
QueryRunner queryRunner = PrestoSparkNativeQueryRunnerUtils.createHiveRunner();
// Adding additional catalog needed in some tests in the suite.
QueryRunner queryRunner = PrestoSparkNativeQueryRunnerUtils.createHiveRunner(
ImmutableMap.of("hivecached",
ImmutableMap.of("connector.name", "hive",
"hive.storage-format", "DWRF",
"hive.pushdown-filter-enabled", "true")));

// Install plugins needed for extra array functions.
queryRunner.installPlugin(new SqlInvokedFunctionsPlugin());
Expand Down Expand Up @@ -115,9 +121,4 @@ public void testRowWiseExchange() {}
@Override
@Ignore
public void testAnalyzeStatsOnDecimals() {}

// VeloxRuntimeError: it != connectors().end() Connector with ID 'hivecached' not registered
@Override
@Ignore
public void testCatalogWithCacheEnabled() {}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
import com.facebook.presto.nativeworker.AbstractTestNativeTpchConnectorQueries;
import com.facebook.presto.testing.ExpectedQueryRunner;
import com.facebook.presto.testing.QueryRunner;
import org.testng.annotations.Ignore;

public class TestPrestoSparkNativeTpchConnectorQueries
extends AbstractTestNativeTpchConnectorQueries
Expand All @@ -39,13 +38,4 @@ public void testMissingTpchConnector()
{
super.testMissingTpchConnector(".*Catalog tpch does not exist*");
}

@Override
@Ignore
public void testTpchTinyTables() {}

// VeloxRuntimeError: it != connectors().end() Connector with ID 'tpchstandard' not registered
@Override
@Ignore
public void testTpchDateFilter() {}
}
Loading