Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
76 changes: 76 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -525,6 +525,8 @@ jobs:

build-pt:
runs-on: ubuntu-latest
outputs:
have_azure_secrets: ${{ steps.check-secrets.outputs.have_azure_secrets }}
steps:
- uses: actions/checkout@v2
with:
Expand All @@ -536,6 +538,20 @@ jobs:
distribution: 'zulu'
java-version: 11
cache: 'maven'
- name: Check secrets
run: |
if [[ "${{ secrets.AZURE_ABFS_CONTAINER }}" != "" && \
"${{ secrets.AZURE_ABFS_ACCOUNT }}" != "" && \
"${{ secrets.AZURE_ABFS_ACCESSKEY }}" != ""
]]; \
then
echo "Secrets to run product tests were configured in the repo"
echo "::set-output name=have_azure_secrets::true"
else
echo "Secrets to run product tests were not configured in the repo"
echo "::set-output name=have_azure_secrets::false"
fi
id: check-secrets
- name: Maven Install
run: |
export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
Expand Down Expand Up @@ -718,3 +734,63 @@ jobs:
name: test report pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }})
path: testing/trino-product-tests/target/reports/**/testng-results.xml
retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }}

azure-pt:
runs-on: ubuntu-latest
needs: build-pt
if: needs.build-pt.outputs.have_azure_secrets == 'true'
strategy:
fail-fast: false
matrix:
config:
- hdp3
suite:
- suite-azure
jdk:
- 11
timeout-minutes: 30
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent
- uses: actions/setup-java@v2
with:
distribution: 'zulu'
java-version: 11
- name: Product tests artifact
uses: actions/download-artifact@v2
with:
name: product tests and server tarball
- name: Fix artifact permissions
run: |
find . -type f -name \*-executable.jar -exec chmod 0777 {} \;
- name: Product Tests
env:
ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }}
ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }}
ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_ACCESSKEY }}
run: |
testing/bin/ptl suite run \
--suite ${{ matrix.suite }} \
--config config-${{ matrix.config }} \
--bind=off --logs-dir logs/ --timeout 2h \
--trino-jdk-version zulu_${{ matrix.jdk }}
- name: Upload test logs and results
uses: actions/upload-artifact@v2
# Upload all test reports only on failure, because the artifacts are large
if: failure()
with:
name: result pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }})
path: |
testing/trino-product-tests/target/*
logs/*
- name: Upload test report
uses: actions/upload-artifact@v2
# Always upload the test report for the annotate.yml workflow,
# but only the single XML file to keep the artifact small
if: always()
with:
# Name prefix is checked in the `Annotate checks` workflow
name: test report pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }})
path: testing/trino-product-tests/target/reports/**/testng-results.xml
retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }}
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.env.environment;

import com.google.common.collect.ImmutableList;
import io.trino.tests.product.launcher.docker.DockerFiles;
import io.trino.tests.product.launcher.env.Environment;
import io.trino.tests.product.launcher.env.EnvironmentConfig;
import io.trino.tests.product.launcher.env.EnvironmentProvider;
import io.trino.tests.product.launcher.env.common.Hadoop;
import io.trino.tests.product.launcher.env.common.StandardMultinode;
import io.trino.tests.product.launcher.env.common.TestsEnvironment;

import javax.inject.Inject;

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermissions;

import static io.trino.tests.product.launcher.env.EnvironmentContainers.COORDINATOR;
import static io.trino.tests.product.launcher.env.EnvironmentContainers.HADOOP;
import static io.trino.tests.product.launcher.env.EnvironmentContainers.TESTS;
import static io.trino.tests.product.launcher.env.EnvironmentContainers.WORKER;
import static io.trino.tests.product.launcher.env.common.Hadoop.CONTAINER_HADOOP_INIT_D;
import static java.nio.file.attribute.PosixFilePermissions.fromString;
import static java.util.Objects.requireNonNull;
import static org.testcontainers.utility.MountableFile.forHostPath;

@TestsEnvironment
public class EnvMultinodeAzure
        extends EnvironmentProvider
{
    // Source of docker-files resources mounted into the containers.
    private final DockerFiles dockerFiles;
    // Resource provider rooted at conf/environment/multinode-azure.
    private final DockerFiles.ResourceProvider configDir;
    // Hadoop image coordinates taken from the environment config (e.g. hdp3).
    private final String hadoopBaseImage;
    private final String hadoopImagesVersion;

    /**
     * Builds a multinode product-test environment (standard multinode + Hadoop)
     * configured to access Azure ABFS storage via credentials supplied through
     * the ABFS_CONTAINER / ABFS_ACCOUNT / ABFS_ACCESS_KEY environment variables.
     */
    @Inject
    public EnvMultinodeAzure(DockerFiles dockerFiles, StandardMultinode standardMultinode, Hadoop hadoop, EnvironmentConfig environmentConfig)
    {
        // Compose on top of the standard multinode and Hadoop base environments.
        super(ImmutableList.of(standardMultinode, hadoop));
        this.dockerFiles = requireNonNull(dockerFiles, "dockerFiles is null");
        configDir = dockerFiles.getDockerFilesHostDirectory("conf/environment/multinode-azure");
        requireNonNull(environmentConfig, "environmentConfig is null");
        hadoopBaseImage = environmentConfig.getHadoopBaseImage();
        hadoopImagesVersion = environmentConfig.getHadoopImagesVersion();
    }

    @Override
    public void extendEnvironment(Environment.Builder builder)
    {
        String dockerImageName = hadoopBaseImage + ":" + hadoopImagesVersion;

        builder.configureContainer(HADOOP, container -> {
            container.setDockerImageName(dockerImageName);
            // core-site overrides are rendered on the host with the real credentials
            // and copied into the container; the init.d script applies them on startup.
            container.withCopyFileToContainer(
                    forHostPath(getCoreSiteOverrideXml()),
                    "/docker/presto-product-tests/conf/environment/multinode-azure/core-site-overrides.xml");
            container.withCopyFileToContainer(
                    forHostPath(dockerFiles.getDockerFilesHostPath("conf/environment/multinode-azure/apply-azure-config.sh")),
                    CONTAINER_HADOOP_INIT_D + "apply-azure-config.sh");
        });

        // Coordinator and workers need the account and key to resolve
        // ${ENV:...} placeholders in hive.properties.
        builder.configureContainer(COORDINATOR, container -> container
                .withEnv("ABFS_ACCOUNT", requireEnv("ABFS_ACCOUNT"))
                .withEnv("ABFS_ACCESS_KEY", requireEnv("ABFS_ACCESS_KEY")));

        builder.configureContainer(WORKER, container -> container
                .withEnv("ABFS_ACCOUNT", requireEnv("ABFS_ACCOUNT"))
                .withEnv("ABFS_ACCESS_KEY", requireEnv("ABFS_ACCESS_KEY")));

        // The tests container addresses storage by container + account, but does
        // not need the access key itself.
        builder.configureContainer(TESTS, container -> container
                .withEnv("ABFS_CONTAINER", requireEnv("ABFS_CONTAINER"))
                .withEnv("ABFS_ACCOUNT", requireEnv("ABFS_ACCOUNT")));

        builder.addConnector("hive", forHostPath(configDir.getPath("hive.properties")));
    }

    /**
     * Renders core-site-overrides-template.xml with the real account/key into a
     * temporary host file that is then copied into the Hadoop container.
     * The file is deleted on JVM exit.
     */
    private Path getCoreSiteOverrideXml()
    {
        try {
            String coreSite = Files.readString(configDir.getPath("core-site-overrides-template.xml"))
                    .replace("%ABFS_ACCOUNT%", requireEnv("ABFS_ACCOUNT"))
                    .replace("%ABFS_ACCESS_KEY%", requireEnv("ABFS_ACCESS_KEY"));
            // NOTE(review): "rwxrwxrwx" makes a world-readable temp file containing the
            // storage access key on the host; presumably required so the container user
            // can read the mounted copy — confirm whether a tighter mode suffices.
            File coreSiteXml = Files.createTempFile("core-site", ".xml", PosixFilePermissions.asFileAttribute(fromString("rwxrwxrwx"))).toFile();
            coreSiteXml.deleteOnExit();
            Files.writeString(coreSiteXml.toPath(), coreSite);
            return coreSiteXml.toPath();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    // Fails fast with a descriptive message when a required env variable is absent.
    private static String requireEnv(String variable)
    {
        return requireNonNull(System.getenv(variable), () -> "environment variable not set: " + variable);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
{
return ImmutableList.of(
testOnEnvironment(EnvMultinode.class)
.withExcludedGroups("large_query", "storage_formats", "storage_formats_detailed", "profile_specific_tests", "tpcds", "hive_compression")
.withExcludedGroups("large_query", "storage_formats", "storage_formats_detailed", "profile_specific_tests", "tpcds", "hive_compression", "azure")
.build());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
return ImmutableList.of(
testOnEnvironment(EnvMultinodeTls.class)
.withGroups("configured_features", "smoke", "cli", "group-by", "join", "tls")
.withExcludedGroups("azure")
.build(),
testOnEnvironment(EnvMultinodeTlsKerberos.class)
.withGroups("configured_features", "cli", "group-by", "join", "tls")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.suite.suites;

import com.google.common.collect.ImmutableList;
import io.trino.tests.product.launcher.env.EnvironmentConfig;
import io.trino.tests.product.launcher.env.environment.EnvMultinodeAzure;
import io.trino.tests.product.launcher.suite.Suite;
import io.trino.tests.product.launcher.suite.SuiteTestRun;

import java.util.List;

import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

public class SuiteAzure
        extends Suite
{
    /**
     * Runs the Azure-tagged product tests (plus configured_features) on the
     * multinode Azure environment.
     */
    @Override
    public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
    {
        SuiteTestRun azureRun = testOnEnvironment(EnvMultinodeAzure.class)
                .withGroups("configured_features", "azure")
                .build();
        return ImmutableList.of(azureRun);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash
# -e: exit on error, -x: trace commands, -u: error on unset vars, -o pipefail: fail on any pipe stage.
set -exuo pipefail

echo "Applying HDP3 core-site configuration overrides"
# Merge the Azure credential overrides into Hadoop's core-site.xml.
# NOTE(review): apply-site-xml-override is presumably a helper shipped in the
# Hadoop base image — confirm it is on PATH in the target container.
apply-site-xml-override /etc/hadoop/conf/core-site.xml "/docker/presto-product-tests/conf/environment/multinode-azure/core-site-overrides.xml"
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
<configuration>

<property>
<name>fs.azure.account.key.%ABFS_ACCOUNT%.dfs.core.windows.net</name>
<value>%ABFS_ACCESS_KEY%</value>
</property>

</configuration>
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
connector.name=hive
hive.metastore.uri=thrift://hadoop-master:9083
hive.config.resources=/docker/presto-product-tests/conf/presto/etc/hive-default-fs-site.xml
hive.azure.abfs-storage-account=${ENV:ABFS_ACCOUNT}
hive.azure.abfs-access-key=${ENV:ABFS_ACCESS_KEY}
hive.non-managed-table-writes-enabled=true
hive.allow-add-column=true
hive.allow-drop-column=true
hive.allow-rename-column=true
hive.allow-comment-table=true
hive.allow-drop-table=true
hive.allow-rename-table=true
hive.translate-hive-views=true
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ public final class TestGroups
public static final String HIVE_ICEBERG_REDIRECTIONS = "hive_iceberg_redirections";
public static final String AUTHORIZATION = "authorization";
public static final String HIVE_COERCION = "hive_coercion";
public static final String AZURE = "azure";
public static final String CASSANDRA = "cassandra";
public static final String SQL_SERVER = "sqlserver";
public static final String LDAP = "ldap";
Expand Down
Loading