60 changes: 60 additions & 0 deletions .github/workflows/gpu-tests.yml
@@ -0,0 +1,60 @@
name: GPU Tests

on:
workflow_call:

jobs:

prep-testbed-gpu:
runs-on: ubuntu-24.04

steps:
- uses: actions/checkout@v4

- name: Setup Python & Poetry Environment
uses: exasol/python-toolbox/.github/actions/[email protected]

- id: set-matrix
run: |
ALL_TESTS=`poetry run -- nox -s integration-test-list -- --test-set gpu-only`
echo "matrix=$ALL_TESTS" >> "$GITHUB_OUTPUT"
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}

test-gpu:
needs: prep-testbed-gpu
strategy:
fail-fast: false
matrix:
python_version:
- "3.10"
test-path: ${{fromJson(needs.prep-testbed-gpu.outputs.matrix)}}
runs-on:
labels: int-linux-x64-4core-gpu-t4-ubuntu24.04-1
name: GPU Test
steps:
- uses: actions/checkout@v4

# Cannot use exasol/python-toolbox/.github/actions/python-environment here,
# because pipx is not installed on the int-linux-x64-4core-gpu-t4-ubuntu24.04-1 runner.
- name: Setup Python ('${{ matrix.python_version }}')
uses: actions/setup-python@v5
with:
python-version: '${{ matrix.python_version }}'

- name: Install pipx and poetry
run: |
pip install pipx
pipx install poetry=="2.1.2"
echo "$HOME/.local/bin" >> $GITHUB_PATH

- name: Setup Poetry
run: |
poetry env use python3
poetry install

- name: Allow unprivileged user namespaces
run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0

- name: Run ${{ matrix.test-path.name }}
run: poetry run -- python "${{ matrix.test-path.path }}"
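For reference, the `set-matrix` step above relies on the `integration-test-list` nox session (added in `noxfile.py` below) printing a JSON list of `{path, name}` objects on stdout, which `fromJson()` then fans out into one job per test file. A minimal sketch of that shape, with a hypothetical file name:

```python
# Minimal sketch (hypothetical file name): the JSON that
# `nox -s integration-test-list -- --test-set gpu-only` is expected to print,
# and that the workflow consumes via fromJson().
import json

matrix = [
    {"path": "test/test_gpu_smoke.py", "name": "test_gpu_smoke"},
]
print(json.dumps(matrix))
```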
53 changes: 53 additions & 0 deletions .github/workflows/integration-tests.yml
@@ -0,0 +1,53 @@
name: Integration Tests

on:
workflow_call:
secrets:
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
jobs:

prep-testbed:
runs-on: ubuntu-24.04

steps:
- uses: actions/checkout@v4

- name: Setup Python & Poetry Environment
uses: exasol/python-toolbox/.github/actions/[email protected]

- id: set-matrix
run: |
ALL_TESTS=`poetry run -- nox -s integration-test-list -- --test-set default`
echo "matrix=$ALL_TESTS" >> "$GITHUB_OUTPUT"
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}

test-all:
needs: prep-testbed
strategy:
fail-fast: false
matrix:
python_version:
- "3.10"
test-path: ${{fromJson(needs.prep-testbed.outputs.matrix)}}
runs-on: ubuntu-24.04
name: ${{ matrix.test-path.name }}
steps:
- uses: actions/checkout@v4

- name: Setup Python & Poetry Environment
uses: exasol/python-toolbox/.github/actions/[email protected]
with:
python-version: '${{ matrix.python_version }}'

- name: Allow unprivileged user namespaces
run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0

- name: Run test ${{ matrix.test-path.name }}
run: poetry run -- python ${{ matrix.test-path.path }}
env: # Pass the secrets to the test as environment variables
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWD: ${{ secrets.DOCKER_PASSWORD }}
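The workflow only exports `DOCKER_USER` and `DOCKER_PASSWD`; how a test consumes them is outside this diff. A hedged sketch of one plausible consumer using the Docker SDK for Python (the login step is an assumption, not part of this PR):

```python
# Hedged sketch: one way a test script could pick up the credentials exported above,
# e.g. to authenticate docker pulls. The login call is an assumption, not part of this PR.
import os

import docker

user = os.environ.get("DOCKER_USER")
password = os.environ.get("DOCKER_PASSWD")
if user and password:
    docker.from_env().login(username=user, password=password)
```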
51 changes: 10 additions & 41 deletions .github/workflows/slow-checks.yml
@@ -27,46 +27,15 @@ jobs:
- name: Tests
run: echo "Slow tests approved"

prep-testbed:
needs: slow-tests-approval
runs-on: ubuntu-24.04

steps:
- uses: actions/checkout@v4

- name: Setup Python & Poetry Environment
uses: exasol/python-toolbox/.github/actions/[email protected]

- id: set-matrix
run: |
ALL_TESTS=`poetry run -- nox -s integration-test-list`
echo "matrix=$ALL_TESTS" >> "$GITHUB_OUTPUT"
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}

test-all:
needs: prep-testbed
strategy:
fail-fast: false
matrix:
python_version:
- "3.10"
test-path: ${{fromJson(needs.prep-testbed.outputs.matrix)}}
runs-on: ubuntu-24.04
name: ${{ matrix.test-path.name }}
steps:
- uses: actions/checkout@v4

- name: Setup Python & Poetry Environment
uses: exasol/python-toolbox/.github/actions/[email protected]
with:
python-version: '${{ matrix.python_version }}'

- name: Allow unprivileged user namespaces
run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
integration-tests:
needs: slow-tests-approval
name: Integrations
uses: ./.github/workflows/integration-tests.yml
secrets: inherit

- name: Run test ${{ matrix.test-path.name }}
run: poetry run -- python ${{ matrix.test-path.path }}
env: # Set the secret as an input
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWD: ${{ secrets.DOCKER_PASSWORD }}
gpu-tests:
needs: slow-tests-approval
name: GPU
uses: ./.github/workflows/gpu-tests.yml
secrets: inherit
4 changes: 4 additions & 0 deletions doc/changes/unreleased.md
@@ -6,3 +6,7 @@

## Refactorings
- #252: Update GitHub workflows from Exasol toolbox

## Features

- #283: Added GPU support for `run-db-test`
7 changes: 7 additions & 0 deletions doc/user_guide/user_guide.md
@@ -154,6 +154,13 @@ With this additional option, `exaslct` won't build and export the container agai
1. `exaslct` won't check whether the given container file is compatible with the given flavor path. If it is not, the tests will probably fail.
2. As `exaslct` cannot check if the file was changed during different executions of `run-db-test` with options `--reuse-uploaded-container` or `--reuse-test-environment`, `exaslct` will always skip the upload if those options are set and the file already exists in BucketFS.

### Testing with an accelerator

If your test environment provides the required hardware and drivers (for NVIDIA, a GPU plus the NVIDIA container runtime on the Docker host), you can run tests with an accelerator. Currently, only `nvidia` is supported:
```bash
exaslct run-db-test --flavor-path=flavors/<flavor-name> --accelerator nvidia
```
This launches the underlying docker-db with the options required to enable the `NVIDIA` accelerator: the `nvidia` Docker runtime, the `NVIDIA_VISIBLE_DEVICES=all` environment variable, and the `-enableAcceleratorDeviceDetection=1` database parameter.
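The same run can be triggered from Python through the API (a sketch; the flavor path is a placeholder and the call assumes the `Accelerator.NVIDIA` member from `exasol.slc.models.accelerator`):

```python
# Sketch: API equivalent of the CLI call above (flavor path is a placeholder).
from exasol.slc import api
from exasol.slc.models.accelerator import Accelerator

result = api.run_db_test(
    flavor_path=("flavors/<flavor-name>",),
    accelerator=Accelerator.NVIDIA,
)
```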

## Cleaning up after you are finished

9 changes: 9 additions & 0 deletions exasol/slc/api/run_db_tests.py
@@ -35,6 +35,7 @@
from exasol.slc.internal.tasks.test.test_container_content import (
build_test_container_content,
)
from exasol.slc.models.accelerator import Accelerator, defaultAccelerator
from exasol.slc.models.compression_strategy import (
CompressionStrategy,
defaultCompressionStrategy,
@@ -103,6 +104,7 @@ def run_db_test(
log_level: Optional[str] = None,
use_job_specific_log_file: bool = True,
compression_strategy: CompressionStrategy = defaultCompressionStrategy(),
accelerator: Accelerator = defaultAccelerator(),
) -> AllTestsResult:
"""
This command runs the integration tests in a local docker-db.
@@ -155,6 +157,12 @@ def run_db_test(
if external_exasol_ssh_port is None:
raise api_errors.MissingArgumentError("external_exasol_ssh_port")

docker_runtime = None
if accelerator == Accelerator.NVIDIA:
additional_db_parameter += ("-enableAcceleratorDeviceDetection=1",)
docker_runtime = "nvidia"
docker_environment_variable += ("NVIDIA_VISIBLE_DEVICES=all",)

def root_task_generator() -> DependencyLoggerBaseTask:
return generate_root_task(
task_class=TestContainer,
@@ -199,6 +207,7 @@ def root_task_generator() -> DependencyLoggerBaseTask:
additional_db_parameter=additional_db_parameter,
test_container_content=build_test_container_content(test_container_folder),
compression_strategy=compression_strategy,
docker_runtime=docker_runtime,
docker_environment_variables=docker_environment_variable,
)

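At the Docker level, the `nvidia` branch above amounts to starting the docker-db container with the `nvidia` runtime and `NVIDIA_VISIBLE_DEVICES=all`, while `-enableAcceleratorDeviceDetection=1` is passed to the database as an additional DB parameter. A rough, illustrative sketch with the Docker SDK for Python (not the code path exaslct actually uses; the image tag is a placeholder):

```python
# Rough illustration only: what docker_runtime / docker_environment_variables
# correspond to when a container is started. exaslct wires these values through
# its test-environment machinery instead of calling Docker directly like this.
import docker

client = docker.from_env()
container = client.containers.run(
    "exasol/docker-db:latest",                    # placeholder image tag
    runtime="nvidia",                             # docker_runtime
    environment=["NVIDIA_VISIBLE_DEVICES=all"],   # docker_environment_variables
    detach=True,
)
```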
19 changes: 19 additions & 0 deletions exasol/slc/models/accelerator.py
@@ -0,0 +1,19 @@
from enum import Enum
from typing import List


class Accelerator(Enum):
"""
Defines the possible accelerator values for the `run-db-test` command.
"""

NONE = "none"
NVIDIA = "nvidia"


def defaultAccelerator() -> Accelerator:
return Accelerator.NONE


def acceleratorValues() -> List[str]:
return [a.value for a in Accelerator]
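A quick illustration of how the helpers behave:

```python
# Illustrative usage of the enum and its helpers defined above.
from exasol.slc.models.accelerator import (
    Accelerator,
    acceleratorValues,
    defaultAccelerator,
)

assert acceleratorValues() == ["none", "nvidia"]
assert defaultAccelerator() is Accelerator.NONE
```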
14 changes: 14 additions & 0 deletions exasol/slc/tool/commands/run_db_tests.py
@@ -25,6 +25,11 @@

from exasol.slc import api
from exasol.slc.api import api_errors
from exasol.slc.models.accelerator import (
Accelerator,
acceleratorValues,
defaultAccelerator,
)
from exasol.slc.models.compression_strategy import CompressionStrategy
from exasol.slc.tool.cli import cli
from exasol.slc.tool.options.export_options import export_options
@@ -149,6 +154,13 @@
@add_options(system_options)
@add_options(luigi_logging_options)
@add_options(export_options)
@click.option(
"--accelerator",
type=click.Choice(acceleratorValues()),
show_default=True,
default=defaultAccelerator().value,
help=f"""Accelerator to be enabled for tests in docker-db. Possible values: {acceleratorValues()}""",
)
def run_db_test(
flavor_path: Tuple[str, ...],
release_goal: Tuple[str, ...],
@@ -209,6 +221,7 @@ def run_db_test(
log_level: Optional[str],
use_job_specific_log_file: bool,
compression_strategy: str,
accelerator: str,
):
"""
This command runs the integration tests in a local docker-db.
@@ -279,6 +292,7 @@ def run_db_test(
use_job_specific_log_file=use_job_specific_log_file,
compression_strategy=CompressionStrategy[compression_strategy.upper()],
docker_environment_variable=docker_environment_variable,
accelerator=Accelerator[accelerator.upper()],
)
if result.command_line_output_path.exists():
with result.command_line_output_path.open("r") as f:
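For reference, the `--accelerator` option validates the string against `acceleratorValues()` and the command body maps it back onto the enum by member name. A standalone toy command mirroring that round trip (assumes the `NVIDIA` member-name fix above; not the real CLI):

```python
# Toy command (illustrative, not the real CLI): click validates the string against
# ["none", "nvidia"], then Accelerator[value.upper()] turns it back into the enum.
import click

from exasol.slc.models.accelerator import (
    Accelerator,
    acceleratorValues,
    defaultAccelerator,
)


@click.command()
@click.option(
    "--accelerator",
    type=click.Choice(acceleratorValues()),
    show_default=True,
    default=defaultAccelerator().value,
)
def demo(accelerator: str) -> None:
    click.echo(Accelerator[accelerator.upper()])  # e.g. "nvidia" -> Accelerator.NVIDIA
```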
31 changes: 30 additions & 1 deletion noxfile.py
@@ -1,4 +1,6 @@
import json
from argparse import ArgumentParser
from enum import Enum

import nox

@@ -11,11 +13,38 @@
from noxconfig import PROJECT_CONFIG


class TestSet(Enum):
GPU_ONLY = "gpu-only"
DEFAULT = "default"


@nox.session(name="integration-test-list", python=False)
def run_integration_test_list(session: nox.Session):
"""
Prints the test files directly under the `test` directory (without walking subdirectories) as a JSON list.
"""
test_set_values = [ts.value for ts in TestSet]
parser = ArgumentParser(
usage=f"nox -s {session.name} -- --test-set {{{','.join(test_set_values)}}}"
)
parser.add_argument(
"--test-set",
type=TestSet,
required=True,
help="Test set name",
)
args = parser.parse_args(session.posargs)
test_path = PROJECT_CONFIG.root / "test"
tests = [{"path": str(t), "name": t.stem} for t in test_path.glob("test_*.py")]
if args.test_set == TestSet.GPU_ONLY:
tests = [
{"path": str(t), "name": t.stem}
for t in test_path.glob("test_*.py")
if "gpu" in t.name
]
else:
tests = [
{"path": str(t), "name": t.stem}
for t in test_path.glob("test_*.py")
if "gpu" not in t.name
]
print(json.dumps(tests))
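The argument parsing above relies on `argparse` calling `type` with the raw string, so `TestSet("gpu-only")` resolves to the matching enum member. A standalone check (illustrative):

```python
# Standalone check (illustrative) of the --test-set parsing used by the session above.
from argparse import ArgumentParser
from enum import Enum


class TestSet(Enum):
    GPU_ONLY = "gpu-only"
    DEFAULT = "default"


parser = ArgumentParser()
parser.add_argument("--test-set", type=TestSet, required=True)
args = parser.parse_args(["--test-set", "gpu-only"])
assert args.test_set is TestSet.GPU_ONLY
```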