2 changes: 2 additions & 0 deletions doctest/build.gradle
@@ -66,6 +66,8 @@ task startPrometheus(type: SpawnProcessTask) {

 //evaluationDependsOn(':')
 task startOpenSearch(type: SpawnProcessTask) {
+    dependsOn ':opensearch-sql-plugin:bundlePlugin'
+
     if( getOSFamilyType() == "windows") {
         command "${path}\\gradlew.bat -p ${plugin_path} runRestTestCluster"
     }
29 changes: 23 additions & 6 deletions doctest/test_docs.py
@@ -11,6 +11,7 @@
 import subprocess
 import sys
 import unittest
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from functools import partial
 
 import click
@@ -205,6 +206,7 @@ class TestDataManager:

     def __init__(self):
         self.client = OpenSearch([ENDPOINT], verify_certs=True)
+        self.is_loaded = False
 
     def load_file(self, filename, index_name):
         mapping_file_path = './test_mapping/' + filename
@@ -218,18 +220,33 @@ def load_json():
                 for line in f:
                     yield json.loads(line)
 
-        helpers.bulk(self.client, load_json(), stats_only=True, index=index_name, refresh='wait_for')
+        helpers.bulk(self.client, load_json(), stats_only=True, index=index_name, refresh="wait_for")
 
     def load_all_test_data(self):
-        for index_name, filename in TEST_DATA.items():
+        if self.is_loaded:
+            return
+
+        def load_index(index_name, filename):
             if filename is not None:
                 self.load_file(filename, index_name)
+            else:
+                debug(f"Skipping index '{index_name}' - filename is None")
 
-    def cleanup_indices(self):
-        indices_to_delete = list(TEST_DATA.keys())
-        self.client.indices.delete(index=indices_to_delete, ignore_unavailable=True)
+        with ThreadPoolExecutor() as executor:
+            futures = {
+                executor.submit(load_index, index_name, filename): index_name
+                for index_name, filename in TEST_DATA.items()
+            }
+
+            for future in as_completed(futures):
+                index_name = futures[future]
+                try:
+                    future.result()
+                except Exception as e:
+                    debug(f"Error loading index '{index_name}': {str(e)}")
+                    raise
+
+        self.is_loaded = True
 
 
 def sql_cli_transform(s):
@@ -282,7 +299,7 @@ def set_up_test_indices_without_calcite(test):


 def tear_down(test):
-    get_test_data_manager().cleanup_indices()
+    pass
Member


Thanks for the PR, I was thinking about improving this as well. One question: will this lead to cascading test failures if the first test fails to load indexes?

@Swiddis (Collaborator, Author) Sep 3, 2025


We run doctest with --fastfail, so if the first test fails, the rest won't even run. Same behavior as now if any individual run has an index failure. (Here, the indexing exception would be raised via future.result().)
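For context on the answer above, here is a minimal runnable sketch of the same error-propagation pattern: `as_completed()` yields futures as they finish, and `future.result()` re-raises any worker exception in the caller, so the first failed index load aborts the whole run. The `TEST_DATA` entries and `load_index` body below are hypothetical stand-ins for illustration, not code from the PR.

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

# Hypothetical stand-ins; the real TEST_DATA and load_file live in test_docs.py.
TEST_DATA = {"accounts": "accounts.json", "broken": "missing.json"}

def load_index(index_name, filename):
    # Stand-in for the real bulk load; fail for one index to show propagation.
    if index_name == "broken":
        raise RuntimeError(f"bulk load failed for '{index_name}'")
    print(f"loaded {index_name} from {filename}")

with ThreadPoolExecutor() as executor:
    futures = {
        executor.submit(load_index, name, fn): name
        for name, fn in TEST_DATA.items()
    }
    for future in as_completed(futures):
        name = futures[future]
        try:
            future.result()  # re-raises the worker's exception in the caller
        except Exception as exc:
            print(f"error loading index '{name}': {exc}")
            raise  # fail fast, mirroring the try/except/raise in the diff above
```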



docsuite = partial(doctest.DocFileSuite,