Skip to content

Commit 48c3263

Browse files
authored
fix: tpcds spill test fail (#17597)
* split queries.test * fix * update * x * x * x * x * x * x * x * xx * x * x * x * x
1 parent 675e0f5 commit 48c3263

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

116 files changed

+18010
-17850
lines changed

.github/actions/test_sqllogic_cluster_linux/action.yml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,11 @@ inputs:
99
description: "logic test handlers, mysql,http,clickhouse"
1010
required: false
1111
default: ""
12+
parallel:
13+
description: "logic test parallel"
14+
required: false
15+
default: ""
16+
1217
runs:
1318
using: "composite"
1419
steps:
@@ -20,4 +25,6 @@ runs:
2025
shell: bash
2126
env:
2227
TEST_HANDLERS: ${{ inputs.handlers }}
28+
TEST_PARALLEL: ${{ inputs.parallel }}
29+
TEST_EXT_ARGS: '--skip_file tpcds_spill_1.test,tpcds_spill_2.test,tpcds_spill_3.test'
2330
run: bash ./scripts/ci/ci-run-sqllogic-tests-cluster.sh ${{ inputs.dirs }}

.github/actions/test_sqllogic_stage/action.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ inputs:
1313
description: "storage backend for stage, choices: s3,fs"
1414
required: true
1515
default: ""
16-
deducp:
16+
dedup:
1717
description: "path type for dedup when copy, choices: full_path,sub_path"
1818
required: true
1919
default: ""

.github/actions/test_sqllogic_standalone_linux/action.yml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,20 @@ runs:
2929
with:
3030
artifacts: sqllogictests,meta,query
3131

32+
- uses: actions/github-script@v7
33+
id: ext-args
34+
env:
35+
DIRS: ${{ inputs.dirs }}
36+
with:
37+
script: require('.github/actions/test_sqllogic_standalone_linux/script.js')(core)
38+
3239
- name: Run sqllogic Tests with Standalone mode
3340
if: inputs.storage-format == 'all' || inputs.storage-format == 'parquet'
3441
shell: bash
3542
env:
3643
TEST_HANDLERS: ${{ inputs.handlers }}
3744
TEST_PARALLEL: ${{ inputs.parallel }}
45+
TEST_EXT_ARGS: ${{ steps.ext-args.outputs.parquet }}
3846
CACHE_ENABLE_TABLE_META_CACHE: ${{ inputs.enable_table_meta_cache}}
3947
run: bash ./scripts/ci/ci-run-sqllogic-tests.sh ${{ inputs.dirs }}
4048

@@ -44,5 +52,6 @@ runs:
4452
env:
4553
TEST_HANDLERS: ${{ inputs.handlers }}
4654
TEST_PARALLEL: ${{ inputs.parallel }}
55+
TEST_EXT_ARGS: '--skip_file tpcds_spill_1.test,tpcds_spill_2.test,tpcds_spill_3.test'
4756
CACHE_ENABLE_TABLE_META_CACHE: ${{ inputs.enable_table_meta_cache}}
4857
run: bash ./scripts/ci/ci-run-sqllogic-tests-native.sh ${{ inputs.dirs }}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
module.exports = (core) => {
2+
switch (process.env.DIRS) {
3+
case 'tpcds':
4+
const parquet = [
5+
'--skip_file tpcds_spill_2.test,tpcds_spill_3.test',
6+
'--skip_file tpcds_spill_1.test,tpcds_spill_3.test',
7+
'--skip_file tpcds_spill_1.test,tpcds_spill_2.test',
8+
][Date.now() % 3];
9+
core.setOutput('parquet', parquet)
10+
return
11+
}
12+
}

.github/workflows/reuse.sqllogic.yml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ jobs:
5050
- { dirs: "crdb", runner: "2c8g" }
5151
- { dirs: "base", runner: "2c8g" }
5252
- { dirs: "ydb", runner: "2c8g" }
53-
- { dirs: "tpcds", runner: "2c8g" }
53+
- { dirs: "tpcds", runner: "4c16g", parallel: "1" }
5454
- { dirs: "tpch", runner: "2c8g" }
5555
- { dirs: "standalone", runner: "2c8g" }
5656
handler:
@@ -59,10 +59,11 @@ jobs:
5959
steps:
6060
- uses: actions/checkout@v4
6161
- uses: ./.github/actions/test_sqllogic_standalone_linux
62-
timeout-minutes: 15
62+
timeout-minutes: 20
6363
with:
6464
dirs: ${{ matrix.tests.dirs }}
6565
handlers: ${{ matrix.handler }}
66+
parallel: ${{ matrix.tests.parallel }}
6667
storage-format: all
6768
- name: Upload failure
6869
if: failure()
@@ -192,6 +193,7 @@ jobs:
192193
with:
193194
dirs: ${{ matrix.tests.dirs }}
194195
handlers: ${{ matrix.handler }}
196+
parallel: ${{ matrix.tests.parallel }}
195197
- name: Upload failure
196198
if: failure()
197199
uses: ./.github/actions/artifact_failure

scripts/ci/ci-run-sqllogic-tests-cluster.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,4 +22,4 @@ fi
2222
echo "Run suites using argument: $RUN_DIR"
2323

2424
echo "Starting databend-sqllogic tests"
25-
target/${BUILD_PROFILE}/databend-sqllogictests --handlers ${TEST_HANDLERS} ${RUN_DIR} --enable_sandbox --parallel ${TEST_PARALLEL} --skip_file tpcds_q64.test,tpcds_join_order.test
25+
target/${BUILD_PROFILE}/databend-sqllogictests --handlers ${TEST_HANDLERS} ${RUN_DIR} --enable_sandbox --parallel ${TEST_PARALLEL} ${TEST_EXT_ARGS}

scripts/ci/ci-run-sqllogic-tests-native.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,4 +20,4 @@ fi
2020
echo "Run suites using argument: $RUN_DIR"
2121

2222
echo "Starting databend-sqllogic tests"
23-
target/${BUILD_PROFILE}/databend-sqllogictests --handlers ${TEST_HANDLERS} ${RUN_DIR} --skip_dir management,cluster,explain,tpch,ee --enable_sandbox --parallel ${TEST_PARALLEL}
23+
target/${BUILD_PROFILE}/databend-sqllogictests --handlers ${TEST_HANDLERS} ${RUN_DIR} --skip_dir management,cluster,explain,tpch,ee --enable_sandbox --parallel ${TEST_PARALLEL} ${TEST_EXT_ARGS}

scripts/ci/ci-run-sqllogic-tests.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,6 @@ echo "Run suites using argument: $RUN_DIR"
2121

2222
echo "Starting databend-sqllogic tests"
2323
if [ -z "$RUN_DIR" ]; then
24-
target/${BUILD_PROFILE}/databend-sqllogictests --run_dir temp_table --enable_sandbox --parallel ${TEST_PARALLEL}
24+
target/${BUILD_PROFILE}/databend-sqllogictests --run_dir temp_table --enable_sandbox --parallel ${TEST_PARALLEL} ${TEST_EXT_ARGS}
2525
fi
26-
target/${BUILD_PROFILE}/databend-sqllogictests --handlers ${TEST_HANDLERS} ${RUN_DIR} --skip_dir management,explain_native,ee,temp_table --enable_sandbox --parallel ${TEST_PARALLEL}
26+
target/${BUILD_PROFILE}/databend-sqllogictests --handlers ${TEST_HANDLERS} ${RUN_DIR} --skip_dir management,explain_native,ee,temp_table --enable_sandbox --parallel ${TEST_PARALLEL} ${TEST_EXT_ARGS}

src/query/service/src/pipelines/processors/transforms/hash_join/hash_join_probe_state.rs

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -114,11 +114,14 @@ impl HashJoinProbeState {
114114
}
115115
let hash_key_types = probe_keys
116116
.iter()
117-
.map(|expr| {
118-
expr.as_expr(&BUILTIN_FUNCTIONS)
119-
.data_type()
120-
.remove_nullable()
121-
.clone()
117+
.zip(&hash_join_state.hash_join_desc.is_null_equal)
118+
.map(|(expr, is_null_equal)| {
119+
let expr = expr.as_expr(&BUILTIN_FUNCTIONS);
120+
if *is_null_equal {
121+
expr.data_type().clone()
122+
} else {
123+
expr.data_type().remove_nullable()
124+
}
122125
})
123126
.collect::<Vec<_>>();
124127
let method = DataBlock::choose_hash_method_with_types(&hash_key_types)?;

src/query/service/src/pipelines/processors/transforms/hash_join/hash_join_spiller.rs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ pub struct HashJoinSpiller {
4040
partition_buffer: PartitionBuffer,
4141
partition_threshold: usize,
4242
join_type: JoinType,
43+
join_state: Arc<HashJoinState>,
4344
is_build_side: bool,
4445
func_ctx: FunctionContext,
4546
/// Used for partition.
@@ -95,6 +96,7 @@ impl HashJoinSpiller {
9596
hash_keys,
9697
hash_method,
9798
join_type,
99+
join_state,
98100
func_ctx: ctx.get_function_context()?,
99101
is_build_side,
100102
next_restore_file: 0,
@@ -235,6 +237,7 @@ impl HashJoinSpiller {
235237
&self.hash_method,
236238
join_type,
237239
self.is_build_side,
240+
&self.join_state.hash_join_desc.is_null_equal,
238241
&mut hashes,
239242
)?;
240243
Ok(hashes)

0 commit comments

Comments
 (0)