Skip to content

Commit

Permalink
fix(query): fix qualify display (#15537)
Browse files Browse the repository at this point in the history
* refactor(query): refactor some commits

* fix(query): fix qualify display

* fix(query): fix qualify display

* fix(query): fix qualify display
  • Loading branch information
sundy-li authored May 15, 2024
1 parent 26a9ae9 commit 18e00ea
Show file tree
Hide file tree
Showing 14 changed files with 522 additions and 40 deletions.
4 changes: 1 addition & 3 deletions .github/workflows/reuse.linux.yml
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ jobs:
- uses: ./.github/actions/test_ee_standalone_linux
timeout-minutes: 10
env:
DATABEND_ENTERPRISE_LICENSE: ${{ steps.license.outputs.license }}
QUERY_DATABEND_ENTERPRISE_LICENSE: ${{ steps.license.outputs.license }}
- name: Upload failure
if: failure()
uses: ./.github/actions/artifact_failure
Expand All @@ -260,7 +260,6 @@ jobs:
- uses: ./.github/actions/test_ee_standalone_background_linux
timeout-minutes: 10
env:
DATABEND_ENTERPRISE_LICENSE: ${{ steps.license.outputs.license }}
QUERY_DATABEND_ENTERPRISE_LICENSE: ${{ steps.license.outputs.license }}
- name: Upload failure
if: failure()
Expand All @@ -282,7 +281,6 @@ jobs:
- uses: ./.github/actions/test_ee_management_mode_linux
timeout-minutes: 10
env:
DATABEND_ENTERPRISE_LICENSE: ${{ steps.license.outputs.license }}
QUERY_DATABEND_ENTERPRISE_LICENSE: ${{ steps.license.outputs.license }}
- name: Upload failure
if: failure()
Expand Down
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 0 additions & 1 deletion src/common/storage/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,6 @@ pub use column_node::ColumnNodes;
mod parquet2;
pub use parquet2::infer_schema_with_extension;
pub use parquet2::read_parquet_metas_in_parallel;
pub use parquet2::read_parquet_schema_async;

pub mod parquet_rs;
pub use parquet_rs::read_metadata_async;
Expand Down
3 changes: 3 additions & 0 deletions src/query/ast/src/ast/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,9 @@ impl Display for SelectStmt {
write_comma_separated_list(f, windows)?;
}

if let Some(qualify) = &self.qualify {
write!(f, " QUALIFY {qualify}")?;
}
Ok(())
}
}
Expand Down
12 changes: 12 additions & 0 deletions src/query/ast/tests/it/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -787,6 +787,18 @@ fn test_statement() {
END;
$$
"#,
r#"
with
abc as (
select
id, uid, eid, match_id, created_at, updated_at
from (
select * from ddd.ccc where score > 0 limit 10
)
qualify row_number() over(partition by uid,eid order by updated_at desc) = 1
)
select * from abc;
"#,
];

for case in cases {
Expand Down
489 changes: 489 additions & 0 deletions src/query/ast/tests/it/testdata/statement.txt

Large diffs are not rendered by default.

22 changes: 7 additions & 15 deletions src/query/service/src/table_functions/infer_schema/parquet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ use databend_common_pipeline_sources::AsyncSource;
use databend_common_pipeline_sources::AsyncSourcer;
use databend_common_sql::binder::resolve_file_location;
use databend_common_storage::init_stage_operator;
use databend_common_storage::read_parquet_schema_async;
use databend_common_storage::read_parquet_schema_async_rs;
use databend_common_storage::StageFilesInfo;
use opendal::Scheme;
Expand Down Expand Up @@ -128,22 +127,15 @@ impl AsyncSource for ParquetInferSchemaSource {
Some(f) => self.ctx.get_file_format(f).await?,
None => stage_info.file_format_params.clone(),
};
let use_parquet2 = self.ctx.get_settings().get_use_parquet2()?;
let schema = match file_format_params.get_type() {
StageFileFormatType::Parquet => {
if use_parquet2 {
let arrow_schema =
read_parquet_schema_async(&operator, &first_file.path).await?;
TableSchema::try_from(&arrow_schema)?
} else {
let arrow_schema = read_parquet_schema_async_rs(
&operator,
&first_file.path,
Some(first_file.size),
)
.await?;
TableSchema::try_from(&arrow_schema)?
}
let arrow_schema = read_parquet_schema_async_rs(
&operator,
&first_file.path,
Some(first_file.size),
)
.await?;
TableSchema::try_from(&arrow_schema)?
}
_ => {
return Err(ErrorCode::BadArguments(
Expand Down
1 change: 0 additions & 1 deletion src/query/storages/iceberg/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ publish = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
databend-common-arrow = { path = "../../../common/arrow" }
databend-common-base = { path = "../../../common/base" }
databend-common-catalog = { path = "../../catalog" }
databend-common-exception = { path = "../../../common/exception" }
Expand Down
13 changes: 1 addition & 12 deletions src/query/storages/iceberg/src/table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@ use std::sync::Arc;
use arrow_schema::Schema as ArrowSchema;
use async_trait::async_trait;
use chrono::Utc;
use databend_common_arrow::arrow::datatypes::Field as Arrow2Field;
use databend_common_arrow::arrow::datatypes::Schema as Arrow2Schema;
use databend_common_catalog::catalog::StorageDescription;
use databend_common_catalog::plan::DataSourcePlan;
use databend_common_catalog::plan::ParquetReadOptions;
Expand Down Expand Up @@ -120,16 +118,7 @@ impl IcebergTable {
.map_err(|e| {
ErrorCode::ReadTableDataError(format!("Cannot convert table metadata: {e:?}"))
})?;

// Build arrow2 schema from arrow schema.
let fields: Vec<Arrow2Field> = arrow_schema
.fields()
.into_iter()
.map(|f| f.into())
.collect();
let arrow2_schema = Arrow2Schema::from(fields);

TableSchema::try_from(&arrow2_schema)
TableSchema::try_from(&arrow_schema)
}

/// create a new table on the table directory
Expand Down
1 change: 1 addition & 0 deletions src/query/storages/parquet/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ databend-storages-common-table-meta = { path = "../common/table_meta" }

arrow-array = { workspace = true }
arrow-buffer = { workspace = true }
arrow-cast = { workspace = true }
arrow-schema = { workspace = true }
async-backtrace = { workspace = true }
async-trait = { workspace = true }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@
use std::collections::HashMap;
use std::sync::Arc;

use databend_common_arrow::arrow::compute::cast::can_cast_types;
use databend_common_arrow::arrow::datatypes::Field as ArrowField;
use arrow_cast::can_cast_types;
use arrow_schema::Field;
use databend_common_catalog::plan::Projection;
use databend_common_catalog::plan::PushDownInfo;
use databend_common_catalog::table_context::TableContext;
Expand Down Expand Up @@ -106,8 +106,8 @@ impl RowGroupReaderForCopy {
if from_field.data_type == to_field.data_type {
expr
} else if can_cast_types(
ArrowField::from(from_field).data_type(),
ArrowField::from(to_field).data_type(),
Field::from(from_field).data_type(),
Field::from(to_field).data_type(),
) {
check_cast(
None,
Expand Down
2 changes: 1 addition & 1 deletion tests/suites/5_ee/00_check/00_0014_license_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@


def get_license():
return os.getenv("DATABEND_ENTERPRISE_LICENSE")
return os.getenv("QUERY_DATABEND_ENTERPRISE_LICENSE")


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion tests/suites/5_ee/01_vacuum/01_0000_ee_vacuum.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def insert_data(name):


def get_license():
return os.getenv("DATABEND_ENTERPRISE_LICENSE")
return os.getenv("QUERY_DATABEND_ENTERPRISE_LICENSE")


def compact_data(name):
Expand Down
2 changes: 1 addition & 1 deletion tests/suites/5_ee/02_data_mask/02_0000_data_mask.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@


def get_license():
return os.getenv("DATABEND_ENTERPRISE_LICENSE")
return os.getenv("QUERY_DATABEND_ENTERPRISE_LICENSE")


# TODO: https://github.com/datafuselabs/databend/pull/15088
Expand Down

0 comments on commit 18e00ea

Please sign in to comment.