-
Notifications
You must be signed in to change notification settings - Fork 16.4k
fix(chart): Supporting custom SQL as temporal x-axis column with filter #25126
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 11 commits
5b62640
8bc5c72
a191583
cf8c0ba
f65d707
08a50ce
a76ed47
d16ffbb
16f941d
91788c1
bb52a1a
5c79619
64e01b6
672eea4
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -51,6 +51,7 @@ | |
| from superset.superset_typing import AdhocColumn | ||
| from superset.utils.core import ( | ||
| AnnotationType, | ||
| backend, | ||
| get_example_default_schema, | ||
| AdhocMetricExpressionType, | ||
| ExtraFiltersReasonType, | ||
|
|
@@ -943,6 +944,71 @@ def test_chart_data_get(self): | |
| assert data["result"][0]["status"] == "success" | ||
| assert data["result"][0]["rowcount"] == 2 | ||
|
|
||
| @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") | ||
| def test_chart_data_get_with_x_axis_using_custom_sql(self): | ||
| """ | ||
| Chart data API: Test GET endpoint | ||
| """ | ||
| chart = db.session.query(Slice).filter_by(slice_name="Genders").one() | ||
| chart.query_context = json.dumps( | ||
| { | ||
| "datasource": {"id": chart.table.id, "type": "table"}, | ||
| "force": False, | ||
| "queries": [ | ||
| { | ||
| "time_range": "1900-01-01T00:00:00 : 2000-01-01T00:00:00", | ||
| "granularity": "ds", | ||
| "filters": [ | ||
| {"col": "ds", "op": "TEMPORAL_RANGE", "val": "No filter"} | ||
| ], | ||
| "extras": { | ||
| "having": "", | ||
| "where": "", | ||
| }, | ||
| "applied_time_extras": {}, | ||
| "columns": [ | ||
| { | ||
| "columnType": "BASE_AXIS", | ||
| "datasourceWarning": False, | ||
| "expressionType": "SQL", | ||
| "label": "My column", | ||
| "sqlExpression": "ds", | ||
| "timeGrain": "P1W", | ||
| } | ||
| ], | ||
|
Comment on lines
+960
to
+978
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Basically, the "granularity" and "Axis-column" shouldn't be sent concurrently from the frontend. The "granularity" is a legacy Druid "concept" (and this concept has been wrapped under the hood in "modern" Druid), so in my original design I would like to totally remove "granularity" in Superset; the "granularity" means a temporal column in columns (or dimensions).
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The "time_range" and "filters" in query_object follow the same logic as before. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
I'm trying to fix a bug related to granularity. Basically, the temporal granularity column should be renamed to "__timestamp" like what the legacy time series line chart did before (the alias after GROUP BY should not be the same as the raw column name in ClickHouse), but the current chart doesn't do this, so granularity is a little different from other temporal columns (at least for users). If "granularity" should be removed, should this bug be fixed on the frontend? It seems like the time column as x-axis has been completely unusable for a very long time. Thankfully, this PR made custom SQL work. |
||
| "metrics": ["sum__num"], | ||
| "orderby": [["sum__num", False]], | ||
| "annotation_layers": [], | ||
| "row_limit": 50000, | ||
| "timeseries_limit": 0, | ||
| "order_desc": True, | ||
| "url_params": {}, | ||
| "custom_params": {}, | ||
| "custom_form_data": {}, | ||
| } | ||
| ], | ||
| "form_data": { | ||
| "x_axis": { | ||
| "datasourceWarning": False, | ||
| "expressionType": "SQL", | ||
| "label": "My column", | ||
| "sqlExpression": "ds", | ||
| } | ||
| }, | ||
| "result_format": "json", | ||
| "result_type": "full", | ||
| } | ||
| ) | ||
| rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", "get_data") | ||
| assert rv.mimetype == "application/json" | ||
| data = json.loads(rv.data.decode("utf-8")) | ||
| assert data["result"][0]["status"] == "success" | ||
|
|
||
| if backend() == "presto": | ||
| assert data["result"][0]["rowcount"] == 41 | ||
| else: | ||
| assert data["result"][0]["rowcount"] == 40 | ||
|
|
||
| @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") | ||
| def test_chart_data_get_forced(self): | ||
| """ | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.