diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 304fd0ebb6bc..a83a59d4e73e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,11 +16,21 @@ # repos: - repo: https://github.com/ambv/black - rev: stable + rev: 19.3b0 hooks: - id: black language_version: python3 +- repo: https://github.com/asottile/seed-isort-config + rev: v1.9.3 + hooks: + - id: seed-isort-config + +- repo: https://github.com/pre-commit/mirrors-isort + rev: v4.3.21 + hooks: + - id: isort + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.2.3 hooks: @@ -30,8 +40,3 @@ repos: - id: check-added-large-files - id: check-yaml - id: debug-statements - -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.7.1 - hooks: - - id: flake8 diff --git a/.travis.yml b/.travis.yml index ec11b25b4b52..000d2129deec 100644 --- a/.travis.yml +++ b/.travis.yml @@ -75,7 +75,10 @@ jobs: env: TOXENV=black - language: python python: 3.6 - env: TOXENV=flake8 + env: TOXENV=isort + - language: python + python: 3.6 + env: TOXENV=mypy - language: python python: 3.6 env: TOXENV=py36-sqlite diff --git a/CHANGELOG.md b/CHANGELOG.md index cbddcb669673..0a194ad477bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,142 @@ under the License. --> ## Change Log +### 0.35.0 (2019/10/31 11:12 +00:00) +- [#8436](https://github.com/apache/incubator-superset/pull/8436) [fix] Updating parse_human_timedelta typing (#8436) (@john-bodley) +- [#8423](https://github.com/apache/incubator-superset/pull/8423) chore: disable another flaky cypress test (#8423) (@mistercrunch) +- [#8331](https://github.com/apache/incubator-superset/pull/8331) Add improved typed casting to BigQuery (#8331) (@betodealmeida) +- [#8353](https://github.com/apache/incubator-superset/pull/8353) [tests] Fix, removing deprecated function for future Python release compatibility. 
(#8353) (@MarcusSorealheis) +- [#8413](https://github.com/apache/incubator-superset/pull/8413) fixed typo in installation instructions (#8413) (@francishemingway) +- [#8409](https://github.com/apache/incubator-superset/pull/8409) [ci] Deprecate flake8 (#8409) (@john-bodley) +- [#7702](https://github.com/apache/incubator-superset/pull/7702) [sqla] Enforcing ISO 8601 date/timestamp formats (#7702) (@john-bodley) +- [#8370](https://github.com/apache/incubator-superset/pull/8370) [Config] Cache static resources (#8370) (@etr2460) +- [#8367](https://github.com/apache/incubator-superset/pull/8367) [docs] CTAS on PostgreSQL needs commit to apply (#8367) (@dpgaspar) +- [#8399](https://github.com/apache/incubator-superset/pull/8399) Update Slack invitation link (#8399) (@robdiciuccio) +- [#8371](https://github.com/apache/incubator-superset/pull/8371) [fix] make names non-nullable (#8371) (@serenajiang) +- [#8390](https://github.com/apache/incubator-superset/pull/8390) [form-data] Deleting obsolete Superset.get_form_data method (#8390) (@john-bodley) +- [#8376](https://github.com/apache/incubator-superset/pull/8376) Add 0.34.1 PRs to CHANGELOG (#8376) (@villebro) +- [#8372](https://github.com/apache/incubator-superset/pull/8372) [migration] Migrating since/until to time_range (#8372) (@john-bodley) +- [#8374](https://github.com/apache/incubator-superset/pull/8374) [Cypress] Skip another flaky sqllab test (#8374) (@etr2460) +- [#8369](https://github.com/apache/incubator-superset/pull/8369) Add incubating to NOTICE (#8369) (@villebro) +- [#8368](https://github.com/apache/incubator-superset/pull/8368) [Docs] Put superset users in abc order (#8368) (@etr2460) +- [#8365](https://github.com/apache/incubator-superset/pull/8365) Update README.md (#8365) (@christophlingg) +- [#8332](https://github.com/apache/incubator-superset/pull/8332) Revert "[fix] make datasource names non-nullable (#8332)" (#8363) (@serenajiang) +- [#8345](https://github.com/apache/incubator-superset/pull/8345) 
[dashboard] show less filter indicators (#8345) (@graceguo-supercat) +- [#8330](https://github.com/apache/incubator-superset/pull/8330) Fix pypi link and replace gitter badge with slack (#8330) (@villebro) +- [#7992](https://github.com/apache/incubator-superset/pull/7992) Revert "[init] Setting up cache before registering blueprints (#7992)" (#8005) (@john-bodley) +- [#8343](https://github.com/apache/incubator-superset/pull/8343) Add support for Exasol (#8343) (@kevang) +- [#8351](https://github.com/apache/incubator-superset/pull/8351) fix saving new sql lab queries (#8351) (@suddjian) +- [#8350](https://github.com/apache/incubator-superset/pull/8350) Remove unused methods from Presto (#8350) (@betodealmeida) +- [#8338](https://github.com/apache/incubator-superset/pull/8338) Fix lint in `superset/db_engine_spec` (#8338) (@willbarrett) +- [#8332](https://github.com/apache/incubator-superset/pull/8332) [fix] make datasource names non-nullable (#8332) (@serenajiang) +- [#8344](https://github.com/apache/incubator-superset/pull/8344) Add Fanatics to companies that use Superset (#8344) (@coderfender) +- [#8347](https://github.com/apache/incubator-superset/pull/8347) [docs] Add Deepomatic in organization list (#8347) (@Zanoellia) +- [#8296](https://github.com/apache/incubator-superset/pull/8296) feat: Set moment library language during app setup (#8296) (@oskarhinc) +- [#8299](https://github.com/apache/incubator-superset/pull/8299) fix: npm audit fix bumps handlebars 4.1.2->4.3.1 (#8299) (@mistercrunch) +- [#8175](https://github.com/apache/incubator-superset/pull/8175) [metric] Adding security for restricted metrics (#8175) (@john-bodley) +- [#8304](https://github.com/apache/incubator-superset/pull/8304) [Presto] Handle uncaught exception in get_create_view (#8304) (@etr2460) +- [#8305](https://github.com/apache/incubator-superset/pull/8305) [SQL Lab] Improve autocomplete (#8305) (@etr2460) +- [#8298](https://github.com/apache/incubator-superset/pull/8298) Allow 
overwriting a SQLLab query that has previously been saved (#8298) (@suddjian) +- [#8300](https://github.com/apache/incubator-superset/pull/8300) refactor: remove unused code (#8300) (@kristw) +- [#8320](https://github.com/apache/incubator-superset/pull/8320) Update RunQueryActionButton.jsx (#8320) (@nabinkhadka) +- [#8317](https://github.com/apache/incubator-superset/pull/8317) Adding explicit Flask session cookie options to default config (#8317) (@craig-rueda) +- [#8239](https://github.com/apache/incubator-superset/pull/8239) [superset] Fix, missing return on error and improved query (#8239) (@dpgaspar) +- [#8307](https://github.com/apache/incubator-superset/pull/8307) allow non-relative LOGO_TARGET_PATH (#8307) (@nytai) +- [#8311](https://github.com/apache/incubator-superset/pull/8311) Add safaricom to companies using superset (#8311) (@mmutiso) +- [#8309](https://github.com/apache/incubator-superset/pull/8309) Add explicit support for python 3.7 (#8309) (@villebro) +- [#8272](https://github.com/apache/incubator-superset/pull/8272) Refine release instructions (#8272) (@villebro) +- [#8262](https://github.com/apache/incubator-superset/pull/8262) Add commit to attempt to resolve query table lock (#8262) (@etr2460) +- [#8282](https://github.com/apache/incubator-superset/pull/8282) chore: skip flaky SQL Lab test (#8282) (@mistercrunch) +- [#8261](https://github.com/apache/incubator-superset/pull/8261) [migration] Stripping leading and trailing whitespace (#8261) (@john-bodley) +- [#8256](https://github.com/apache/incubator-superset/pull/8256) [sql_json] Ensuring the request body is JSON encoded (#8256) (@john-bodley) +- [#8240](https://github.com/apache/incubator-superset/pull/8240) [database] Improve form and API validation for invalid URI (#8240) (@dpgaspar) +- [#8274](https://github.com/apache/incubator-superset/pull/8274) More intuitive display of negative time deltas (#8274) (#8276) (@benvdh) +- [#8268](https://github.com/apache/incubator-superset/pull/8268) 
Fix no data in Presto (#8268) (@betodealmeida) +- [#8258](https://github.com/apache/incubator-superset/pull/8258) refactor: use deck.gl plugins from npm package (#8258) (@kristw) +- [#8138](https://github.com/apache/incubator-superset/pull/8138) [typing] add typing for superset/connectors and superset/common (#8138) (@serenajiang) +- [#8163](https://github.com/apache/incubator-superset/pull/8163) [SQLLab] Refactor sql json endpoint (#8163) (@dpgaspar) +- [#8257](https://github.com/apache/incubator-superset/pull/8257) Remove __row_id (#8257) (@betodealmeida) +- [#8250](https://github.com/apache/incubator-superset/pull/8250) Update another pypi case from superset to apache-superset (#8250) (@etr2460) +- [#8244](https://github.com/apache/incubator-superset/pull/8244) docs: reflect the pypi move from superset to apache-superset (#8244) (@mistercrunch) +- [#8224](https://github.com/apache/incubator-superset/pull/8224) [sqllab] create query slightly earlier (#8224) (@serenajiang) +- [#8253](https://github.com/apache/incubator-superset/pull/8253) Fix array casting (#8253) (@betodealmeida) +- [#8243](https://github.com/apache/incubator-superset/pull/8243) Show Presto views as views, not tables (#8243) (@betodealmeida) +- [#8251](https://github.com/apache/incubator-superset/pull/8251) Small fix for Presto dtype map (#8251) (@betodealmeida) +- [#8246](https://github.com/apache/incubator-superset/pull/8246) Update README.md (#8246) (@MaiTiano) +- [#8185](https://github.com/apache/incubator-superset/pull/8185) Add RegisteredLookupExtraction support to extraction function (#8185) (@1AB9502) +- [#8213](https://github.com/apache/incubator-superset/pull/8213) Better distinction between tables and views, and show CREATE VIEW (#8213) (@betodealmeida) +- [#8233](https://github.com/apache/incubator-superset/pull/8233) Simplify and improve `expand_data` in Presto (#8233) (@betodealmeida) +- [#8172](https://github.com/apache/incubator-superset/pull/8172) Allow users to estimate query 
cost before executing it (#8172) (@betodealmeida) +- [#8226](https://github.com/apache/incubator-superset/pull/8226) Handle int64 columns with missing data in SQL Lab (#8226) (@betodealmeida) +- [#8222](https://github.com/apache/incubator-superset/pull/8222) refactor: prepare control panel configs for separation into plugins (#8222) (@kristw) +- [#8221](https://github.com/apache/incubator-superset/pull/8221) fix: initialize control state for inline control config object (#8221) (@kristw) +- [#8214](https://github.com/apache/incubator-superset/pull/8214) [talisman] Enforcing HTTP for status checks (#8214) (@john-bodley) +- [#8173](https://github.com/apache/incubator-superset/pull/8173) feat: read control panel configs from registry (#8173) (@kristw) +- [#8196](https://github.com/apache/incubator-superset/pull/8196) chore: fix version info (#8196) (@mistercrunch) +- [#8212](https://github.com/apache/incubator-superset/pull/8212) fix: attempt to fix the flaky SQL Lab test (#8212) (@mistercrunch) +- [#8210](https://github.com/apache/incubator-superset/pull/8210) Update messages.json (#8210) (@klxiang) +- [#8166](https://github.com/apache/incubator-superset/pull/8166) [feature][dashboard] Show/hide filter indicator on the applicable charts when filter options are open/close (#8166) (@graceguo-supercat) +- [#8206](https://github.com/apache/incubator-superset/pull/8206) Update README.md (#8206) (@garnservo) +- [#8208](https://github.com/apache/incubator-superset/pull/8208) chore: update UPDATING.md with a 0.34.0 section (#8208) (@mistercrunch) +- [#8034](https://github.com/apache/incubator-superset/pull/8034) [viz] Adding get_def typing (#8034) (@john-bodley) +- [#8197](https://github.com/apache/incubator-superset/pull/8197) chore: deprecate restricted metrics (#8197) (@mistercrunch) +- [#8198](https://github.com/apache/incubator-superset/pull/8198) Update messages.json (#8198) (@klxiang) +- [#8204](https://github.com/apache/incubator-superset/pull/8204) change log from 
csv_export to data_export; fix db name in log (#8204) (@DiggidyDave) +- [#8164](https://github.com/apache/incubator-superset/pull/8164) more detailed async error messages (#8164) (@serenajiang) +- [#8182](https://github.com/apache/incubator-superset/pull/8182) [Config] Disable celery acks late (#8182) (@etr2460) +- [#8038](https://github.com/apache/incubator-superset/pull/8038) Prevent 'main' database connection creation (#8038) (@mistercrunch) +- [#8187](https://github.com/apache/incubator-superset/pull/8187) ignore formatting for ` + + + ); + } + + renderEditingFiltersName() { + const { dashboardFilters } = this.props; + const { activeFilterField, checkedFilterFields } = this.state; + const currentFilterLabels = [] + .concat(activeFilterField || checkedFilterFields) + .map(key => { + const { chartId, column } = getChartIdAndColumnFromFilterKey(key); + return dashboardFilters[chartId].labels[column] || column; + }); + + return ( +
+ {currentFilterLabels.length === 0 && t('No filter is selected.')} + {currentFilterLabels.length === 1 && t('Editing 1 filter:')} + {currentFilterLabels.length > 1 && + t('Batch editing %d filters:', currentFilterLabels.length)} + + {currentFilterLabels.join(', ')} + +
+ ); + } + + render() { + const { showSelector } = this.state; + + return ( + +
+
+

{t('Configure filter scopes')}

+ {this.renderEditingFiltersName()} +
+ + {!showSelector ? ( +
{t('There are no filters in this dashboard.')}
+ ) : ( +
+
+ {this.renderFilterFieldList()} +
+
+ {this.renderFilterScopeTree()} +
+
+ )} +
+
+ + {showSelector && ( + + )} +
+
+ ); + } +} + +FilterScopeSelector.propTypes = propTypes; diff --git a/superset/assets/src/dashboard/components/filterscope/FilterScopeTree.jsx b/superset/assets/src/dashboard/components/filterscope/FilterScopeTree.jsx new file mode 100644 index 000000000000..e433f15bc104 --- /dev/null +++ b/superset/assets/src/dashboard/components/filterscope/FilterScopeTree.jsx @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import React from 'react'; +import PropTypes from 'prop-types'; +import CheckboxTree from 'react-checkbox-tree'; +import 'react-checkbox-tree/lib/react-checkbox-tree.css'; +import { t } from '@superset-ui/translation'; + +import { + CheckboxChecked, + CheckboxUnchecked, + CheckboxHalfChecked, +} from '../../../components/CheckboxIcons'; +import renderFilterScopeTreeNodes from './renderFilterScopeTreeNodes'; +import { filterScopeSelectorTreeNodePropShape } from '../../util/propShapes'; + +const propTypes = { + nodes: PropTypes.arrayOf(filterScopeSelectorTreeNodePropShape).isRequired, + checked: PropTypes.arrayOf( + PropTypes.oneOfType([PropTypes.number, PropTypes.string]), + ).isRequired, + expanded: PropTypes.arrayOf( + PropTypes.oneOfType([PropTypes.number, PropTypes.string]), + ).isRequired, + onCheck: PropTypes.func.isRequired, + onExpand: PropTypes.func.isRequired, + selectedChartId: PropTypes.oneOfType([null, PropTypes.number]), +}; + +const defaultProps = { + selectedChartId: null, +}; + +const NOOP = () => {}; + +const FILTER_SCOPE_CHECKBOX_TREE_ICONS = { + check: , + uncheck: , + halfCheck: , + expandClose: , + expandOpen: , + expandAll: ( + {t('Expand all')} + ), + collapseAll: ( + {t('Collapse all')} + ), + parentClose: , + parentOpen: , + leaf: , +}; + +export default function FilterScopeTree({ + nodes = [], + checked = [], + expanded = [], + onCheck, + onExpand, + selectedChartId, +}) { + return ( + + ); +} + +FilterScopeTree.propTypes = propTypes; +FilterScopeTree.defaultProps = defaultProps; diff --git a/superset/assets/src/dashboard/components/filterscope/renderFilterFieldTreeNodes.jsx b/superset/assets/src/dashboard/components/filterscope/renderFilterFieldTreeNodes.jsx new file mode 100644 index 000000000000..5ffd49e80ad5 --- /dev/null +++ b/superset/assets/src/dashboard/components/filterscope/renderFilterFieldTreeNodes.jsx @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license 
agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; + +import FilterFieldItem from './FilterFieldItem'; +import { getFilterColorMap } from '../../util/dashboardFiltersColorMap'; + +export default function renderFilterFieldTreeNodes({ nodes, activeKey }) { + if (!nodes) { + return []; + } + + const root = nodes[0]; + const allFilterNodes = root.children; + const children = allFilterNodes.map(node => ({ + ...node, + children: node.children.map(child => { + const { label, value } = child; + const colorCode = getFilterColorMap()[value]; + return { + ...child, + label: ( + + ), + }; + }), + })); + + return [ + { + ...root, + children, + }, + ]; +} diff --git a/superset/assets/src/dashboard/components/filterscope/renderFilterScopeTreeNodes.jsx b/superset/assets/src/dashboard/components/filterscope/renderFilterScopeTreeNodes.jsx new file mode 100644 index 000000000000..f4a85cf039ed --- /dev/null +++ b/superset/assets/src/dashboard/components/filterscope/renderFilterScopeTreeNodes.jsx @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import cx from 'classnames'; + +import ChartIcon from '../../../components/ChartIcon'; +import { CHART_TYPE } from '../../util/componentTypes'; + +function traverse({ currentNode = {}, selectedChartId }) { + if (!currentNode) { + return null; + } + + const { label, value, type, children } = currentNode; + if (children && children.length) { + const updatedChildren = children.map(child => + traverse({ currentNode: child, selectedChartId }), + ); + return { + ...currentNode, + label: ( + + {type === CHART_TYPE && ( + + + + )} + {label} + + ), + children: updatedChildren, + }; + } + return { + ...currentNode, + label: ( + + {label} + + ), + }; +} + +export default function renderFilterScopeTreeNodes({ nodes, selectedChartId }) { + if (!nodes) { + return []; + } + + return nodes.map(node => traverse({ currentNode: node, selectedChartId })); +} diff --git a/superset/assets/src/dashboard/containers/FilterScope.jsx b/superset/assets/src/dashboard/containers/FilterScope.jsx new file mode 100644 index 000000000000..f88d66594e61 --- /dev/null +++ b/superset/assets/src/dashboard/containers/FilterScope.jsx @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { connect } from 'react-redux'; +import { bindActionCreators } from 'redux'; + +import { setDirectPathToChild } from '../actions/dashboardState'; +import FilterScopeSelector from '../components/filterscope/FilterScopeSelector'; + +function mapStateToProps({ dashboardLayout, dashboardFilters, dashboardInfo }) { + return { + dashboardFilters, + filterImmuneSlices: dashboardInfo.metadata.filterImmuneSlices || [], + filterImmuneSliceFields: + dashboardInfo.metadata.filterImmuneSliceFields || {}, + layout: dashboardLayout.present, + }; +} + +function mapDispatchToProps(dispatch) { + return bindActionCreators( + { + setDirectPathToChild, + }, + dispatch, + ); +} + +export default connect( + mapStateToProps, + mapDispatchToProps, +)(FilterScopeSelector); diff --git a/superset/assets/src/dashboard/reducers/dashboardFilters.js b/superset/assets/src/dashboard/reducers/dashboardFilters.js index 7cd7c8987e4b..4c1f14ddd12a 100644 --- a/superset/assets/src/dashboard/reducers/dashboardFilters.js +++ b/superset/assets/src/dashboard/reducers/dashboardFilters.js @@ -17,7 +17,6 @@ * under the License. 
*/ /* eslint-disable camelcase */ -import { DASHBOARD_ROOT_ID } from '../util/constants'; import { ADD_FILTER, REMOVE_FILTER, @@ -28,15 +27,23 @@ import { TIME_RANGE } from '../../visualizations/FilterBox/FilterBox'; import getFilterConfigsFromFormdata from '../util/getFilterConfigsFromFormdata'; import { buildFilterColorMap } from '../util/dashboardFiltersColorMap'; import { buildActiveFilters } from '../util/activeDashboardFilters'; +import { DASHBOARD_ROOT_ID } from '../util/constants'; + +export const DASHBOARD_FILTER_SCOPE_GLOBAL = { + scope: [DASHBOARD_ROOT_ID], + immune: [], +}; export const dashboardFilter = { chartId: 0, - componentId: '', + componentId: null, + filterName: null, directPathToFilter: [], - scope: DASHBOARD_ROOT_ID, isDateFilter: false, isInstantFilter: true, columns: {}, + labels: {}, + scopes: {}, }; export default function dashboardFiltersReducer(dashboardFilters = {}, action) { @@ -44,6 +51,13 @@ export default function dashboardFiltersReducer(dashboardFilters = {}, action) { [ADD_FILTER]() { const { chartId, component, form_data } = action; const { columns, labels } = getFilterConfigsFromFormdata(form_data); + const scopes = Object.keys(columns).reduce( + (map, column) => ({ + ...map, + [column]: DASHBOARD_FILTER_SCOPE_GLOBAL, + }), + {}, + ); const directPathToFilter = component ? 
(component.parents || []).slice().concat(component.id) : []; @@ -52,9 +66,11 @@ export default function dashboardFiltersReducer(dashboardFilters = {}, action) { ...dashboardFilter, chartId, componentId: component.id, + filterName: component.meta.sliceName, directPathToFilter, columns, labels, + scopes, isInstantFilter: !!form_data.instant_filtering, isDateFilter: Object.keys(columns).includes(TIME_RANGE), }; diff --git a/superset/assets/src/dashboard/reducers/getInitialState.js b/superset/assets/src/dashboard/reducers/getInitialState.js index 85a62e383983..185a8b0428fb 100644 --- a/superset/assets/src/dashboard/reducers/getInitialState.js +++ b/superset/assets/src/dashboard/reducers/getInitialState.js @@ -180,6 +180,7 @@ export default function(bootstrapData) { ...dashboardFilter, chartId: key, componentId, + filterName: slice.slice_name, directPathToFilter, columns, labels, @@ -187,8 +188,6 @@ export default function(bootstrapData) { isDateFilter: Object.keys(columns).includes(TIME_RANGE), }; } - buildActiveFilters(dashboardFilters); - buildFilterColorMap(dashboardFilters); } // sync layout names with current slice names in case a slice was edited @@ -199,6 +198,8 @@ export default function(bootstrapData) { layout[layoutId].meta.sliceName = slice.slice_name; } }); + buildActiveFilters(dashboardFilters); + buildFilterColorMap(dashboardFilters); // store the header as a layout component so we can undo/redo changes layout[DASHBOARD_HEADER_ID] = { diff --git a/superset/assets/src/dashboard/stylesheets/dashboard.less b/superset/assets/src/dashboard/stylesheets/dashboard.less index c37d5e593761..cdf11f748884 100644 --- a/superset/assets/src/dashboard/stylesheets/dashboard.less +++ b/superset/assets/src/dashboard/stylesheets/dashboard.less @@ -165,19 +165,26 @@ body { padding: 24px 24px 29px 24px; } - .delete-modal-actions-container { + .modal-dialog.filter-scope-modal { + width: 80%; + } + + .dashboard-modal-actions-container { margin-top: 24px; + text-align: right; 
.btn { margin-right: 16px; &:last-child { margin-right: 0; } + } + } - &.btn-primary { - background: @pink !important; - border-color: @pink !important; - } + .dashboard-modal.delete { + .btn.btn-primary { + background: @pink; + border-color: @pink; } } } diff --git a/superset/assets/src/dashboard/stylesheets/filter-scope-selector.less b/superset/assets/src/dashboard/stylesheets/filter-scope-selector.less new file mode 100644 index 000000000000..dd19a66fadde --- /dev/null +++ b/superset/assets/src/dashboard/stylesheets/filter-scope-selector.less @@ -0,0 +1,241 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +@import "../../../stylesheets/less/cosmo/variables.less"; + +.filter-scope-container { + font-size: 14px; + + .nav.nav-tabs { + border: none; + } +} + +.filter-scope-header { + h4 { + margin-top: 0; + } + + .selected-fields { + margin: 12px 0 16px; + visibility: hidden; + + &.multi-edit-mode { + visibility: visible; + } + + .selected-scopes { + padding-left: 5px; + } + } +} + +.filters-scope-selector { + margin: 10px -24px 20px; + display: flex; + flex-direction: row; + position: relative; + border: 1px solid #ccc; + border-left: none; + border-right: none; + + a, a:active, a:hover { + color: @almost-black; + text-decoration: none; + } + + .react-checkbox-tree .rct-icon.rct-icon-expand-all, + .react-checkbox-tree .rct-icon.rct-icon-collapse-all { + font-size: 13px; + font-family: @font-family-sans-serif; + color: @brand-primary; + + &::before { + content: ''; + } + + &:hover { + text-decoration: underline; + } + + &:focus { + outline: none; + } + } + + .filter-field-pane { + position: relative; + width: 40%; + padding: 16px 16px 16px 24px; + border-right: 1px solid #ccc; + + .filter-container { + label { + font-weight: normal; + margin: 0 0 0 16px; + } + } + + .filter-field-item { + height: 35px; + display: flex; + align-items: center; + padding: 0 24px; + margin-left: -24px; + + &.is-selected { + border: 1px solid #aaa; + border-radius: 4px; + background-color: #eee; + margin-left: -25px; + } + } + + .react-checkbox-tree { + .rct-text { + height: 40px; + } + } + } + + .filter-scope-pane { + position: relative; + flex: 1; + padding: 16px 24px 16px 16px; + } + + .react-checkbox-tree { + flex-direction: column; + color: @almost-black; + font-size: 14px; + + .filter-scope-type { + padding: 8px 0; + display: block; + + .type-indicator { + position: relative; + top: 3px; + margin-right: 8px; + } + + &.chart { + font-weight: normal; + } + + &.selected-filter { + padding-left: 28px; + position: relative; + color: #aaa; + + &::before { + content: " "; + position: 
absolute; + left: 0; + top: 50%; + width: 18px; + height: 18px; + border-radius: 2px; + margin-top: -9px; + box-shadow: inset 0 0 0 2px #ccc; + background: #f2f2f2; + } + } + + &.root { + font-weight: 700; + } + + &.tab { + font-weight: 700; + } + } + + .rct-checkbox { + svg { + position: relative; + top: 3px; + width: 18px; + } + } + + .rct-node-leaf { + .rct-bare-label { + &::before { + padding-left: 5px; + } + } + } + + .rct-options { + text-align: left; + margin-left: 0; + margin-bottom: 8px; + } + + .rct-text { + margin: 0; + display: flex; + } + + .rct-title { + display: block; + font-weight: bold; + } + + // disable style from react-checkbox-trees.css + .rct-node-clickable:hover, + .rct-node-clickable:focus, + label:hover, + label:active { + background: none !important; + } + } + + .multi-edit-mode { + &.filter-scope-pane { + .rct-node.rct-node-leaf .filter-scope-type.filter_box { + display: none; + } + } + + .filter-field-item { + padding: 0 16px 0 50px; + margin-left: -50px; + + &.is-selected { + margin-left: -51px; + } + } + } + + .scope-search { + position: absolute; + right: 16px; + top: 16px; + border-radius: 4px; + border: 1px solid #ccc; + padding: 4px 8px 4px 8px; + font-size: 13px; + outline: none; + + &:focus { + border: 1px solid @brand-primary; + } + } +} diff --git a/superset/assets/src/dashboard/stylesheets/index.less b/superset/assets/src/dashboard/stylesheets/index.less index 01a0e3cb2eb0..8ebce2555b47 100644 --- a/superset/assets/src/dashboard/stylesheets/index.less +++ b/superset/assets/src/dashboard/stylesheets/index.less @@ -23,6 +23,7 @@ @import './buttons.less'; @import './dashboard.less'; @import './dnd.less'; +@import './filter-scope-selector.less'; @import './filter-indicator.less'; @import './filter-indicator-tooltip.less'; @import './grid.less'; diff --git a/superset/assets/src/dashboard/util/activeDashboardFilters.js b/superset/assets/src/dashboard/util/activeDashboardFilters.js index 8c70577d05ba..8fa00d02f1a3 100644 --- 
a/superset/assets/src/dashboard/util/activeDashboardFilters.js +++ b/superset/assets/src/dashboard/util/activeDashboardFilters.js @@ -17,14 +17,31 @@ * under the License. */ let activeFilters = {}; +let allFilterBoxChartIds = []; export function getActiveFilters() { return activeFilters; } +// currently filterbox is a chart, +// when we define filter scopes, they have to be pulled out in a few places. +// after we make filterbox a dashboard built-in component, +// will not need this check anymore +export function isFilterBox(chartId) { + return allFilterBoxChartIds.includes(chartId); +} + +export function getAllFilterBoxChartIds() { + return allFilterBoxChartIds; +} + // non-empty filters from dashboardFilters, // this function does not take into account: filter immune or filter scope settings export function buildActiveFilters(allDashboardFilters = {}) { + allFilterBoxChartIds = Object.values(allDashboardFilters).map( + filter => filter.chartId, + ); + activeFilters = Object.values(allDashboardFilters).reduce( (result, filter) => { const { chartId, columns } = filter; diff --git a/superset/assets/src/dashboard/util/buildFilterScopeTreeEntry.js b/superset/assets/src/dashboard/util/buildFilterScopeTreeEntry.js new file mode 100644 index 000000000000..e91ac51fbdc8 --- /dev/null +++ b/superset/assets/src/dashboard/util/buildFilterScopeTreeEntry.js @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import getFilterScopeNodesTree from './getFilterScopeNodesTree'; +import getFilterScopeParentNodes from './getFilterScopeParentNodes'; +import getKeyForFilterScopeTree from './getKeyForFilterScopeTree'; +import getSelectedChartIdForFilterScopeTree from './getSelectedChartIdForFilterScopeTree'; + +export default function buildFilterScopeTreeEntry({ + checkedFilterFields = [], + activeFilterField, + filterScopeMap = {}, + layout = {}, +}) { + const key = getKeyForFilterScopeTree({ + checkedFilterFields, + activeFilterField, + }); + const editingList = activeFilterField + ? [activeFilterField] + : checkedFilterFields; + const selectedChartId = getSelectedChartIdForFilterScopeTree({ + checkedFilterFields, + activeFilterField, + }); + const nodes = getFilterScopeNodesTree({ + components: layout, + filterFields: editingList, + selectedChartId, + }); + const checkedChartIdSet = new Set(); + editingList.forEach(filterField => { + (filterScopeMap[filterField].checked || []).forEach(chartId => { + checkedChartIdSet.add(`${chartId}:${filterField}`); + }); + }); + const checked = [...checkedChartIdSet]; + const expanded = filterScopeMap[key] + ? 
filterScopeMap[key].expanded + : getFilterScopeParentNodes(nodes, 1); + + return { + [key]: { + nodes, + nodesFiltered: [...nodes], + checked, + expanded, + }, + }; +} diff --git a/superset/assets/src/dashboard/util/constants.js b/superset/assets/src/dashboard/util/constants.js index e2cbd3215863..5eae2a8633c9 100644 --- a/superset/assets/src/dashboard/util/constants.js +++ b/superset/assets/src/dashboard/util/constants.js @@ -76,3 +76,6 @@ export const FILTER_INDICATORS_DISPLAY_LENGTH = 3; // in-component element types: can be added into // directPathToChild, used for in dashboard navigation and focus export const IN_COMPONENT_ELEMENT_TYPES = ['LABEL']; + +// filter scope selector filter fields pane root id +export const ALL_FILTERS_ROOT = 'ALL_FILTERS_ROOT'; diff --git a/superset/assets/src/dashboard/util/dashboardFiltersColorMap.js b/superset/assets/src/dashboard/util/dashboardFiltersColorMap.js index bb8f762fbd3f..129acf5ceda8 100644 --- a/superset/assets/src/dashboard/util/dashboardFiltersColorMap.js +++ b/superset/assets/src/dashboard/util/dashboardFiltersColorMap.js @@ -16,15 +16,13 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { getDashboardFilterKey } from './getDashboardFilterKey'; + // should be consistent with @badge-colors .less variable const FILTER_COLORS_COUNT = 20; let filterColorMap = {}; -export function getFilterColorKey(chartId, column) { - return `${chartId}_${column}`; -} - export function getFilterColorMap() { return filterColorMap; } @@ -38,7 +36,7 @@ export function buildFilterColorMap(allDashboardFilters = {}) { Object.keys(columns) .sort() .forEach(column => { - const key = getFilterColorKey(chartId, column); + const key = getDashboardFilterKey({ chartId, column }); const colorCode = `badge-${filterColorIndex % FILTER_COLORS_COUNT}`; /* eslint-disable no-param-reassign */ colorMap[key] = colorCode; diff --git a/superset/assets/src/dashboard/util/getCurrentScopeChartIds.js b/superset/assets/src/dashboard/util/getCurrentScopeChartIds.js new file mode 100644 index 000000000000..60d86b59bb71 --- /dev/null +++ b/superset/assets/src/dashboard/util/getCurrentScopeChartIds.js @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { CHART_TYPE } from '../util/componentTypes'; + +export default function getCurrentScopeChartIds({ + scopeComponentIds, + filterField, + filterImmuneSlices, + filterImmuneSliceFields, + components, +}) { + let chartIds = []; + + function traverse(component) { + if (!component) { + return; + } + + if ( + component.type === CHART_TYPE && + component.meta && + component.meta.chartId + ) { + chartIds.push(component.meta.chartId); + } else if (component.children) { + component.children.forEach(child => traverse(components[child])); + } + } + + scopeComponentIds.forEach(componentId => traverse(components[componentId])); + + if (filterImmuneSlices && filterImmuneSlices.length) { + chartIds = chartIds.filter(id => !filterImmuneSlices.includes(id)); + } + + if (filterImmuneSliceFields) { + chartIds = chartIds.filter( + id => + !(id.toString() in filterImmuneSliceFields) || + !filterImmuneSliceFields[id].includes(filterField), + ); + } + + return chartIds; +} diff --git a/superset/assets/src/dashboard/util/getDashboardFilterKey.js b/superset/assets/src/dashboard/util/getDashboardFilterKey.js new file mode 100644 index 000000000000..e6307c3abc3e --- /dev/null +++ b/superset/assets/src/dashboard/util/getDashboardFilterKey.js @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +export function getDashboardFilterKey({ chartId, column }) { + return `${chartId}_${column}`; +} + +export function getChartIdAndColumnFromFilterKey(key) { + const [chartId, ...parts] = key.split('_'); + const column = parts.slice().join('_'); + return { chartId: parseInt(chartId, 10), column }; +} diff --git a/superset/assets/src/dashboard/util/getFilterFieldNodesTree.js b/superset/assets/src/dashboard/util/getFilterFieldNodesTree.js new file mode 100644 index 000000000000..b55d28fe66a8 --- /dev/null +++ b/superset/assets/src/dashboard/util/getFilterFieldNodesTree.js @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { t } from '@superset-ui/translation'; + +import { getDashboardFilterKey } from './getDashboardFilterKey'; +import { ALL_FILTERS_ROOT } from './constants'; + +export default function getFilterFieldNodesTree({ dashboardFilters = {} }) { + const allFilters = Object.values(dashboardFilters).map(dashboardFilter => { + const { chartId, filterName, columns, labels } = dashboardFilter; + const children = Object.keys(columns).map(column => ({ + value: getDashboardFilterKey({ chartId, column }), + label: labels[column] || column, + })); + return { + value: chartId, + label: filterName, + children, + showCheckbox: true, + }; + }); + + return [ + { + value: ALL_FILTERS_ROOT, + label: t('Select/deselect all filters'), + children: allFilters, + }, + ]; +} diff --git a/superset/assets/src/dashboard/util/getFilterScopeNodesTree.js b/superset/assets/src/dashboard/util/getFilterScopeNodesTree.js new file mode 100644 index 000000000000..470ac08a092f --- /dev/null +++ b/superset/assets/src/dashboard/util/getFilterScopeNodesTree.js @@ -0,0 +1,128 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { isEmpty } from 'lodash'; +import { t } from '@superset-ui/translation'; + +import { DASHBOARD_ROOT_ID } from './constants'; +import { + CHART_TYPE, + DASHBOARD_ROOT_TYPE, + TAB_TYPE, +} from '../util/componentTypes'; + +const FILTER_SCOPE_CONTAINER_TYPES = [TAB_TYPE, DASHBOARD_ROOT_TYPE]; + +function traverse({ + currentNode = {}, + components = {}, + filterFields = [], + selectedChartId, +}) { + if (!currentNode) { + return null; + } + + const type = currentNode.type; + if ( + CHART_TYPE === type && + currentNode && + currentNode.meta && + currentNode.meta.chartId + ) { + const chartNode = { + value: currentNode.meta.chartId, + label: + currentNode.meta.sliceName || `${type} ${currentNode.meta.chartId}`, + type, + showCheckbox: selectedChartId !== currentNode.meta.chartId, + }; + + return { + ...chartNode, + children: filterFields.map(filterField => ({ + value: `${currentNode.meta.chartId}:${filterField}`, + label: `${chartNode.label}`, + type: 'filter_box', + showCheckbox: false, + })), + }; + } + + let children = []; + if (currentNode.children && currentNode.children.length) { + currentNode.children.forEach(child => { + const childNodeTree = traverse({ + currentNode: components[child], + components, + filterFields, + selectedChartId, + }); + + const childType = components[child].type; + if (FILTER_SCOPE_CONTAINER_TYPES.includes(childType)) { + children.push(childNodeTree); + } else { + children = children.concat(childNodeTree); + } + }); + } + + if (FILTER_SCOPE_CONTAINER_TYPES.includes(type)) { + let label = null; + if (type === DASHBOARD_ROOT_TYPE) { + label = t('Select/deselect all charts'); + } else { + label = + currentNode.meta && currentNode.meta.text + ? 
currentNode.meta.text + : `${type} ${currentNode.id}`; + } + + return { + value: currentNode.id, + label, + type, + children, + }; + } + + return children; +} + +export default function getFilterScopeNodesTree({ + components = {}, + filterFields = [], + selectedChartId, +}) { + if (isEmpty(components)) { + return []; + } + + const root = traverse({ + currentNode: components[DASHBOARD_ROOT_ID], + components, + filterFields, + selectedChartId, + }); + return [ + { + ...root, + }, + ]; +} diff --git a/superset/assets/src/dashboard/util/getFilterScopeParentNodes.js b/superset/assets/src/dashboard/util/getFilterScopeParentNodes.js new file mode 100644 index 000000000000..8330c64cf33e --- /dev/null +++ b/superset/assets/src/dashboard/util/getFilterScopeParentNodes.js @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +export default function getFilterScopeParentNodes(nodes = [], depthLimit = -1) { + const parentNodes = []; + const traverse = (currentNode, depth) => { + if (!currentNode) { + return; + } + + if (currentNode.children && (depthLimit === -1 || depth < depthLimit)) { + parentNodes.push(currentNode.value); + currentNode.children.forEach(child => traverse(child, depth + 1)); + } + }; + + if (nodes.length > 0) { + nodes.forEach(node => { + traverse(node, 0); + }); + } + + return parentNodes; +} diff --git a/superset/assets/src/dashboard/util/getKeyForFilterScopeTree.js b/superset/assets/src/dashboard/util/getKeyForFilterScopeTree.js new file mode 100644 index 000000000000..e85dd5128c12 --- /dev/null +++ b/superset/assets/src/dashboard/util/getKeyForFilterScopeTree.js @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { safeStringify } from '../../utils/safeStringify'; + +export default function getKeyForFilterScopeTree({ + activeFilterField, + checkedFilterFields, +}) { + return activeFilterField + ? 
safeStringify([activeFilterField]) + : safeStringify(checkedFilterFields); +} diff --git a/superset/assets/src/dashboard/util/getRevertedFilterScope.js b/superset/assets/src/dashboard/util/getRevertedFilterScope.js new file mode 100644 index 000000000000..92e4a299eadd --- /dev/null +++ b/superset/assets/src/dashboard/util/getRevertedFilterScope.js @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +export default function getRevertedFilterScope({ + checked = [], + filterFields = [], + filterScopeMap = {}, +}) { + const checkedChartIdsByFilterField = checked.reduce((map, value) => { + const [chartId, filterField] = value.split(':'); + return { + ...map, + [filterField]: (map[filterField] || []).concat(parseInt(chartId, 10)), + }; + }, {}); + + return filterFields.reduce( + (map, filterField) => ({ + ...map, + [filterField]: { + ...filterScopeMap[filterField], + checked: checkedChartIdsByFilterField[filterField], + }, + }), + {}, + ); +} diff --git a/superset/assets/src/dashboard/util/getSelectedChartIdForFilterScopeTree.js b/superset/assets/src/dashboard/util/getSelectedChartIdForFilterScopeTree.js new file mode 100644 index 000000000000..cde72e35851a --- /dev/null +++ b/superset/assets/src/dashboard/util/getSelectedChartIdForFilterScopeTree.js @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { getChartIdAndColumnFromFilterKey } from './getDashboardFilterKey'; + +export default function getSelectedChartIdForFilterScopeTree({ + activeFilterField, + checkedFilterFields, +}) { + // we don't apply filter on filter_box itself, so we will disable + // checkbox in filter scope selector. + // this function returns chart id based on current filter scope selector local state: + // 1. if in single-edit mode, return the chart id for selected filter field. + // 2. if in multi-edit mode, if all filter fields are from same chart id, + // return the single chart id. + // otherwise, there is no chart to disable. + if (activeFilterField) { + return getChartIdAndColumnFromFilterKey(activeFilterField).chartId; + } + + if (checkedFilterFields.length) { + const { chartId } = getChartIdAndColumnFromFilterKey( + checkedFilterFields[0], + ); + + if ( + checkedFilterFields.some( + filterKey => + getChartIdAndColumnFromFilterKey(filterKey).chartId !== chartId, + ) + ) { + return null; + } + return chartId; + } + + return null; +} diff --git a/superset/assets/src/dashboard/util/propShapes.jsx b/superset/assets/src/dashboard/util/propShapes.jsx index d4fb6ddd623f..b59f41ea0c0b 100644 --- a/superset/assets/src/dashboard/util/propShapes.jsx +++ b/superset/assets/src/dashboard/util/propShapes.jsx @@ -35,6 +35,9 @@ export const componentShape = PropTypes.shape({ // Row background: PropTypes.oneOf(backgroundStyleOptions.map(opt => opt.value)), + + // Chart + chartId: PropTypes.number, }), }); @@ -76,7 +79,7 @@ export const filterIndicatorPropShape = PropTypes.shape({ isInstantFilter: PropTypes.bool.isRequired, label: PropTypes.string.isRequired, name: PropTypes.string.isRequired, - scope: PropTypes.string.isRequired, + scope: PropTypes.arrayOf(PropTypes.string), values: PropTypes.array.isRequired, }); @@ -102,6 +105,30 @@ export const dashboardInfoPropShape = PropTypes.shape({ userId: PropTypes.string.isRequired, }); +/* eslint-disable-next-line no-undef */ +const 
lazyFunction = f => () => f().apply(this, arguments); + +const leafType = PropTypes.shape({ + value: PropTypes.oneOfType([PropTypes.number, PropTypes.string]).isRequired, + label: PropTypes.string.isRequired, +}); + +const parentShape = { + value: PropTypes.oneOfType([PropTypes.number, PropTypes.string]).isRequired, + label: PropTypes.string.isRequired, + children: PropTypes.arrayOf( + PropTypes.oneOfType([ + PropTypes.shape(lazyFunction(() => parentShape)), + leafType, + ]), + ), +}; + +export const filterScopeSelectorTreeNodePropShape = PropTypes.oneOfType([ + PropTypes.shape(parentShape), + leafType, +]); + export const loadStatsPropShape = PropTypes.objectOf( PropTypes.shape({ didLoad: PropTypes.bool.isRequired, diff --git a/superset/assets/src/datasource/DatasourceEditor.jsx b/superset/assets/src/datasource/DatasourceEditor.jsx index 014be93fb3e9..4ef034c6f19c 100644 --- a/superset/assets/src/datasource/DatasourceEditor.jsx +++ b/superset/assets/src/datasource/DatasourceEditor.jsx @@ -97,14 +97,25 @@ function ColumnCollectionTable({ - {t('The pattern of the timestamp format, use ')} + {t('The pattern of timestamp format. For strings use ')} {t('python datetime string pattern')} - {t(` expression. If time is stored in epoch format, put \`epoch_s\` or - \`epoch_ms\`.`)} + {t(' expression which needs to adhere to the ')} + + {t('ISO 8601')} + + {t(` standard to ensure that the lexicographical ordering + coincides with the chronological ordering. If the + timestamp format does not adhere to the ISO 8601 standard + you will need to define an expression and type for + transforming the string into a date or timestamp. Note + currently time zones are not supported. If time is stored + in epoch format, put \`epoch_s\` or \`epoch_ms\`. 
If no pattern + is specified we fall back to using the optional defaults on a per + database/column name level via the extra parameter.`)} } control={} @@ -556,30 +567,20 @@ export class DatasourceEditor extends React.PureComponent { } render() { - const datasource = this.state.datasource; + const { datasource, activeTabKey } = this.state; return (
{this.renderErrors()} - - {this.state.activeTabKey === 1 && -
- - - {this.renderSettingsFieldset()} - - - - - {this.renderAdvancedFieldset()} - - -
- } + } + eventKey={1} + > + {activeTabKey === 1 && this.renderMetricCollection()} - {this.state.activeTabKey === 2 && + {activeTabKey === 2 &&
} eventKey={3} > - {this.state.activeTabKey === 3 && + {activeTabKey === 3 && this.setColumns({ calculatedColumns })} @@ -632,11 +633,25 @@ export class DatasourceEditor extends React.PureComponent { /> } - } - eventKey={4} - > - {this.state.activeTabKey === 4 && this.renderMetricCollection()} + + {activeTabKey === 4 && +
+
+ {t('Be careful.')} + {t('Changing these settings will affect all charts using this datasource, including charts owned by other people.')} +
+ + + {this.renderSettingsFieldset()} + + + + + {this.renderAdvancedFieldset()} + + +
+ }
diff --git a/superset/assets/src/datasource/main.css b/superset/assets/src/datasource/main.css index f551f7be96f7..608143377a95 100644 --- a/superset/assets/src/datasource/main.css +++ b/superset/assets/src/datasource/main.css @@ -20,3 +20,12 @@ height: 600px; overflow: auto; } + +.Datasource .change-warning { + margin: 16px 10px 0; + color: #FE4A49; +} + +.Datasource .change-warning .bold { + font-weight: bold; +} diff --git a/superset/assets/src/explore/components/controls/CheckboxControl.jsx b/superset/assets/src/explore/components/controls/CheckboxControl.jsx index 7e9afcabda8e..22dc08849dbe 100644 --- a/superset/assets/src/explore/components/controls/CheckboxControl.jsx +++ b/superset/assets/src/explore/components/controls/CheckboxControl.jsx @@ -52,6 +52,7 @@ export default class CheckboxControl extends React.Component { ); } diff --git a/superset/assets/src/explore/components/controls/DatasourceControl.css b/superset/assets/src/explore/components/controls/DatasourceControl.css new file mode 100644 index 000000000000..87ea089ef119 --- /dev/null +++ b/superset/assets/src/explore/components/controls/DatasourceControl.css @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +#datasource_menu { + border-radius: 2px; + padding-left: 8px; + padding-right: 8px; +} + +#datasource_menu .caret { + position: relative; + padding-right: 8px; + margin-left: 4px; + color: #fff; + top: -8px; +} + +#datasource_menu + ul { + margin-top: 26px; +} diff --git a/superset/assets/src/explore/components/controls/DatasourceControl.jsx b/superset/assets/src/explore/components/controls/DatasourceControl.jsx index 910a5fdef6ef..a2a35c2271d2 100644 --- a/superset/assets/src/explore/components/controls/DatasourceControl.jsx +++ b/superset/assets/src/explore/components/controls/DatasourceControl.jsx @@ -36,6 +36,8 @@ import ColumnOption from '../../../components/ColumnOption'; import MetricOption from '../../../components/MetricOption'; import DatasourceModal from '../../../datasource/DatasourceModal'; import ChangeDatasourceModal from '../../../datasource/ChangeDatasourceModal'; +import TooltipWrapper from '../../../components/TooltipWrapper'; +import './DatasourceControl.css'; const propTypes = { onChange: PropTypes.func, @@ -115,56 +117,45 @@ class DatasourceControl extends React.PureComponent { } render() { - const { menuExpanded, showChangeDatasourceModal, showEditDatasourceModal } = this.state; + const { showChangeDatasourceModal, showEditDatasourceModal } = this.state; const { datasource, onChange, onDatasourceSave, value } = this.props; return (
- {t('Click to edit the datasource')} - } - > -
- -
-
- - - } - className="label label-btn m-r-5" - bsSize="sm" - id="datasource_menu" + - - {t('Edit Datasource')} - - {datasource.type === 'table' && - {t('Explore in SQL Lab')} - } - - {t('Change Datasource')} - - + {t('Change Datasource')} + + {datasource.type === 'table' && + + {t('Explore in SQL Lab')} + } + + {t('Edit Datasource')} + + + { const nextState = getStateFromCommonTimeFrame(timeFrame); + const endpoints = this.props.endpoints; return ( - {nextState.since}
{nextState.until} + {nextState.since} {endpoints && `(${endpoints[0]})`}
{nextState.until} {endpoints && `(${endpoints[1]})`} } > @@ -494,7 +509,15 @@ export default class DateFilterControl extends React.Component { } render() { let value = this.props.value || defaultProps.value; - value = value.split(SEPARATOR).map((v, idx) => v.replace('T00:00:00', '') || (idx === 0 ? '-∞' : '∞')).join(SEPARATOR); + const endpoints = this.props.endpoints; + value = value + .split(SEPARATOR) + .map((v, idx) => + moment(v).isValid() + ? v.replace('T00:00:00', '') + (endpoints ? ` (${endpoints[idx]})` : '') + : v || (idx === 0 ? '-∞' : '∞'), + ) + .join(SEPARATOR); return (
diff --git a/superset/assets/src/explore/controlPanels/sections.jsx b/superset/assets/src/explore/controlPanels/sections.jsx index 7df048869731..db5ea6e77d62 100644 --- a/superset/assets/src/explore/controlPanels/sections.jsx +++ b/superset/assets/src/explore/controlPanels/sections.jsx @@ -35,7 +35,7 @@ export const datasourceAndVizType = { controlSetRows: [ ['datasource'], ['viz_type'], - ['slice_id', 'cache_timeout', 'url_params'], + ['slice_id', 'cache_timeout', 'url_params', 'time_range_endpoints'], ], }; diff --git a/superset/assets/src/explore/controls.jsx b/superset/assets/src/explore/controls.jsx index 8f06ae617146..94db55576c58 100644 --- a/superset/assets/src/explore/controls.jsx +++ b/superset/assets/src/explore/controls.jsx @@ -375,7 +375,7 @@ export const controls = { choices: () => sequentialSchemeRegistry .values() .map(value => [value.id, value.label]), - default: 'blue_white_yellow', + default: sequentialSchemeRegistry.getDefaultKey(), clearable: false, description: '', renderTrigger: true, @@ -575,6 +575,7 @@ export const controls = { 'India', 'Italy', 'Japan', + 'Korea', 'Morocco', 'Myanmar', 'Netherlands', @@ -951,6 +952,17 @@ export const controls = { freeForm: true, label: t('Time range'), default: t('Last week'), + description: t( + 'The time range for the visualization. All relative times, e.g. "Last month", ' + + '"Last 7 days", "now", etc. are evaluated on the server using the server\'s ' + + 'local time (sans timezone). All tooltips and placeholder times are expressed ' + + 'in UTC (sans timezone). The timestamps are then evaluated by the database ' + + 'using the engine\'s local timezone. Note one can explicitly set the timezone ' + + 'per the ISO 8601 format if specifying either the start and/or end time.', + ), + mapStateToProps: state => ({ + endpoints: state.form_data ? 
state.form_data.time_range_endpoints : null, + }), }, max_bubble_size: { @@ -2026,6 +2038,13 @@ export const controls = { description: t('Extra parameters for use in jinja templated queries'), }, + time_range_endpoints: { + type: 'HiddenControl', + label: t('Time range endpoints'), + hidden: true, + description: t('Time range endpoints (SIP-15)'), + }, + order_by_entity: { type: 'CheckboxControl', label: t('Order by entity id'), @@ -2047,7 +2066,7 @@ export const controls = { color_scheme: { type: 'ColorSchemeControl', label: t('Color Scheme'), - default: 'bnbColors', + default: categoricalSchemeRegistry.getDefaultKey(), renderTrigger: true, choices: () => categoricalSchemeRegistry.keys().map(s => ([s, s])), description: t('The color scheme for rendering chart'), diff --git a/superset/assets/src/messageToasts/components/Toast.jsx b/superset/assets/src/messageToasts/components/Toast.jsx index 02deb3d5e692..bbce2e97be64 100644 --- a/superset/assets/src/messageToasts/components/Toast.jsx +++ b/superset/assets/src/messageToasts/components/Toast.jsx @@ -18,6 +18,7 @@ */ import { Alert } from 'react-bootstrap'; import cx from 'classnames'; +import Interweave from 'interweave'; import PropTypes from 'prop-types'; import React from 'react'; @@ -96,7 +97,7 @@ class Toast extends React.Component { toastType === DANGER_TOAST && 'toast--danger', )} > - {text} + ); } diff --git a/superset/assets/src/messageToasts/stylesheets/toast.less b/superset/assets/src/messageToasts/stylesheets/toast.less index 227e954a47b1..bd1e80dacf9a 100644 --- a/superset/assets/src/messageToasts/stylesheets/toast.less +++ b/superset/assets/src/messageToasts/stylesheets/toast.less @@ -23,7 +23,7 @@ bottom: 16px; left: 50%; transform: translate(-50%, 0); - width: 500px; + width: 600px; z-index: 3000; // top of the world } diff --git a/superset/assets/src/setup/setupColors.js b/superset/assets/src/setup/setupColors.js index 99df66d1ebc6..d3f80ad10f92 100644 --- a/superset/assets/src/setup/setupColors.js 
+++ b/superset/assets/src/setup/setupColors.js @@ -40,4 +40,5 @@ export default function setupColors() { sequentialSchemeRegistry.registerValue(scheme.id, scheme); }); }); + sequentialSchemeRegistry.setDefaultKey('blue_white_yellow'); } diff --git a/superset/assets/src/visualizations/FilterBox/FilterBox.css b/superset/assets/src/visualizations/FilterBox/FilterBox.css index f546c9c463dc..0b678e0fcd98 100644 --- a/superset/assets/src/visualizations/FilterBox/FilterBox.css +++ b/superset/assets/src/visualizations/FilterBox/FilterBox.css @@ -60,7 +60,7 @@ ul.select2-results div.filter_box{ .filter-container label { display: flex; font-weight: bold; - margin-bottom: 8px; + margin: 0 0 8px 8px; } .filter-container .filter-badge-container { width: 30px; diff --git a/superset/assets/src/visualizations/FilterBox/FilterBox.jsx b/superset/assets/src/visualizations/FilterBox/FilterBox.jsx index a2b9cc84336a..b259e7bff2a7 100644 --- a/superset/assets/src/visualizations/FilterBox/FilterBox.jsx +++ b/superset/assets/src/visualizations/FilterBox/FilterBox.jsx @@ -29,7 +29,8 @@ import Control from '../../explore/components/Control'; import controls from '../../explore/controls'; import OnPasteSelect from '../../components/OnPasteSelect'; import VirtualizedRendererWrap from '../../components/VirtualizedRendererWrap'; -import { getFilterColorKey, getFilterColorMap } from '../../dashboard/util/dashboardFiltersColorMap'; +import { getDashboardFilterKey } from '../../dashboard/util/getDashboardFilterKey'; +import { getFilterColorMap } from '../../dashboard/util/dashboardFiltersColorMap'; import FilterBadgeIcon from '../../components/FilterBadgeIcon'; import './FilterBox.css'; @@ -303,7 +304,7 @@ class FilterBox extends React.Component { } renderFilterBadge(chartId, column) { - const colorKey = getFilterColorKey(chartId, column); + const colorKey = getDashboardFilterKey({ chartId, column }); const filterColorMap = getFilterColorMap(); const colorCode = filterColorMap[colorKey]; diff 
--git a/superset/cli.py b/superset/cli.py index b30b6547f22d..8e695bbf292c 100755 --- a/superset/cli.py +++ b/superset/cli.py @@ -16,17 +16,17 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=C,R,W -from datetime import datetime import logging +from datetime import datetime from subprocess import Popen from sys import stdout import click +import yaml from colorama import Fore, Style from flask import g from flask_appbuilder import Model from pathlib2 import Path -import yaml from superset import app, appbuilder, db, examples, security_manager from superset.common.tags import add_favorites, add_owners, add_types @@ -62,7 +62,7 @@ def version(verbose): Fore.YELLOW + "Superset " + Fore.CYAN - + "{version}".format(version=config.get("VERSION_STRING")) + + "{version}".format(version=config["VERSION_STRING"]) ) print(Fore.BLUE + "-=" * 15) if verbose: @@ -184,7 +184,7 @@ def refresh_druid(datasource, merge): @click.option( "--path", "-p", - help="Path to a single JSON file or path containing multiple JSON files" + help="Path to a single JSON file or path containing multiple JSON " "files to import (*.json)", ) @click.option( @@ -372,10 +372,8 @@ def worker(workers): ) if workers: celery_app.conf.update(CELERYD_CONCURRENCY=workers) - elif config.get("SUPERSET_CELERY_WORKERS"): - celery_app.conf.update( - CELERYD_CONCURRENCY=config.get("SUPERSET_CELERY_WORKERS") - ) + elif config["SUPERSET_CELERY_WORKERS"]: + celery_app.conf.update(CELERYD_CONCURRENCY=config["SUPERSET_CELERY_WORKERS"]) worker = celery_app.Worker(optimization="fair") worker.start() @@ -428,7 +426,7 @@ def load_test_users_run(): Syncs permissions for those users/roles """ - if config.get("TESTING"): + if config["TESTING"]: sm = security_manager diff --git a/superset/common/query_context.py b/superset/common/query_context.py index c2534e9f64fa..36ba6753676d 100644 --- a/superset/common/query_context.py +++ b/superset/common/query_context.py @@ -15,21 +15,21 
@@ # specific language governing permissions and limitations # under the License. # pylint: disable=C,R,W -from datetime import datetime, timedelta import logging import pickle as pkl +from datetime import datetime, timedelta from typing import Any, Dict, List, Optional import numpy as np import pandas as pd -from superset import app, cache -from superset import db +from superset import app, cache, db from superset.connectors.base.models import BaseDatasource from superset.connectors.connector_registry import ConnectorRegistry from superset.stats_logger import BaseStatsLogger from superset.utils import core as utils from superset.utils.core import DTTM_ALIAS + from .query_object import QueryObject config = app.config @@ -59,8 +59,10 @@ def __init__( force: bool = False, custom_cache_timeout: Optional[int] = None, ) -> None: - self.datasource = ConnectorRegistry.get_datasource( - datasource.get("type"), int(datasource.get("id")), db.session # noqa: T400 + self.datasource = ConnectorRegistry.get_datasource( # type: ignore + datasource.get("type"), # type: ignore + int(datasource.get("id")), # type: ignore + db.session, ) self.queries = list(map(lambda query_obj: QueryObject(**query_obj), queries)) diff --git a/superset/common/query_object.py b/superset/common/query_object.py index 71b690358a4f..21649d1d0a32 100644 --- a/superset/common/query_object.py +++ b/superset/common/query_object.py @@ -15,8 +15,8 @@ # specific language governing permissions and limitations # under the License. 
# pylint: disable=R -from datetime import datetime, timedelta import hashlib +from datetime import datetime, timedelta from typing import Any, Dict, List, Optional, Union import simplejson as json @@ -24,7 +24,6 @@ from superset import app from superset.utils import core as utils - # TODO: Type Metrics dictionary with TypedDict when it becomes a vanilla python type # https://github.com/python/mypy/issues/5288 @@ -39,7 +38,7 @@ class QueryObject: from_dttm: datetime to_dttm: datetime is_timeseries: bool - time_shift: timedelta + time_shift: Optional[timedelta] groupby: List[str] metrics: List[Union[Dict, str]] row_limit: int @@ -61,14 +60,14 @@ def __init__( time_shift: Optional[str] = None, is_timeseries: bool = False, timeseries_limit: int = 0, - row_limit: int = app.config.get("ROW_LIMIT"), + row_limit: int = app.config["ROW_LIMIT"], timeseries_limit_metric: Optional[Dict] = None, order_desc: bool = True, extras: Optional[Dict] = None, columns: Optional[List[str]] = None, orderby: Optional[List[List]] = None, - relative_start: str = app.config.get("DEFAULT_RELATIVE_START_TIME", "today"), - relative_end: str = app.config.get("DEFAULT_RELATIVE_END_TIME", "today"), + relative_start: str = app.config["DEFAULT_RELATIVE_START_TIME"], + relative_end: str = app.config["DEFAULT_RELATIVE_END_TIME"], ): self.granularity = granularity self.from_dttm, self.to_dttm = utils.get_since_until( @@ -85,7 +84,7 @@ def __init__( # Temporal solution for backward compatability issue # due the new format of non-ad-hoc metric. self.metrics = [ - metric if "expressionType" in metric else metric["label"] # noqa: T484 + metric if "expressionType" in metric else metric["label"] # type: ignore for metric in metrics ] self.row_limit = row_limit diff --git a/superset/config.py b/superset/config.py index 79f723d45248..c5728f87aa43 100644 --- a/superset/config.py +++ b/superset/config.py @@ -21,23 +21,29 @@ in your PYTHONPATH as there is a ``from superset_config import *`` at the end of this file. 
""" -from collections import OrderedDict import imp import importlib.util import json import logging import os import sys +from collections import OrderedDict +from datetime import date +from typing import Any, Callable, Dict, List, Optional from celery.schedules import crontab from dateutil import tz from flask_appbuilder.security.manager import AUTH_DB from superset.stats_logger import DummyStatsLogger +from superset.utils.log import DBEventLogger from superset.utils.logging_configurator import DefaultLoggingConfigurator # Realtime stats logger, a StatsD implementation exists STATS_LOGGER = DummyStatsLogger() +EVENT_LOGGER = DBEventLogger() + +SUPERSET_LOG_VIEW = True BASE_DIR = os.path.abspath(os.path.dirname(__file__)) if "SUPERSET_HOME" in os.environ: @@ -93,7 +99,7 @@ def _try_json_readfile(filepath): # --------------------------------------------------------- # Your App secret key -SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h" # noqa +SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h" # The SQLAlchemy connection string. SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "superset.db") @@ -108,6 +114,7 @@ def _try_json_readfile(filepath): # def lookup_password(url): # return 'secret' # SQLALCHEMY_CUSTOM_PASSWORD_STORE = lookup_password +SQLALCHEMY_CUSTOM_PASSWORD_STORE = None # The limit of queries fetched for query search QUERY_SEARCH_LIMIT = 1000 @@ -231,6 +238,9 @@ def _try_json_readfile(filepath): "PRESTO_EXPAND_DATA": False, } +# This is merely a default. +FEATURE_FLAGS: Dict[str, bool] = {} + # A function that receives a dict of all feature flags # (DEFAULT_FEATURE_FLAGS merged with FEATURE_FLAGS) # can alter it, and returns a similar dict. 
Note the dict of feature @@ -262,12 +272,12 @@ def _try_json_readfile(filepath): # IMG_SIZE = (300, 200, True) CACHE_DEFAULT_TIMEOUT = 60 * 60 * 24 -CACHE_CONFIG = {"CACHE_TYPE": "null"} +CACHE_CONFIG: Dict[str, Any] = {"CACHE_TYPE": "null"} TABLE_NAMES_CACHE_CONFIG = {"CACHE_TYPE": "null"} # CORS Options ENABLE_CORS = False -CORS_OPTIONS = {} +CORS_OPTIONS: Dict[Any, Any] = {} # Chrome allows up to 6 open connections per domain at a time. When there are more # than 6 slices in dashboard, a lot of time fetch requests are queued up and wait for @@ -278,7 +288,7 @@ def _try_json_readfile(filepath): # Allowed format types for upload on Database view # TODO: Add processing of other spreadsheet formats (xls, xlsx etc) -ALLOWED_EXTENSIONS = set(["csv"]) +ALLOWED_EXTENSIONS = {"csv", "tsv"} # CSV Options: key/value pairs that will be passed as argument to DataFrame.to_csv # method. @@ -292,13 +302,13 @@ def _try_json_readfile(filepath): # time grains in superset/db_engine_specs.builtin_time_grains). # For example: to disable 1 second time grain: # TIME_GRAIN_BLACKLIST = ['PT1S'] -TIME_GRAIN_BLACKLIST = [] +TIME_GRAIN_BLACKLIST: List[str] = [] # Additional time grains to be supported using similar definitions as in # superset/db_engine_specs.builtin_time_grains. # For example: To add a new 2 second time grain: # TIME_GRAIN_ADDONS = {'PT2S': '2 second'} -TIME_GRAIN_ADDONS = {} +TIME_GRAIN_ADDONS: Dict[str, str] = {} # Implementation of additional time grains per engine. 
# For example: To implement 2 second time grain on clickhouse engine: @@ -307,7 +317,7 @@ def _try_json_readfile(filepath): # 'PT2S': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 2)*2)' # } # } -TIME_GRAIN_ADDON_FUNCTIONS = {} +TIME_GRAIN_ADDON_FUNCTIONS: Dict[str, Dict[str, str]] = {} # --------------------------------------------------- # List of viz_types not allowed in your environment @@ -315,13 +325,13 @@ def _try_json_readfile(filepath): # VIZ_TYPE_BLACKLIST = ['pivot_table', 'treemap'] # --------------------------------------------------- -VIZ_TYPE_BLACKLIST = [] +VIZ_TYPE_BLACKLIST: List[str] = [] # --------------------------------------------------- # List of data sources not to be refreshed in druid cluster # --------------------------------------------------- -DRUID_DATA_SOURCE_BLACKLIST = [] +DRUID_DATA_SOURCE_BLACKLIST: List[str] = [] # -------------------------------------------------- # Modules, datasources and middleware to be registered @@ -332,8 +342,8 @@ def _try_json_readfile(filepath): ("superset.connectors.druid.models", ["DruidDatasource"]), ] ) -ADDITIONAL_MODULE_DS_MAP = {} -ADDITIONAL_MIDDLEWARE = [] +ADDITIONAL_MODULE_DS_MAP: Dict[str, List[str]] = {} +ADDITIONAL_MIDDLEWARE: List[Callable] = [] # 1) https://docs.python-guide.org/writing/logging/ # 2) https://docs.python.org/2/library/logging.config.html @@ -370,6 +380,7 @@ def _try_json_readfile(filepath): # security_manager=None, # ): # pass +QUERY_LOGGER = None # Set this API key to enable Mapbox visualizations MAPBOX_API_KEY = os.environ.get("MAPBOX_API_KEY", "") @@ -435,8 +446,15 @@ class CeleryConfig(object): # CELERY_CONFIG = None # Additional static HTTP headers to be served by your Superset server. Note -# Flask-Talisman aplies the relevant security HTTP headers. -HTTP_HEADERS = {} +# Flask-Talisman applies the relevant security HTTP headers. +# +# DEFAULT_HTTP_HEADERS: sets default values for HTTP headers. 
These may be overridden +# within the app +# OVERRIDE_HTTP_HEADERS: sets override values for HTTP headers. These values will +# override anything set within the app +DEFAULT_HTTP_HEADERS: Dict[str, Any] = {} +OVERRIDE_HTTP_HEADERS: Dict[str, Any] = {} +HTTP_HEADERS: Dict[str, Any] = {} # The db id here results in selecting this one as a default in SQL Lab DEFAULT_DB_ID = None @@ -486,7 +504,7 @@ class CeleryConfig(object): # SQL Lab. The existing context gets updated with this dictionary, # meaning values for existing keys get overwritten by the content of this # dictionary. -JINJA_CONTEXT_ADDONS = {} +JINJA_CONTEXT_ADDONS: Dict[str, Callable] = {} # Roles that are controlled by the API / Superset and should not be changes # by humans. @@ -515,13 +533,18 @@ class CeleryConfig(object): SMTP_MAIL_FROM = "superset@superset.com" if not CACHE_DEFAULT_TIMEOUT: - CACHE_DEFAULT_TIMEOUT = CACHE_CONFIG.get("CACHE_DEFAULT_TIMEOUT") + CACHE_DEFAULT_TIMEOUT = CACHE_CONFIG["CACHE_DEFAULT_TIMEOUT"] + + +ENABLE_CHUNK_ENCODING = False # Whether to bump the logging level to ERROR on the flask_appbuilder package # Set to False if/when debugging FAB related issues like # permission management SILENCE_FAB = True +FAB_ADD_SECURITY_VIEWS = True + # The link to a page containing common errors and their resolutions # It will be appended at the bottom of sql_lab errors. TROUBLESHOOTING_LINK = "" @@ -535,12 +558,12 @@ class CeleryConfig(object): # Integrate external Blueprints to the app by passing them to your # configuration. These blueprints will get integrated in the app -BLUEPRINTS = [] +BLUEPRINTS: List[Callable] = [] # Provide a callable that receives a tracking_url and returns another # URL. 
This is used to translate internal Hadoop job tracker URL # into a proxied one -TRACKING_URL_TRANSFORMER = lambda x: x # noqa: E731 +TRACKING_URL_TRANSFORMER = lambda x: x # Interval between consecutive polls when using Hive Engine HIVE_POLL_INTERVAL = 5 @@ -623,15 +646,18 @@ class CeleryConfig(object): WEBDRIVER_WINDOW = {"dashboard": (1600, 2000), "slice": (3000, 1200)} # Any config options to be passed as-is to the webdriver -WEBDRIVER_CONFIGURATION = {} +WEBDRIVER_CONFIGURATION: Dict[Any, Any] = {} # The base URL to query for accessing the user interface WEBDRIVER_BASEURL = "http://0.0.0.0:8080/" # Send user to a link where they can report bugs BUG_REPORT_URL = None + # Send user to a link where they can read more about Superset DOCUMENTATION_URL = None +DOCUMENTATION_TEXT = "Documentation" +DOCUMENTATION_ICON = None # Recommended size: 16x16 # What is the Last N days relative in the time selector to: # 'today' means it is midnight (00:00:00) in the local timezone @@ -665,10 +691,25 @@ class CeleryConfig(object): SESSION_COOKIE_SECURE = False # Prevent cookie from being transmitted over non-tls? SESSION_COOKIE_SAMESITE = "Lax" # One of [None, 'Lax', 'Strict'] +# Flask configuration variables +SEND_FILE_MAX_AGE_DEFAULT = 60 * 60 * 24 * 365 # Cache static resources + # URI to database storing the example data, points to # SQLALCHEMY_DATABASE_URI by default if set to `None` SQLALCHEMY_EXAMPLES_URI = None +# SIP-15 should be enabled for all new Superset deployments which ensures that the time +# range endpoints adhere to [start, end). For existing deployments admins should provide +# a dedicated period of time to allow chart producers to update their charts before +# mass migrating all charts to use the [start, end) interval. +# +# Note if no end date for the grace period is specified then the grace period is +# indefinite. 
+SIP_15_ENABLED = False +SIP_15_GRACE_PERIOD_END: Optional[date] = None # exclusive +SIP_15_DEFAULT_TIME_RANGE_ENDPOINTS = ["unknown", "inclusive"] +SIP_15_TOAST_MESSAGE = 'Action Required: Preview then save your chart using the new time range endpoints here.' + if CONFIG_PATH_ENV_VAR in os.environ: # Explicitly import config module that is not necessarily in pythonpath; useful # for case where app is being executed via pex. @@ -688,8 +729,8 @@ class CeleryConfig(object): raise elif importlib.util.find_spec("superset_config"): try: - from superset_config import * # noqa pylint: disable=import-error - import superset_config # noqa pylint: disable=import-error + from superset_config import * # pylint: disable=import-error + import superset_config # pylint: disable=import-error print(f"Loaded your LOCAL configuration at [{superset_config.__file__}]") except Exception: diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py index d84910c6e563..e9ea9d890d14 100644 --- a/superset/connectors/base/models.py +++ b/superset/connectors/base/models.py @@ -16,7 +16,7 @@ # under the License. 
# pylint: disable=C,R,W import json -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Type from flask_appbuilder.security.sqla.models import User from sqlalchemy import and_, Boolean, Column, Integer, String, Text @@ -35,15 +35,17 @@ class BaseDatasource(AuditMixinNullable, ImportMixin): # --------------------------------------------------------------- # class attributes to define when deriving BaseDatasource # --------------------------------------------------------------- - __tablename__ = None # {connector_name}_datasource - type = None # datasoure type, str to be defined when deriving this class - baselink = None # url portion pointing to ModelView endpoint - column_class = None # link to derivative of BaseColumn - metric_class = None # link to derivative of BaseMetric + __tablename__: Optional[str] = None # {connector_name}_datasource + type: Optional[ # datasoure type, str to be defined when deriving this class + str + ] = None + baselink: Optional[str] = None # url portion pointing to ModelView endpoint + column_class: Optional[Type] = None # link to derivative of BaseColumn + metric_class: Optional[Type] = None # link to derivative of BaseMetric owner_class = None # Used to do code highlighting when displaying the query in the UI - query_language = None + query_language: Optional[str] = None name = None # can be a Column or a property pointing to one @@ -341,7 +343,7 @@ def get_extra_cache_keys(self, query_obj: Dict) -> List[Any]: class BaseColumn(AuditMixinNullable, ImportMixin): """Interface for column""" - __tablename__ = None # {connector_name}_column + __tablename__: Optional[str] = None # {connector_name}_column id = Column(Integer, primary_key=True) column_name = Column(String(255), nullable=False) @@ -411,7 +413,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin): """Interface for Metrics""" - __tablename__ = None # {connector_name}_metric + __tablename__: Optional[str] = None # {connector_name}_metric 
id = Column(Integer, primary_key=True) metric_name = Column(String(255), nullable=False) diff --git a/superset/connectors/druid/__init__.py b/superset/connectors/druid/__init__.py index 813d9a29db2c..ad52fc6d8bcb 100644 --- a/superset/connectors/druid/__init__.py +++ b/superset/connectors/druid/__init__.py @@ -14,5 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from . import models # noqa -from . import views # noqa +from . import models, views diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py index 804c216e5e0d..af68f1046af7 100644 --- a/superset/connectors/druid/models.py +++ b/superset/connectors/druid/models.py @@ -16,23 +16,44 @@ # under the License. # pylint: disable=C,R,W # pylint: disable=invalid-unary-operand-type +import json +import logging +import re from collections import OrderedDict from copy import deepcopy from datetime import datetime, timedelta from distutils.version import LooseVersion -import json -import logging from multiprocessing.pool import ThreadPool -import re from typing import Dict, Iterable, List, Optional, Set, Tuple, Union +import pandas as pd +import sqlalchemy as sa from dateutil.parser import parse as dparse from flask import escape, Markup from flask_appbuilder import Model from flask_appbuilder.models.decorators import renders from flask_appbuilder.security.sqla.models import User from flask_babel import lazy_gettext as _ -import pandas as pd +from sqlalchemy import ( + Boolean, + Column, + DateTime, + ForeignKey, + Integer, + String, + Table, + Text, + UniqueConstraint, +) +from sqlalchemy.orm import backref, relationship, RelationshipProperty, Session +from sqlalchemy_utils import EncryptedType + +from superset import conf, db, security_manager +from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric +from superset.exceptions import SupersetException +from 
superset.models.core import Database +from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult +from superset.utils import core as utils, import_datasource try: from pydruid.client import PyDruid @@ -55,39 +76,16 @@ import requests except ImportError: pass -import sqlalchemy as sa -from sqlalchemy import ( - Boolean, - Column, - DateTime, - ForeignKey, - Integer, - String, - Table, - Text, - UniqueConstraint, -) -from sqlalchemy.orm import backref, relationship, RelationshipProperty, Session -from sqlalchemy_utils import EncryptedType - -from superset import conf, db, security_manager -from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric -from superset.exceptions import SupersetException -from superset.models.core import Database -from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult -from superset.utils import core as utils, import_datasource try: from superset.utils.core import DimSelector, DTTM_ALIAS, flasher except ImportError: pass - DRUID_TZ = conf.get("DRUID_TZ") POST_AGG_TYPE = "postagg" metadata = Model.metadata # pylint: disable=no-member - try: # Postaggregator might not have been imported. 
class JavascriptPostAggregator(Postaggregator): @@ -111,7 +109,6 @@ def __init__(self, name, post_aggregator): except NameError: pass - # Function wrapper because bound methods cannot # be passed to processes def _fetch_metadata_for(datasource): @@ -137,14 +134,14 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin): broker_user = Column(String(255)) broker_pass = Column(EncryptedType(String(255), conf.get("SECRET_KEY"))) - export_fields = ( + export_fields = [ "cluster_name", "broker_host", "broker_port", "broker_endpoint", "cache_timeout", "broker_user", - ) + ] update_from_object_fields = export_fields export_children = ["datasources"] @@ -188,7 +185,7 @@ def get_druid_version(self) -> str: auth = requests.auth.HTTPBasicAuth(self.broker_user, self.broker_pass) return json.loads(requests.get(endpoint, auth=auth).text)["version"] - @property # noqa: T484 + @property # type: ignore @utils.memoized def druid_version(self) -> str: return self.get_druid_version() @@ -310,7 +307,7 @@ class DruidColumn(Model, BaseColumn): ) dimension_spec_json = Column(Text) - export_fields = ( + export_fields = [ "datasource_id", "column_name", "is_active", @@ -320,7 +317,7 @@ class DruidColumn(Model, BaseColumn): "description", "dimension_spec_json", "verbose_name", - ) + ] update_from_object_fields = export_fields export_parent = "datasource" @@ -332,9 +329,10 @@ def expression(self) -> str: return self.dimension_spec_json @property - def dimension_spec(self) -> Optional[Dict]: # noqa: T484 + def dimension_spec(self) -> Optional[Dict]: if self.dimension_spec_json: return json.loads(self.dimension_spec_json) + return None def get_metrics(self) -> Dict[str, "DruidMetric"]: metrics = { @@ -397,7 +395,7 @@ class DruidMetric(Model, BaseMetric): ) json = Column(Text, nullable=False) - export_fields = ( + export_fields = [ "metric_name", "verbose_name", "metric_type", @@ -406,7 +404,7 @@ class DruidMetric(Model, BaseMetric): "description", "d3format", "warning_text", - ) + ] 
update_from_object_fields = export_fields export_parent = "datasource" @@ -490,7 +488,7 @@ class DruidDatasource(Model, BaseDatasource): owner_class, secondary=druiddatasource_user, backref="druiddatasources" ) - export_fields = ( + export_fields = [ "datasource_name", "is_hidden", "description", @@ -500,7 +498,7 @@ class DruidDatasource(Model, BaseDatasource): "cache_timeout", "params", "filter_select_enabled", - ) + ] update_from_object_fields = export_fields export_parent = "cluster" @@ -519,7 +517,7 @@ def num_cols(self) -> List[str]: return [c.column_name for c in self.columns if c.is_num] @property - def name(self) -> str: + def name(self) -> str: # type: ignore return self.datasource_name @property @@ -825,7 +823,7 @@ def granularity( granularity["period"] = period_name else: granularity["type"] = "duration" - granularity["duration"] = ( + granularity["duration"] = ( # type: ignore utils.parse_human_timedelta(period_name).total_seconds() * 1000 ) return granularity @@ -928,7 +926,7 @@ def metrics_and_post_aggs( metrics: List[Union[Dict, str]], metrics_dict: Dict[str, DruidMetric], druid_version=None, - ) -> Tuple[OrderedDict, OrderedDict]: # noqa: T484 + ) -> Tuple[OrderedDict, OrderedDict]: # Separate metrics into those that are aggregations # and those that are post aggregations saved_agg_names = set() @@ -937,21 +935,21 @@ def metrics_and_post_aggs( for metric in metrics: if utils.is_adhoc_metric(metric): adhoc_agg_configs.append(metric) - elif metrics_dict[metric].metric_type != POST_AGG_TYPE: # noqa: T484 + elif metrics_dict[metric].metric_type != POST_AGG_TYPE: # type: ignore saved_agg_names.add(metric) else: postagg_names.append(metric) # Create the post aggregations, maintain order since postaggs # may depend on previous ones - post_aggs = OrderedDict() # noqa: T484 + post_aggs: "OrderedDict[str, Postaggregator]" = OrderedDict() visited_postaggs = set() for postagg_name in postagg_names: - postagg = metrics_dict[postagg_name] # noqa: T484 + postagg = 
metrics_dict[postagg_name] # type: ignore visited_postaggs.add(postagg_name) DruidDatasource.resolve_postagg( postagg, post_aggs, saved_agg_names, visited_postaggs, metrics_dict ) - aggs = DruidDatasource.get_aggregations( # noqa: T484 + aggs = DruidDatasource.get_aggregations( # type: ignore metrics_dict, saved_agg_names, adhoc_agg_configs ) return aggs, post_aggs @@ -1042,7 +1040,7 @@ def druid_type_from_adhoc_metric(adhoc_metric: Dict) -> str: @staticmethod def get_aggregations( - metrics_dict: Dict, saved_metrics: Iterable[str], adhoc_metrics: List[Dict] = [] + metrics_dict: Dict, saved_metrics: Set[str], adhoc_metrics: List[Dict] = [] ) -> OrderedDict: """ Returns a dictionary of aggregation metric names to aggregation json objects @@ -1130,14 +1128,14 @@ def sanitize_metric_object(metric: Dict) -> None: ): metric["column"]["type"] = "DOUBLE" - def run_query( # noqa / druid + def run_query( # druid self, groupby, metrics, granularity, from_dttm, to_dttm, - filter=None, # noqa + filter=None, is_timeseries=True, timeseries_limit=None, timeseries_limit_metric=None, @@ -1145,7 +1143,7 @@ def run_query( # noqa / druid inner_from_dttm=None, inner_to_dttm=None, orderby=None, - extras=None, # noqa + extras=None, columns=None, phase=2, client=None, @@ -1224,7 +1222,7 @@ def run_query( # noqa / druid del qry["dimensions"] client.timeseries(**qry) elif not having_filters and len(groupby) == 1 and order_desc: - dim = list(qry.get("dimensions"))[0] # noqa: T484 + dim = list(qry["dimensions"])[0] logging.info("Running two-phase topn query for dimension [{}]".format(dim)) pre_qry = deepcopy(qry) if timeseries_limit_metric: @@ -1443,7 +1441,7 @@ def _create_extraction_fn(dim_spec): return (col, extraction_fn) @classmethod - def get_filters(cls, raw_filters, num_cols, columns_dict) -> Filter: # noqa: T484 + def get_filters(cls, raw_filters, num_cols, columns_dict) -> Filter: """Given Superset filter data structure, returns pydruid Filter(s)""" filters = None for flt in 
raw_filters: @@ -1561,9 +1559,9 @@ def get_filters(cls, raw_filters, num_cols, columns_dict) -> Filter: # noqa: T4 alphaNumeric=is_numeric_col, ) elif op == "IS NULL": - cond = Dimension(col) == None # NOQA + cond = Dimension(col) is None elif op == "IS NOT NULL": - cond = Dimension(col) != None # NOQA + cond = Dimension(col) is not None if filters: filters = Filter(type="and", fields=[cond, filters]) diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py index 606d3380b8ac..0f2fcd42f604 100644 --- a/superset/connectors/druid/views.py +++ b/superset/connectors/druid/views.py @@ -15,20 +15,19 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=C,R,W -from datetime import datetime import json import logging +from datetime import datetime from flask import flash, Markup, redirect from flask_appbuilder import CompactCRUDMixin, expose from flask_appbuilder.fieldwidgets import Select2Widget from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_appbuilder.security.decorators import has_access -from flask_babel import gettext as __ -from flask_babel import lazy_gettext as _ +from flask_babel import gettext as __, lazy_gettext as _ from wtforms.ext.sqlalchemy.fields import QuerySelectField -from superset import appbuilder, db, security_manager +from superset import app, appbuilder, db, security_manager from superset.connectors.base.views import DatasourceModelView from superset.connectors.connector_registry import ConnectorRegistry from superset.utils import core as utils @@ -42,10 +41,11 @@ validate_json, YamlExportMixin, ) + from . 
import models -class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa +class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): datamodel = SQLAInterface(models.DruidColumn) list_title = _("Columns") @@ -131,10 +131,7 @@ def post_add(self, col): self.post_update(col) -appbuilder.add_view_no_menu(DruidColumnInlineView) - - -class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa +class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): datamodel = SQLAInterface(models.DruidMetric) list_title = _("Metrics") @@ -186,10 +183,7 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa edit_form_extra_fields = add_form_extra_fields -appbuilder.add_view_no_menu(DruidMetricInlineView) - - -class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin): # noqa +class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin): datamodel = SQLAInterface(models.DruidCluster) list_title = _("Druid Clusters") @@ -239,6 +233,8 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin): # ), } + yaml_dict_key = "databases" + edit_form_extra_fields = { "cluster_name": QuerySelectField( "Cluster", @@ -257,20 +253,7 @@ def _delete(self, pk): DeleteMixin._delete(self, pk) -appbuilder.add_view( - DruidClusterModelView, - name="Druid Clusters", - label=__("Druid Clusters"), - icon="fa-cubes", - category="Sources", - category_label=__("Sources"), - category_icon="fa-database", -) - - -class DruidDatasourceModelView( - DatasourceModelView, DeleteMixin, YamlExportMixin -): # noqa +class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin): datamodel = SQLAInterface(models.DruidDatasource) list_title = _("Druid Datasources") @@ -380,16 +363,6 @@ def _delete(self, pk): DeleteMixin._delete(self, pk) -appbuilder.add_view( - DruidDatasourceModelView, - "Druid Datasources", - label=__("Druid Datasources"), - category="Sources", - 
category_label=__("Sources"), - icon="fa-cube", -) - - class Druid(BaseSupersetView): """The base views for Superset!""" @@ -435,26 +408,49 @@ def scan_new_datasources(self): return self.refresh_datasources(refresh_all=False) -appbuilder.add_view_no_menu(Druid) - -appbuilder.add_link( - "Scan New Datasources", - label=__("Scan New Datasources"), - href="/druid/scan_new_datasources/", - category="Sources", - category_label=__("Sources"), - category_icon="fa-database", - icon="fa-refresh", -) -appbuilder.add_link( - "Refresh Druid Metadata", - label=__("Refresh Druid Metadata"), - href="/druid/refresh_datasources/", - category="Sources", - category_label=__("Sources"), - category_icon="fa-database", - icon="fa-cog", -) - - -appbuilder.add_separator("Sources") +if app.config["DRUID_IS_ACTIVE"]: + appbuilder.add_view( + DruidDatasourceModelView, + "Druid Datasources", + label=__("Druid Datasources"), + category="Sources", + category_label=__("Sources"), + icon="fa-cube", + ) + + appbuilder.add_view( + DruidClusterModelView, + name="Druid Clusters", + label=__("Druid Clusters"), + icon="fa-cubes", + category="Sources", + category_label=__("Sources"), + category_icon="fa-database", + ) + + appbuilder.add_view_no_menu(DruidMetricInlineView) + + appbuilder.add_view_no_menu(DruidColumnInlineView) + + appbuilder.add_view_no_menu(Druid) + + appbuilder.add_link( + "Scan New Datasources", + label=__("Scan New Datasources"), + href="/druid/scan_new_datasources/", + category="Sources", + category_label=__("Sources"), + category_icon="fa-database", + icon="fa-refresh", + ) + appbuilder.add_link( + "Refresh Druid Metadata", + label=__("Refresh Druid Metadata"), + href="/druid/refresh_datasources/", + category="Sources", + category_label=__("Sources"), + category_icon="fa-database", + icon="fa-cog", + ) + + appbuilder.add_separator("Sources") diff --git a/superset/connectors/sqla/__init__.py b/superset/connectors/sqla/__init__.py index 813d9a29db2c..ad52fc6d8bcb 100644 --- 
a/superset/connectors/sqla/__init__.py +++ b/superset/connectors/sqla/__init__.py @@ -14,5 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from . import models # noqa -from . import views # noqa +from . import models, views diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 054fa2b3def7..c1ab44e5111a 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -15,17 +15,18 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=C,R,W -from collections import OrderedDict -from datetime import datetime import logging import re -from typing import Any, Dict, List, NamedTuple, Optional, Union +from collections import OrderedDict +from datetime import datetime +from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union +import pandas as pd +import sqlalchemy as sa +import sqlparse from flask import escape, Markup from flask_appbuilder import Model from flask_babel import lazy_gettext as _ -import pandas as pd -import sqlalchemy as sa from sqlalchemy import ( and_, asc, @@ -47,7 +48,6 @@ from sqlalchemy.schema import UniqueConstraint from sqlalchemy.sql import column, ColumnElement, literal_column, table, text from sqlalchemy.sql.expression import Label, Select, TextAsFrom -import sqlparse from superset import app, db, security_manager from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric @@ -126,7 +126,7 @@ class TableColumn(Model, BaseColumn): expression = Column(Text) python_date_format = Column(String(255)) - export_fields = ( + export_fields = [ "table_id", "column_name", "verbose_name", @@ -138,7 +138,7 @@ class TableColumn(Model, BaseColumn): "expression", "description", "python_date_format", - ) + ] update_from_object_fields = [s for s in export_fields if s not in ("table_id",)] export_parent = "table" @@ 
-159,14 +159,29 @@ def datasource(self) -> RelationshipProperty: return self.table def get_time_filter( - self, start_dttm: DateTime, end_dttm: DateTime + self, + start_dttm: DateTime, + end_dttm: DateTime, + time_range_endpoints: Optional[ + Tuple[utils.TimeRangeEndpoint, utils.TimeRangeEndpoint] + ], ) -> ColumnElement: col = self.get_sqla_col(label="__time") - l = [] # noqa: E741 + l = [] if start_dttm: - l.append(col >= text(self.dttm_sql_literal(start_dttm))) + l.append( + col >= text(self.dttm_sql_literal(start_dttm, time_range_endpoints)) + ) if end_dttm: - l.append(col <= text(self.dttm_sql_literal(end_dttm))) + if ( + time_range_endpoints + and time_range_endpoints[1] == utils.TimeRangeEndpoint.EXCLUSIVE + ): + l.append( + col < text(self.dttm_sql_literal(end_dttm, time_range_endpoints)) + ) + else: + l.append(col <= text(self.dttm_sql_literal(end_dttm, None))) return and_(*l) def get_timestamp_expression( @@ -207,19 +222,47 @@ def lookup_obj(lookup_column): return import_datasource.import_simple_obj(db.session, i_column, lookup_obj) - def dttm_sql_literal(self, dttm: DateTime) -> str: + def dttm_sql_literal( + self, + dttm: DateTime, + time_range_endpoints: Optional[ + Tuple[utils.TimeRangeEndpoint, utils.TimeRangeEndpoint] + ], + ) -> str: """Convert datetime object to a SQL expression string""" + sql = ( + self.table.database.db_engine_spec.convert_dttm(self.type, dttm) + if self.type + else None + ) + + if sql: + return sql + tf = self.python_date_format + + # Fallback to the default format (if defined) only if the SIP-15 time range + # endpoints, i.e., [start, end) are enabled. 
+ if not tf and time_range_endpoints == ( + utils.TimeRangeEndpoint.INCLUSIVE, + utils.TimeRangeEndpoint.EXCLUSIVE, + ): + tf = ( + self.table.database.get_extra() + .get("python_date_format_by_column_name", {}) + .get(self.column_name) + ) + if tf: - seconds_since_epoch = int(dttm.timestamp()) - if tf == "epoch_s": - return str(seconds_since_epoch) - elif tf == "epoch_ms": + if tf in ["epoch_ms", "epoch_s"]: + seconds_since_epoch = int(dttm.timestamp()) + if tf == "epoch_s": + return str(seconds_since_epoch) return str(seconds_since_epoch * 1000) - return "'{}'".format(dttm.strftime(tf)) - else: - s = self.table.database.db_engine_spec.convert_dttm(self.type or "", dttm) - return s or "'{}'".format(dttm.strftime("%Y-%m-%d %H:%M:%S.%f")) + return f"'{dttm.strftime(tf)}'" + + # TODO(john-bodley): SIP-15 will explicitly require a type conversion. + return f"""'{dttm.strftime("%Y-%m-%d %H:%M:%S.%f")}'""" class SqlMetric(Model, BaseMetric): @@ -236,7 +279,7 @@ class SqlMetric(Model, BaseMetric): ) expression = Column(Text, nullable=False) - export_fields = ( + export_fields = [ "metric_name", "verbose_name", "metric_type", @@ -245,7 +288,7 @@ class SqlMetric(Model, BaseMetric): "description", "d3format", "warning_text", - ) + ] update_from_object_fields = list( [s for s in export_fields if s not in ("table_id",)] ) @@ -323,7 +366,7 @@ class SqlaTable(Model, BaseDatasource): baselink = "tablemodelview" - export_fields = ( + export_fields = [ "table_name", "main_dttm_col", "description", @@ -337,7 +380,7 @@ class SqlaTable(Model, BaseDatasource): "template_params", "filter_select_enabled", "fetch_values_predicate", - ) + ] update_from_object_fields = [ f for f in export_fields if f not in ("table_name", "database_id") ] @@ -425,7 +468,7 @@ def get_perm(self) -> str: return ("[{obj.database}].[{obj.table_name}]" "(id:{obj.id})").format(obj=self) @property - def name(self) -> str: + def name(self) -> str: # type: ignore if not self.schema: return self.table_name return 
"{}.{}".format(self.schema, self.table_name) @@ -438,7 +481,7 @@ def full_name(self) -> str: @property def dttm_cols(self) -> List: - l = [c.column_name for c in self.columns if c.is_dttm] # noqa: E741 + l = [c.column_name for c in self.columns if c.is_dttm] if self.main_dttm_col and self.main_dttm_col not in l: l.append(self.main_dttm_col) return l @@ -542,7 +585,7 @@ def mutate_query_from_config(self, sql: str) -> str: """Apply config's SQL_QUERY_MUTATOR Typically adds comments to the query with context""" - SQL_QUERY_MUTATOR = config.get("SQL_QUERY_MUTATOR") + SQL_QUERY_MUTATOR = config["SQL_QUERY_MUTATOR"] if SQL_QUERY_MUTATOR: username = utils.get_username() sql = SQL_QUERY_MUTATOR(sql, username, security_manager, self.database) @@ -616,7 +659,7 @@ def get_sqla_query( # sqla granularity, from_dttm, to_dttm, - filter=None, # noqa + filter=None, is_timeseries=True, timeseries_limit=15, timeseries_limit_metric=None, @@ -703,6 +746,7 @@ def get_sqla_query( # sqla ) metrics_exprs = [] + time_range_endpoints = extras.get("time_range_endpoints") groupby_exprs_with_timestamp = OrderedDict(groupby_exprs_sans_timestamp.items()) if granularity: dttm_col = cols[granularity] @@ -714,16 +758,20 @@ def get_sqla_query( # sqla select_exprs += [timestamp] groupby_exprs_with_timestamp[timestamp.name] = timestamp - # Use main dttm column to support index with secondary dttm columns + # Use main dttm column to support index with secondary dttm columns. 
if ( db_engine_spec.time_secondary_columns and self.main_dttm_col in self.dttm_cols and self.main_dttm_col != dttm_col.column_name ): time_filters.append( - cols[self.main_dttm_col].get_time_filter(from_dttm, to_dttm) + cols[self.main_dttm_col].get_time_filter( + from_dttm, to_dttm, time_range_endpoints + ) ) - time_filters.append(dttm_col.get_time_filter(from_dttm, to_dttm)) + time_filters.append( + dttm_col.get_time_filter(from_dttm, to_dttm, time_range_endpoints) + ) select_exprs += metrics_exprs @@ -757,7 +805,7 @@ def get_sqla_query( # sqla if op in ("in", "not in"): cond = col_obj.get_sqla_col().in_(eq) if "" in eq: - cond = or_(cond, col_obj.get_sqla_col() == None) # noqa + cond = or_(cond, col_obj.get_sqla_col() == None) if op == "not in": cond = ~cond where_clause_and.append(cond) @@ -779,9 +827,9 @@ def get_sqla_query( # sqla elif op == "LIKE": where_clause_and.append(col_obj.get_sqla_col().like(eq)) elif op == "IS NULL": - where_clause_and.append(col_obj.get_sqla_col() == None) # noqa + where_clause_and.append(col_obj.get_sqla_col() == None) elif op == "IS NOT NULL": - where_clause_and.append(col_obj.get_sqla_col() != None) # noqa + where_clause_and.append(col_obj.get_sqla_col() != None) if extras: where = extras.get("where") if where: @@ -829,7 +877,9 @@ def get_sqla_query( # sqla inner_select_exprs += [inner_main_metric_expr] subq = select(inner_select_exprs).select_from(tbl) inner_time_filter = dttm_col.get_time_filter( - inner_from_dttm or from_dttm, inner_to_dttm or to_dttm + inner_from_dttm or from_dttm, + inner_to_dttm or to_dttm, + time_range_endpoints, ) subq = subq.where(and_(*(where_clause_and + [inner_time_filter]))) subq = subq.group_by(*inner_groupby_exprs) @@ -974,7 +1024,7 @@ def fetch_metadata(self) -> None: ).format(self.table_name) ) - M = SqlMetric # noqa + M = SqlMetric metrics = [] any_date_col = None db_engine_spec = self.database.db_engine_spec diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py 
index 3b242ec8ed4d..c508aa9a66f0 100644 --- a/superset/connectors/sqla/views.py +++ b/superset/connectors/sqla/views.py @@ -17,6 +17,7 @@ # pylint: disable=C,R,W """Views used by the SqlAlchemy connector""" import logging +import re from flask import flash, Markup, redirect from flask_appbuilder import CompactCRUDMixin, expose @@ -24,9 +25,9 @@ from flask_appbuilder.fieldwidgets import Select2Widget from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_appbuilder.security.decorators import has_access -from flask_babel import gettext as __ -from flask_babel import lazy_gettext as _ +from flask_babel import gettext as __, lazy_gettext as _ from wtforms.ext.sqlalchemy.fields import QuerySelectField +from wtforms.validators import Regexp from superset import appbuilder, db, security_manager from superset.connectors.base.views import DatasourceModelView @@ -39,12 +40,13 @@ SupersetModelView, YamlExportMixin, ) + from . import models logger = logging.getLogger(__name__) -class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa +class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): datamodel = SQLAInterface(models.TableColumn) list_title = _("Columns") @@ -99,12 +101,20 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa ), "python_date_format": utils.markdown( Markup( - "The pattern of timestamp format, use " + "The pattern of timestamp format. For strings use " '' - "python datetime string pattern " - "expression. If time is stored in epoch " - "format, put `epoch_s` or `epoch_ms`." + "python datetime string pattern expression which needs to " + 'adhere to the ' + "ISO 8601 standard to ensure that the lexicographical ordering " + "coincides with the chronological ordering. If the timestamp " + "format does not adhere to the ISO 8601 standard you will need to " + "define an expression and type for transforming the string into a " + "date or timestamp. Note currently time zones are not supported. 
" + "If time is stored in epoch format, put `epoch_s` or `epoch_ms`." + "If no pattern is specified we fall back to using the optional " + "defaults on a per database/column name level via the extra parameter." + "" ), True, ), @@ -121,6 +131,24 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa "python_date_format": _("Datetime Format"), "type": _("Type"), } + validators_columns = { + "python_date_format": [ + # Restrict viable values to epoch_s, epoch_ms, or a strftime format + # which adhere's to the ISO 8601 format (without time zone). + Regexp( + re.compile( + r""" + ^( + epoch_s|epoch_ms| + (?P%Y(-%m(-%d)?)?)([\sT](?P