
Commit

Merge branch 'master' of https://github.com/apache/superset into chore/native-filters-disable-save
geido committed Oct 21, 2021
2 parents acab402 + 4c708af commit f584fa6
Showing 18 changed files with 99 additions and 39 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/superset-frontend.yml
@@ -31,7 +31,7 @@ jobs:
if: steps.check.outcome == 'failure'
uses: actions/setup-node@v2
with:
node-version: '16'
node-version: "16"
- name: Install dependencies
if: steps.check.outcome == 'failure'
uses: ./.github/actions/cached-dependencies
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -48,3 +48,8 @@ repos:
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.2.1 # Use the sha or tag you want to point at
hooks:
- id: prettier
files: 'superset-frontend'
2 changes: 1 addition & 1 deletion superset-frontend/.storybook/storybook.css
@@ -1,3 +1,3 @@
body{
body {
background: transparent;
}
4 changes: 1 addition & 3 deletions superset-frontend/cypress-base/cypress.json
@@ -5,9 +5,7 @@
"numTestsKeptInMemory": 0,
"experimentalFetchPolyfill": true,
"requestTimeout": 10000,
"ignoreTestFiles": [
"**/!(*.test.js|*.test.ts)"
],
"ignoreTestFiles": ["**/!(*.test.js|*.test.ts)"],
"video": false,
"videoUploadOnPasses": false,
"viewportWidth": 1280,
2 changes: 1 addition & 1 deletion superset-frontend/cypress-base/tsconfig.json
@@ -8,7 +8,7 @@
"lib": ["es2019", "DOM"],
"types": ["cypress"],
"allowJs": true,
"noEmit": true,
"noEmit": true
},
"files": ["cypress/support/index.d.ts"],
"include": ["node_modules/cypress", "cypress/**/*.ts"]
6 changes: 3 additions & 3 deletions superset-frontend/src/common/components/.eslintrc
@@ -23,9 +23,9 @@
{
"paths": [
{
"name": "@superset-ui/core",
"importNames": ["supersetTheme"],
"message": "Please use the theme directly from the ThemeProvider rather than importing supersetTheme."
"name": "@superset-ui/core",
"importNames": ["supersetTheme"],
"message": "Please use the theme directly from the ThemeProvider rather than importing supersetTheme."
}
]
}
4 changes: 1 addition & 3 deletions superset-frontend/src/components/.eslintrc
@@ -24,9 +24,7 @@
"paths": [
{
"name": "@superset-ui/core",
"importNames": [
"supersetTheme"
],
"importNames": ["supersetTheme"],
"message": "Please use the theme directly from the ThemeProvider rather than importing supersetTheme."
}
]
@@ -409,8 +409,8 @@ class DatasourceEditor extends React.PureComponent {
datasource: {
...props.datasource,
owners: props.datasource.owners.map(owner => ({
value: owner.id,
label: `${owner.first_name} ${owner.last_name}`,
value: owner.value || owner.id,
label: owner.label || `${owner.first_name} ${owner.last_name}`,
})),
metrics: props.datasource.metrics?.map(metric => {
const {
@@ -119,14 +119,17 @@ const DatasourceModal: FunctionComponent<DatasourceModalProps> = ({
),
type: currentDatasource.type || currentDatasource.datasource_type,
owners: currentDatasource.owners.map(
(o: { label: string; value: number }) => o.value,
(o: Record<string, number>) => o.value || o.id,
),
},
},
})
.then(({ json }) => {
addSuccessToast(t('The dataset has been saved'));
onDatasourceSave(json);
onDatasourceSave({
...json,
owners: currentDatasource.owners,
});
onHide();
})
.catch(response => {
2 changes: 1 addition & 1 deletion superset-frontend/tsconfig.json
@@ -7,7 +7,7 @@
"forceConsistentCasingInFileNames": true,
"importHelpers": false,
"jsx": "preserve",
"lib": ["dom", "dom.iterable", "esnext"],
"lib": ["dom", "dom.iterable", "esnext"],
"module": "esnext",
"moduleResolution": "node",
"noImplicitAny": true,
13 changes: 2 additions & 11 deletions superset/config.py
@@ -30,24 +30,15 @@
import sys
from collections import OrderedDict
from datetime import date, timedelta
from typing import (
Any,
Callable,
Dict,
List,
Literal,
Optional,
Type,
TYPE_CHECKING,
Union,
)
from typing import Any, Callable, Dict, List, Optional, Type, TYPE_CHECKING, Union

from cachelib.base import BaseCache
from celery.schedules import crontab
from dateutil import tz
from flask import Blueprint
from flask_appbuilder.security.manager import AUTH_DB
from pandas.io.parsers import STR_NA_VALUES
from typing_extensions import Literal
from werkzeug.local import LocalProxy

from superset.jinja_context import BaseTemplateProcessor
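The typing import above is collapsed to one line and Literal now comes from typing_extensions, presumably because typing.Literal only exists on Python 3.8+ while typing_extensions backports it to older interpreters. The commit imports it from typing_extensions unconditionally; the version guard below is only a sketch of why the move matters, and the FilterScope alias is invented for illustration:

```python
import sys

# typing.Literal was added in Python 3.8; typing_extensions provides the same
# symbol for older interpreters, which is why config.py imports it from there.
if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal

# Hypothetical alias showing the usual Literal pattern for constrained config values.
FilterScope = Literal["global", "dashboard"]


def describe(scope: FilterScope) -> str:
    return f"filters apply at the {scope} level"


assert describe("dashboard") == "filters apply at the dashboard level"
```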
4 changes: 3 additions & 1 deletion superset/connectors/base/models.py
@@ -419,7 +419,9 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult:
"""
raise NotImplementedError()

def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
def values_for_column(
self, column_name: str, limit: int = 10000, contain_null: bool = True,
) -> List[Any]:
"""Given a column, returns an iterable of distinct values
This is used to populate the dropdown showing a list of
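values_for_column now takes a contain_null flag on the base connector interface, so subclasses are expected to accept it even when they ignore it. A minimal sketch of that contract with an in-memory toy connector (ToyDatasource and its sample values are illustrative only, not part of this commit):

```python
from typing import Any, List


class ToyDatasource:
    """Illustrative stand-in for a BaseDatasource subclass."""

    _values = ["bar", "foo", None]

    def values_for_column(
        self, column_name: str, limit: int = 10000, contain_null: bool = True,
    ) -> List[Any]:
        # Honour the new flag: drop NULLs when contain_null is False.
        values = [v for v in self._values if contain_null or v is not None]
        return values[:limit]


assert ToyDatasource().values_for_column("foo", contain_null=False) == ["bar", "foo"]
```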
4 changes: 3 additions & 1 deletion superset/connectors/druid/models.py
@@ -948,7 +948,9 @@ def metrics_and_post_aggs(
)
return aggs, post_aggs

def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
def values_for_column(
self, column_name: str, limit: int = 10000, contain_null: bool = True,
) -> List[Any]:
"""Retrieve some values for the given column"""
logger.info(
"Getting values for columns [{}] limited to [{}]".format(column_name, limit)
11 changes: 9 additions & 2 deletions superset/connectors/sqla/models.py
@@ -173,7 +173,9 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult:
def get_query_str(self, query_obj: QueryObjectDict) -> str:
raise NotImplementedError()

def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
def values_for_column(
self, column_name: str, limit: int = 10000, contain_null: bool = True,
) -> List[Any]:
raise NotImplementedError()


@@ -712,7 +714,9 @@ def get_fetch_values_predicate(self) -> TextClause:
)
) from ex

def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
def values_for_column(
self, column_name: str, limit: int = 10000, contain_null: bool = True,
) -> List[Any]:
"""Runs query against sqla to retrieve some
sample values for the given column.
"""
@@ -728,6 +732,9 @@ def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
if limit:
qry = qry.limit(limit)

if not contain_null:
qry = qry.where(target_col.get_sqla_col().isnot(None))

if self.fetch_values_predicate:
qry = qry.where(self.get_fetch_values_predicate())

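The new branch above appends an IS NOT NULL predicate via SQLAlchemy before the fetch-values predicate is applied. A standalone sketch of the same filtering against a plain Core table (SQLAlchemy 1.4+ style; the in-memory SQLite table is only a stand-in for the dataset's physical table):

```python
from sqlalchemy import Column, MetaData, String, Table, create_engine, select

# In-memory SQLite database for the sketch.
engine = create_engine("sqlite://")
metadata = MetaData()
t = Table("t", metadata, Column("foo", String))
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(t.insert(), [{"foo": "bar"}, {"foo": "foo"}, {"foo": None}])
    qry = select(t.c.foo).limit(10000)
    # Equivalent of contain_null=False: keep only non-NULL values.
    qry = qry.where(t.c.foo.isnot(None))
    values = [row[0] for row in conn.execute(qry)]

assert None not in values and len(values) == 2
```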
20 changes: 14 additions & 6 deletions superset/sql_lab.py
@@ -38,6 +38,7 @@
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetErrorException, SupersetErrorsException
from superset.extensions import celery_app
from superset.models.core import Database
from superset.models.sql_lab import Query
from superset.result_set import SupersetResultSet
from superset.sql_parse import CtasMethod, ParsedQuery
@@ -221,12 +222,7 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-locals
):
if SQL_MAX_ROW and (not query.limit or query.limit > SQL_MAX_ROW):
query.limit = SQL_MAX_ROW
if query.limit:
# We are fetching one more than the requested limit in order
# to test whether there are more rows than the limit.
# Later, the extra row will be dropped before sending
# the results back to the user.
sql = database.apply_limit_to_sql(sql, increased_limit, force=True)
sql = apply_limit_if_exists(database, increased_limit, query, sql)

# Hook to allow environment-specific mutation (usually comments) to the SQL
sql = SQL_QUERY_MUTATOR(sql, user_name, security_manager, database)
@@ -291,6 +287,18 @@ def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-locals
return SupersetResultSet(data, cursor_description, db_engine_spec)


def apply_limit_if_exists(
database: Database, increased_limit: Optional[int], query: Query, sql: str
) -> str:
if query.limit and increased_limit:
# We are fetching one more than the requested limit in order
# to test whether there are more rows than the limit.
# Later, the extra row will be dropped before sending
# the results back to the user.
sql = database.apply_limit_to_sql(sql, increased_limit, force=True)
return sql


def _serialize_payload(
payload: Dict[Any, Any], use_msgpack: Optional[bool] = False
) -> Union[bytes, str]:
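apply_limit_if_exists only rewrites the SQL when both query.limit and increased_limit are set; per the comment, the caller over-fetches by one row and drops it later to detect whether the result was truncated. A sketch of that trim step (probe_for_more_rows is a hypothetical helper written for illustration, not part of sql_lab.py):

```python
from typing import Any, List, Tuple


def probe_for_more_rows(rows: List[Any], requested_limit: int) -> Tuple[List[Any], bool]:
    """Trim an over-fetched result set and flag truncation.

    The query was issued with requested_limit + 1 (the increased_limit passed to
    apply_limit_if_exists), so the presence of an extra row signals more data exists.
    """
    was_truncated = len(rows) > requested_limit
    return rows[:requested_limit], was_truncated


rows, truncated = probe_for_more_rows(list(range(301)), requested_limit=300)
assert truncated and len(rows) == 300
```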
4 changes: 3 additions & 1 deletion superset/views/core.py
@@ -921,7 +921,9 @@ def filter( # pylint: disable=no-self-use
datasource.raise_for_access()
row_limit = apply_max_row_limit(config["FILTER_SELECT_ROW_LIMIT"])
payload = json.dumps(
datasource.values_for_column(column, row_limit),
datasource.values_for_column(
column_name=column, limit=row_limit, contain_null=False,
),
default=utils.json_int_dttm_ser,
ignore_nan=True,
)
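Passing contain_null=False means the /filter endpoint no longer emits a JSON null into the dropdown payload. A toy illustration of the payload difference (the value lists are invented for the example):

```python
import json

with_null = ["bar", "foo", None]  # old behaviour: contain_null defaulted to True
without_null = ["bar", "foo"]     # behaviour after this commit for the filter endpoint

print(json.dumps(with_null))     # ["bar", "foo", null]
print(json.dumps(without_null))  # ["bar", "foo"]
```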
20 changes: 20 additions & 0 deletions tests/integration_tests/sqla_models_tests.py
@@ -463,3 +463,23 @@ def test_labels_expected_on_mutated_query(self):
db.session.delete(table)
db.session.delete(database)
db.session.commit()

def test_values_for_column(self):
table = SqlaTable(
table_name="test_null_in_column",
sql="SELECT 'foo' as foo UNION SELECT 'bar' UNION SELECT NULL",
database=get_example_database(),
)
TableColumn(column_name="foo", type="VARCHAR(255)", table=table)

with_null = table.values_for_column(
column_name="foo", limit=10000, contain_null=True
)
assert None in with_null
assert len(with_null) == 3

without_null = table.values_for_column(
column_name="foo", limit=10000, contain_null=False
)
assert None not in without_null
assert len(without_null) == 2
24 changes: 24 additions & 0 deletions tests/integration_tests/sqllab_tests.py
@@ -44,6 +44,7 @@
execute_sql_statement,
get_sql_results,
SqlLabException,
apply_limit_if_exists,
)
from superset.sql_parse import CtasMethod
from superset.utils.core import (
@@ -990,6 +991,29 @@ def test_sql_json_soft_timeout(self):
]
}

def test_apply_limit_if_exists_when_incremented_limit_is_none(self):
sql = """
SET @value = 42;
SELECT @value AS foo;
"""
database = get_example_database()
mock_query = mock.MagicMock()
mock_query.limit = 300
final_sql = apply_limit_if_exists(database, None, mock_query, sql)

assert final_sql == sql

def test_apply_limit_if_exists_when_increased_limit(self):
sql = """
SET @value = 42;
SELECT @value AS foo;
"""
database = get_example_database()
mock_query = mock.MagicMock()
mock_query.limit = 300
final_sql = apply_limit_if_exists(database, 1000, mock_query, sql)
assert "LIMIT 1000" in final_sql


@pytest.mark.parametrize("spec", [HiveEngineSpec, PrestoEngineSpec])
def test_cancel_query_implicit(spec: BaseEngineSpec) -> None:
