chore(pylint): Bump Pylint to 2.9.6 #16146

Merged
1 change: 1 addition & 0 deletions .pylintrc
@@ -90,6 +90,7 @@ disable=
    super-with-arguments,
    too-few-public-methods,
    too-many-locals,
+   duplicate-code,

[REPORTS]

4 changes: 2 additions & 2 deletions requirements/testing.txt
@@ -11,7 +11,7 @@
    # via -r requirements/base.in
appnope==0.1.2
    # via ipython
-astroid==2.5
+astroid==2.6.6
    # via pylint
backcall==0.2.0
    # via ipython
@@ -71,7 +71,7 @@ pyhive[hive,presto]==0.6.4
    # via
    #   -r requirements/development.in
    #   -r requirements/testing.in
-pylint==2.6.0
+pylint==2.9.6
    # via -r requirements/testing.in
pytest==6.2.4
    # via
20 changes: 14 additions & 6 deletions superset/cli.py
@@ -221,7 +221,7 @@ def import_directory(directory: str, overwrite: bool, force: bool) -> None:
help="Create the DB if it doesn't exist",
)
def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
"""Updates a database connection URI """
"""Updates a database connection URI"""
utils.get_or_create_db(database_name, uri, not skip_create)


@@ -341,7 +341,8 @@ def import_dashboards(path: str, username: Optional[str]) -> None:
        with ZipFile(path) as bundle:
            contents = get_contents_from_bundle(bundle)
    else:
-        contents = {path: open(path).read()}
+        with open(path) as file:
+            contents = {path: file.read()}
    try:
        ImportDashboardsCommand(contents, overwrite=True).run()
    except Exception:  # pylint: disable=broad-except
@@ -366,7 +367,8 @@ def import_datasources(path: str) -> None:
        with ZipFile(path) as bundle:
            contents = get_contents_from_bundle(bundle)
    else:
-        contents = {path: open(path).read()}
+        with open(path) as file:
+            contents = {path: file.read()}
    try:
        ImportDatasetsCommand(contents, overwrite=True).run()
    except Exception:  # pylint: disable=broad-except
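
These two hunks address pylint's consider-using-with check (R1732, added somewhere around the 2.8 line, as far as I recall), which flags resource-allocating calls whose result is never explicitly closed. A minimal sketch of the before/after shape, with hypothetical helper names:

```python
# Flagged: the handle is closed only whenever the object happens to be
# garbage-collected, which is not guaranteed to be prompt.
def read_all_flagged(path: str) -> str:
    return open(path).read()

# Preferred: the context manager closes the handle deterministically,
# even if read() raises.
def read_all(path: str) -> str:
    with open(path) as file:
        return file.read()
```
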
@@ -491,7 +493,10 @@ def import_dashboards(path: str, recursive: bool, username: str) -> None:
        files.extend(path_object.rglob("*.json"))
    if username is not None:
        g.user = security_manager.find_user(username=username)
-    contents = {path.name: open(path).read() for path in files}
+    contents = {}
+    for path_ in files:
+        with open(path_) as file:
+            contents[path_.name] = file.read()
    try:
        ImportDashboardsCommand(contents).run()
    except Exception:  # pylint: disable=broad-except
@@ -539,7 +544,10 @@ def import_datasources(path: str, sync: str, recursive: bool) -> None:
    elif path_object.exists() and recursive:
        files.extend(path_object.rglob("*.yaml"))
        files.extend(path_object.rglob("*.yml"))
-    contents = {path.name: open(path).read() for path in files}
+    contents = {}
+    for path_ in files:
+        with open(path_) as file:
+            contents[path_.name] = file.read()
    try:
        ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
    except Exception:  # pylint: disable=broad-except
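
The dict comprehensions above had to be unrolled because a with statement cannot wrap each open() inside a comprehension. An alternative that would have kept the one-liner (not what this PR does) is pathlib's read_text(), which opens and closes the file internally; a sketch with a hypothetical helper:

```python
from pathlib import Path
from typing import Dict, List

def read_contents(files: List[Path]) -> Dict[str, str]:
    # Path.read_text() handles open/close itself, so no handles leak
    # and the comprehension stays a one-liner.
    return {path.name: path.read_text() for path in files}
```
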
@@ -632,7 +640,7 @@ def flower(port: int, address: str) -> None:
    print(Fore.BLUE + "-=" * 40)
    print(Fore.YELLOW + cmd)
    print(Fore.BLUE + "-=" * 40)
-    Popen(cmd, shell=True).wait()
+    Popen(cmd, shell=True).wait()  # pylint: disable=consider-using-with


@superset.command()
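
Here the check is suppressed rather than fixed. An alternative worth noting (untested against this code path) is that Popen has supported the context-manager protocol since Python 3.2, so the same line could satisfy the check directly:

```python
from subprocess import Popen

cmd = "echo flower"  # hypothetical stand-in for the real celery command line
# Exiting the block waits on the process and closes any pipes,
# which is exactly what consider-using-with wants.
with Popen(cmd, shell=True) as proc:
    proc.wait()
```
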
2 changes: 1 addition & 1 deletion superset/config.py
@@ -20,7 +20,7 @@
in your PYTHONPATH as there is a ``from superset_config import *``
at the end of this file.
"""
-import imp
+import imp  # pylint: disable=deprecated-module
import importlib.util
import json
import logging
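
The imp module has been deprecated since Python 3.4 in favour of importlib; the warning is suppressed here, presumably to keep the PR small. A rough sketch of the importlib equivalent for loading a config module from a file path (names are illustrative, not Superset's actual loader):

```python
import importlib.util
from types import ModuleType

def load_config_module(path: str) -> ModuleType:
    # Load a module from an explicit file path without the deprecated imp API.
    spec = importlib.util.spec_from_file_location("superset_config", path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
```
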
5 changes: 2 additions & 3 deletions superset/connectors/connector_registry.py
@@ -31,7 +31,7 @@


class ConnectorRegistry:
-    """ Central Registry for all available datasource engines"""
+    """Central Registry for all available datasource engines"""

    sources: Dict[str, Type["BaseDatasource"]] = {}

@@ -68,8 +68,7 @@ def get_datasource(
    @classmethod
    def get_all_datasources(cls, session: Session) -> List["BaseDatasource"]:
        datasources: List["BaseDatasource"] = []
-        for source_type in ConnectorRegistry.sources:
-            source_class = ConnectorRegistry.sources[source_type]
+        for source_class in ConnectorRegistry.sources.values():
            qry = session.query(source_class)
            qry = source_class.default_query(qry)
            datasources.extend(qry.all())
6 changes: 2 additions & 4 deletions superset/connectors/sqla/models.py
@@ -404,9 +404,7 @@ class SqlMetric(Model, BaseMetric):
"extra",
"warning_text",
]
update_from_object_fields = list(
[s for s in export_fields if s not in ("table_id",)]
)
update_from_object_fields = list(s for s in export_fields if s != "table_id")
export_parent = "table"

def get_sqla_col(self, label: Optional[str] = None) -> Column:
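
The change drops the redundant list built inside list(); pylint reports the original pattern under its use-a-generator family of checks, if I remember the check name correctly. A quick illustration with a toy field list:

```python
export_fields = ["metric_name", "verbose_name", "table_id"]  # toy stand-in

# A generator avoids materialising an intermediate list inside list().
update_from_object_fields = list(s for s in export_fields if s != "table_id")

# A plain list comprehension expresses the same thing even more directly.
assert update_from_object_fields == [s for s in export_fields if s != "table_id"]
```
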
@@ -1151,7 +1149,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma
        having_clause_and = []

        for flt in filter:  # type: ignore
-            if not all([flt.get(s) for s in ["col", "op"]]):
+            if not all(flt.get(s) for s in ["col", "op"]):

Comment on lines -1154 to +1152

Member: If pylint caught this, this is a nice improvement 👍

Member Author: I know many people find Pylint somewhat verbose and cumbersome, but in my opinion its linting rules and checks are far superior to those of other linters.

                continue
            col = flt["col"]
            val = flt.get("val")
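
The improvement the reviewers are nodding at: with a list argument, every element is computed before all() inspects any of them, while a generator lets all() stop at the first falsy value. A quick demonstration:

```python
def costly(x: int) -> bool:
    print("evaluating", x)
    return x > 0

all([costly(x) for x in (-1, 2, 3)])  # evaluates all three elements first
all(costly(x) for x in (-1, 2, 3))    # stops after -1, skipping the rest
```
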
2 changes: 1 addition & 1 deletion superset/dashboards/api.py
@@ -237,7 +237,7 @@ def __repr__(self) -> str:
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
-        log_to_statsd=False,
+        log_to_statsd=False,  # pylint: disable=arguments-renamed
    )
    def get(self, id_or_slug: str) -> Response:
        """Gets a dashboard
3 changes: 1 addition & 2 deletions superset/datasets/commands/importers/v1/utils.py
@@ -135,8 +135,7 @@ def import_dataset(
def load_data(
    data_uri: str, dataset: SqlaTable, example_database: Database, session: Session
) -> None:
-
-    data = request.urlopen(data_uri)
+    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
    if data_uri.endswith(".gz"):
        data = gzip.open(data)
    df = pd.read_csv(data, encoding="utf-8")
7 changes: 2 additions & 5 deletions superset/examples/flights.py
@@ -38,14 +38,11 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
    airports = pd.read_csv(airports_bytes, encoding="latin-1")
    airports = airports.set_index("IATA_CODE")

-    pdf["ds"] = (
+    pdf["ds"] = (  # pylint: disable=unsupported-assignment-operation
        pdf.YEAR.map(str) + "-0" + pdf.MONTH.map(str) + "-0" + pdf.DAY.map(str)
    )
    pdf.ds = pd.to_datetime(pdf.ds)
-    del pdf["YEAR"]
-    del pdf["MONTH"]
-    del pdf["DAY"]
-
+    pdf.drop(columns=["DAY", "MONTH", "YEAR"])
    pdf = pdf.join(airports, on="ORIGIN_AIRPORT", rsuffix="_ORIG")
    pdf = pdf.join(airports, on="DESTINATION_AIRPORT", rsuffix="_DEST")
    pdf.to_sql(
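
One caveat worth flagging in the new line: DataFrame.drop is not in-place by default, so as written its result is discarded and the three columns survive, whereas the del statements it replaces did mutate pdf. A sketch of the assignment that would make the drop stick:

```python
import pandas as pd

pdf = pd.DataFrame({"YEAR": [2015], "MONTH": [1], "DAY": [2], "ds": ["x"]})
# drop() returns a new frame; keep the result (or pass inplace=True).
pdf = pdf.drop(columns=["DAY", "MONTH", "YEAR"])
assert list(pdf.columns) == ["ds"]
```
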
4 changes: 3 additions & 1 deletion superset/examples/helpers.py
@@ -69,7 +69,9 @@ def get_slice_json(defaults: Dict[Any, Any], **kwargs: Any) -> str:
def get_example_data(
    filepath: str, is_gzip: bool = True, make_bytes: bool = False
) -> BytesIO:
-    content = request.urlopen(f"{BASE_URL}{filepath}?raw=true").read()
+    content = request.urlopen(  # pylint: disable=consider-using-with
+        f"{BASE_URL}{filepath}?raw=true"
+    ).read()
    if is_gzip:
        content = zlib.decompress(content, zlib.MAX_WBITS | 16)
    if make_bytes:
4 changes: 2 additions & 2 deletions superset/jinja_context.py
@@ -527,10 +527,10 @@ class HiveTemplateProcessor(PrestoTemplateProcessor):
@memoized
def get_template_processors() -> Dict[str, Any]:
    processors = current_app.config.get("CUSTOM_TEMPLATE_PROCESSORS", {})
-    for engine in DEFAULT_PROCESSORS:
+    for engine, processor in DEFAULT_PROCESSORS.items():
        # do not overwrite engine-specific CUSTOM_TEMPLATE_PROCESSORS
        if not engine in processors:
-            processors[engine] = DEFAULT_PROCESSORS[engine]
+            processors[engine] = processor

    return processors

6 changes: 4 additions & 2 deletions superset/models/core.py
@@ -506,7 +506,7 @@ def get_all_view_names_in_database(
        return self.db_engine_spec.get_all_datasource_names(self, "view")

    @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:table_list",
        cache=cache_manager.data_cache,
    )
    def get_all_table_names_in_schema(
@@ -536,9 +536,10 @@ def get_all_table_names_in_schema(
            ]
        except Exception as ex:  # pylint: disable=broad-except
            logger.warning(ex)
+            return []

Member Author: Kind of surprised Mypy didn't find this.


    @cache_util.memoized_func(
-        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",  # type: ignore
+        key=lambda self, schema, *args, **kwargs: f"db:{self.id}:schema:{schema}:view_list",
        cache=cache_manager.data_cache,
    )
    def get_all_view_names_in_schema(
@@ -566,6 +567,7 @@ def get_all_view_names_in_schema(
            return [utils.DatasourceName(table=view, schema=schema) for view in views]
        except Exception as ex:  # pylint: disable=broad-except
            logger.warning(ex)
+            return []

    @cache_util.memoized_func(
        key=lambda self, *args, **kwargs: f"db:{self.id}:schema_list",
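
The added return [] closes a path that silently returned None from methods annotated as returning lists; pylint surfaces this as inconsistent-return-statements. A stripped-down reproduction (names are illustrative):

```python
import logging
from typing import List

logger = logging.getLogger(__name__)

def get_names_broken() -> List[str]:
    try:
        return ["a", "b"]
    except Exception as ex:  # pylint: disable=broad-except
        logger.warning(ex)
        # Falls off the end here: implicitly returns None, not a list.

def get_names_fixed() -> List[str]:
    try:
        return ["a", "b"]
    except Exception as ex:  # pylint: disable=broad-except
        logger.warning(ex)
        return []
```
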
3 changes: 3 additions & 0 deletions superset/models/schedules.py
@@ -31,17 +31,20 @@


class ScheduleType(str, enum.Enum):
+    # pylint: disable=invalid-name

Member Author: I opted not to change these enums (and others) to uppercase, as the string representation of the lowercase name is used in the database and I wanted to minimize the size of the PR.

    slice = "slice"
    dashboard = "dashboard"
    alert = "alert"


class EmailDeliveryType(str, enum.Enum):
+    # pylint: disable=invalid-name
    attachment = "Attachment"
    inline = "Inline"


class SliceEmailReportFormat(str, enum.Enum):
+    # pylint: disable=invalid-name
    visualization = "Visualization"
    data = "Raw data"

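
The author's rationale above is that the lowercase member names themselves are persisted, so invalid-name is suppressed instead of renaming. A small illustration of why the rename would not be cosmetic (class trimmed down from the one in the diff):

```python
import enum

class ScheduleType(str, enum.Enum):
    # pylint: disable=invalid-name
    slice = "slice"

# Existing DB rows reference the lowercase member name; renaming the
# member to SLICE would break name-based lookups like this one.
assert ScheduleType["slice"].value == "slice"
```
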
2 changes: 2 additions & 0 deletions superset/models/tags.py
@@ -46,6 +46,7 @@ class TagTypes(enum.Enum):
    can find all their objects by querying for the tag `owner:alice`.
    """

+    # pylint: disable=invalid-name

Member Author: I opted not to change these enums to uppercase, as the string representation of the lowercase name is used throughout and I wanted to minimize the size of the PR.

    # explicit tags, added manually by the owner
    custom = 1

@@ -59,6 +60,7 @@ class ObjectTypes(enum.Enum):

"""Object types."""

# pylint: disable=invalid-name
query = 1
chart = 2
dashboard = 3
Expand Down
10 changes: 5 additions & 5 deletions superset/tasks/alerts/validator.py
@@ -28,8 +28,8 @@


class AlertValidatorType(str, enum.Enum):
-    not_null = "not null"
-    operator = "operator"
+    NOT_NULL = "not null"
+    OPERATOR = "operator"

    @classmethod
    def valid_type(cls, validator_type: str) -> bool:
@@ -44,7 +44,7 @@ def check_validator(validator_type: str, config: str) -> None:

    config_dict = json.loads(config)

-    if validator_type == AlertValidatorType.operator.value:
+    if validator_type == AlertValidatorType.OPERATOR.value:

        if not (config_dict.get("op") and config_dict.get("threshold") is not None):
            raise SupersetException(
@@ -102,8 +102,8 @@ def get_validator_function(
"""Returns a validation function based on validator_type"""

alert_validators = {
AlertValidatorType.not_null.value: not_null_validator,
AlertValidatorType.operator.value: operator_validator,
AlertValidatorType.NOT_NULL.value: not_null_validator,
AlertValidatorType.OPERATOR.value: operator_validator,
}
if alert_validators.get(validator_type.lower()):
return alert_validators[validator_type.lower()]
2 changes: 1 addition & 1 deletion superset/tasks/cache.py
@@ -288,7 +288,7 @@ def cache_warmup(
    for url in strategy.get_urls():
        try:
            logger.info("Fetching %s", url)
-            request.urlopen(url)
+            request.urlopen(url)  # pylint: disable=consider-using-with
            results["success"].append(url)
        except URLError:
            logger.exception("Error warming up cache!")
4 changes: 2 additions & 2 deletions superset/tasks/schedules.py
@@ -827,7 +827,7 @@ def get_scheduler_action(report_type: str) -> Optional[Callable[..., Any]]:

@celery_app.task(name="email_reports.schedule_hourly")
def schedule_hourly() -> None:
-    """ Celery beat job meant to be invoked hourly """
+    """Celery beat job meant to be invoked hourly"""
    if not config["ENABLE_SCHEDULED_EMAIL_REPORTS"]:
        logger.info("Scheduled email reports not enabled in config")
        return
@@ -845,7 +845,7 @@ def schedule_hourly() -> None:

@celery_app.task(name="alerts.schedule_check")
def schedule_alerts() -> None:
-    """ Celery beat job meant to be invoked every minute to check alerts """
+    """Celery beat job meant to be invoked every minute to check alerts"""
    resolution = 0
    now = datetime.utcnow()
    start_at = now - timedelta(
9 changes: 4 additions & 5 deletions superset/utils/core.py
@@ -1321,8 +1321,8 @@ def get_first_metric_name(metrics: Sequence[Metric]) -> Optional[str]:
def ensure_path_exists(path: str) -> None:
    try:
        os.makedirs(path)
-    except OSError as exc:
-        if not (os.path.isdir(path) and exc.errno == errno.EEXIST):
+    except OSError as ex:

Member Author: This fixes a potential bug, as exc is defined globally.

+        if not (os.path.isdir(path) and ex.errno == errno.EEXIST):
            raise


@@ -1440,9 +1440,8 @@ def create_ssl_cert_file(certificate: str) -> str:
    if not os.path.exists(path):
        # Validate certificate prior to persisting to temporary directory
        parse_ssl_cert(certificate)
-        cert_file = open(path, "w")
-        cert_file.write(certificate)
-        cert_file.close()
+        with open(path, "w") as cert_file:
+            cert_file.write(certificate)
    return path


2 changes: 1 addition & 1 deletion superset/utils/memoized.py
@@ -39,7 +39,7 @@ def __init__(
    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        key = [args, frozenset(kwargs.items())]
        if self.is_method:
-            key.append(tuple([getattr(args[0], v, None) for v in self.watch]))
+            key.append(tuple(getattr(args[0], v, None) for v in self.watch))
        key = tuple(key)  # type: ignore
        try:
            if key in self.cache:
2 changes: 1 addition & 1 deletion superset/utils/pandas_postprocessing.py
@@ -593,7 +593,7 @@ def geohash_encode(
        )
        return _append_columns(df, encode_df, {"geohash": geohash})
    except ValueError:
-        QueryObjectValidationError(_("Invalid longitude/latitude"))
+        raise QueryObjectValidationError(_("Invalid longitude/latitude"))

Member Author: Fixes a potential bug, as the exception was never raised.



def geodetic_parse(
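
This bug class is easy to miss in review: constructing an exception without raise is a no-op expression, so the error case fell through and the function returned normally. A minimal reproduction with a stand-in exception type:

```python
class QueryObjectValidationError(Exception):
    """Stand-in for the real Superset exception."""

def validate_broken(ok: bool) -> None:
    if not ok:
        # The instance is created and immediately discarded; nothing surfaces.
        QueryObjectValidationError("Invalid longitude/latitude")

def validate_fixed(ok: bool) -> None:
    if not ok:
        raise QueryObjectValidationError("Invalid longitude/latitude")

validate_broken(False)  # returns silently
try:
    validate_fixed(False)
except QueryObjectValidationError as ex:
    print("caught:", ex)
```
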
2 changes: 1 addition & 1 deletion superset/views/database/views.py
@@ -291,7 +291,7 @@ def form_post(self, form: ExcelToDatabaseForm) -> Response:
            flash(message, "danger")
            return redirect("/exceltodatabaseview/form")

-        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(
+        uploaded_tmp_file_path = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
            dir=app.config["UPLOAD_FOLDER"],
            suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(),
            delete=False,
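
Presumably the suppression here is deliberate rather than a TODO: with delete=False the temporary file is meant to outlive the handle (its .name is used after this call), so a with block would be misleading. A toy sketch of that lifetime pattern, not the actual upload flow:

```python
import os
import tempfile

# delete=False keeps the file on disk after close(), so its path can be
# handed to later processing steps.
tmp = tempfile.NamedTemporaryFile(delete=False)  # pylint: disable=consider-using-with
try:
    tmp.write(b"uploaded bytes")
finally:
    tmp.close()
assert os.path.exists(tmp.name)  # the path is still valid here
os.unlink(tmp.name)
```
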
4 changes: 2 additions & 2 deletions superset/views/utils.py
@@ -352,7 +352,7 @@ def get_dashboard_extra_filters(
        dashboard is None
        or not dashboard.json_metadata
        or not dashboard.slices
-        or not any([slc for slc in dashboard.slices if slc.id == slice_id])
+        or not any(slc for slc in dashboard.slices if slc.id == slice_id)
    ):
        return []

@@ -455,7 +455,7 @@ def is_slice_in_container(


def is_owner(obj: Union[Dashboard, Slice], user: User) -> bool:
-    """ Check if user is owner of the slice """
+    """Check if user is owner of the slice"""
    return obj and user in obj.owners


2 changes: 1 addition & 1 deletion tests/integration_tests/alerts_tests.py
@@ -76,7 +76,7 @@ def setup_database():
def create_alert(
    db_session: Session,
    sql: str,
-    validator_type: AlertValidatorType = AlertValidatorType.operator,
+    validator_type: AlertValidatorType = AlertValidatorType.OPERATOR,
    validator_config: str = "",
) -> Alert:
    db_session.commit()