feat: mypy type annotations (PROJQUAY-740) (#455)
* Add dev dependencies mypy and typing

* Add makefile target `types-test`, not yet included in `test` target.

* Generate stubs for imported modules to avoid mypy complaining about missing types.

* Remove the generated stubs: there are far too many and they clutter the repository. Switch to ignoring untyped modules for now, to concentrate on type checking Quay's own code.

* Change the mypy config to ignore missing imports.

* Ignore the property decorator, as it is not supported by mypy.

* Add mypy annotations for many configuration variables.

* Re-generate the mypy_stubs directory, as some classes need it for their base classes in order to prevent mypy errors.

* util/registry/queuefile referred to a non-existent definition of the Empty class in multiprocessing.queues.

* Ignore type checking for things like monkey patching and exported/re-imported objects that mypy does not allow.

* Adjust the mypy config to warn about unreachable return paths and useless expressions (an illustrative config sketch follows this list).

* Add the __annotations__ property to INTERNAL_ONLY_PROPERTIES so that it is not part of the config schema testing

* Remove the redundant dependencies `typing` and `typing-extensions`, which are no-ops after Python 3.5.

* Remove mypy-extensions which only provides a TypedDict implementation but has not been updated since 2019.

* Update mypy to 0.910, which requires all type-stub packages to be installed manually.

* Exclude local-dev from type checking until the core team can suggest a resolution for the duplicate `__init__.py` packages.

* Re-add the `typing` dependency, which will be needed until Python 3.9.

* Ignore `.mypy_cache`.

* Add a mypy stub for the features module to replace the inline definitions.

* Enable postponed evaluation of annotations (`from __future__ import annotations`) in billing.py, as it was required to reference a class declared later in the module.

* Remove the type definitions of V1ProtocolSteps/V2ProtocolSteps to make tox happy.
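
As a rough illustration of the mypy configuration described in the bullets above, a minimal config with these behaviours might look like the sketch below. The option names are standard mypy settings, but the concrete config file, values, and exclude pattern used by the repository are assumptions here, not taken from this commit.

    # mypy.ini (illustrative sketch only)
    [mypy]
    # Concentrate on Quay's own code: do not error on untyped third-party imports.
    ignore_missing_imports = True
    # Warn about unreachable code and always-true/false conditions.
    warn_unreachable = True
    # Skip local-dev until the duplicate __init__.py package layout is resolved.
    exclude = local-dev
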
mosen authored Oct 25, 2021
1 parent 162b79e commit fca67e7
Showing 28 changed files with 2,113 additions and 76 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -28,6 +28,7 @@ Dockerfile-e
.pytest_cache/*
test/dockerclients/Vagrantfile
test/dockerclients/.*
.mypy_cache

# files generated by local dev,
# do not need to check in and can be deleted
3 changes: 3 additions & 0 deletions Makefile
@@ -75,6 +75,9 @@ full-db-test: ensure-test-db
clients-test:
cd test/clients; python clients_test.py

types-test:
mypy .

test: unit-test registry-test registry-test-old certs-test

ensure-test-db:
7 changes: 4 additions & 3 deletions auth/permissions.py
@@ -1,4 +1,5 @@
import logging
from typing import DefaultDict, Optional

from collections import namedtuple, defaultdict
from functools import partial
@@ -36,7 +37,7 @@
"member": None,
}

SCOPE_MAX_REPO_ROLES = defaultdict(lambda: None)
SCOPE_MAX_REPO_ROLES: DefaultDict[scopes.Scope, Optional[str]] = defaultdict(lambda: None)
SCOPE_MAX_REPO_ROLES.update(
{
scopes.READ_REPO: "read",
@@ -46,7 +47,7 @@
}
)

SCOPE_MAX_TEAM_ROLES = defaultdict(lambda: None)
SCOPE_MAX_TEAM_ROLES: DefaultDict[scopes.Scope, Optional[str]] = defaultdict(lambda: None)
SCOPE_MAX_TEAM_ROLES.update(
{
scopes.CREATE_REPO: "creator",
@@ -55,7 +56,7 @@
}
)

SCOPE_MAX_USER_ROLES = defaultdict(lambda: None)
SCOPE_MAX_USER_ROLES: DefaultDict[scopes.Scope, Optional[str]] = defaultdict(lambda: None)
SCOPE_MAX_USER_ROLES.update(
{
scopes.READ_USER: "read",
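
The explicit `DefaultDict[scopes.Scope, Optional[str]]` annotations above are needed because mypy cannot infer a useful value type from `defaultdict(lambda: None)` on its own: it either asks for an annotation or infers a value type too narrow for the string roles added via `.update(...)`. A standalone sketch of the pattern, with illustrative names rather than the real Quay scopes:

    from collections import defaultdict
    from typing import DefaultDict, Optional

    # Without the annotation, mypy has only `lambda: None` to go on, so adding
    # string values below would not type check cleanly.
    ROLE_BY_SCOPE: DefaultDict[str, Optional[str]] = defaultdict(lambda: None)
    ROLE_BY_SCOPE.update({"repo:read": "read", "repo:write": "write"})

    assert ROLE_BY_SCOPE["repo:admin"] is None  # unknown scopes fall back to None
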
5 changes: 3 additions & 2 deletions conf/init/supervisord_conf_create.py
@@ -1,3 +1,4 @@
from typing import Union, List
import os
import os.path
import sys
@@ -9,8 +10,8 @@

QUAY_LOGGING = os.getenv("QUAY_LOGGING", "stdout") # or "syslog"

QUAY_SERVICES = os.getenv("QUAY_SERVICES", [])
QUAY_OVERRIDE_SERVICES = os.getenv("QUAY_OVERRIDE_SERVICES", [])
QUAY_SERVICES: Union[List, str] = os.getenv("QUAY_SERVICES", [])
QUAY_OVERRIDE_SERVICES: Union[List, str] = os.getenv("QUAY_OVERRIDE_SERVICES", [])


def registry_services():
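
The `Union[List, str]` annotations above capture how these values actually behave: `os.getenv` returns the environment string when the variable is set and otherwise falls through to the list default (`[]`), so either type can appear at runtime. A self-contained sketch of the same pattern (the comma-splitting below is illustrative, not taken from the script):

    import os
    from typing import List, Union

    # Either the raw environment string or the [] default, depending on the environment.
    QUAY_SERVICES: Union[List, str] = os.getenv("QUAY_SERVICES", [])

    # Downstream code has to handle both shapes.
    services = QUAY_SERVICES.split(",") if isinstance(QUAY_SERVICES, str) else QUAY_SERVICES
    print(services)
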
77 changes: 39 additions & 38 deletions config.py
@@ -1,3 +1,4 @@
from typing import Optional, Dict, Any, List, Union, Tuple
from uuid import uuid4

import os.path
@@ -186,21 +187,21 @@ class DefaultConfig(ImmutableConfig):
REGISTRY_TITLE = "Project Quay"
REGISTRY_TITLE_SHORT = "Project Quay"

CONTACT_INFO = []
CONTACT_INFO: List[str] = []

# Mail config
MAIL_SERVER = ""
MAIL_USE_TLS = True
MAIL_PORT = 587
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_USERNAME: Optional[str] = None
MAIL_PASSWORD: Optional[str] = None
MAIL_DEFAULT_SENDER = "[email protected]"
MAIL_FAIL_SILENTLY = False
TESTING = True

# DB config
DB_URI = "sqlite:///test/data/test.db"
DB_CONNECTION_ARGS = {
DB_CONNECTION_ARGS: Optional[Dict[str, Any]] = {
"threadlocals": True,
"autorollback": True,
}
@@ -229,7 +230,7 @@ def create_transaction(db):

# Build logs
BUILDLOGS_REDIS = {"host": "localhost"}
BUILDLOGS_OPTIONS = []
BUILDLOGS_OPTIONS: List[Any] = []

# Real-time user events
USER_EVENTS_REDIS = {"host": "localhost"}
@@ -250,17 +251,17 @@ def create_transaction(db):
SENTRY_PUBLIC_DSN = None

# Github Config
GITHUB_LOGIN_CONFIG = None
GITHUB_LOGIN_CONFIG: Optional[Dict[str, Any]] = None
GITHUB_TRIGGER_CONFIG = None

# Google Config.
GOOGLE_LOGIN_CONFIG = None
GOOGLE_LOGIN_CONFIG: Optional[Dict[str, str]] = None

# Bitbucket Config.
BITBUCKET_TRIGGER_CONFIG = None
BITBUCKET_TRIGGER_CONFIG: Optional[Dict[str, str]] = None

# Gitlab Config.
GITLAB_TRIGGER_CONFIG = None
GITLAB_TRIGGER_CONFIG: Optional[Dict[str, str]] = None

NOTIFICATION_QUEUE_NAME = "notification"
DOCKERFILE_BUILD_QUEUE_NAME = "dockerfilebuild"
@@ -272,7 +273,7 @@ def create_transaction(db):
SECSCAN_V4_NOTIFICATION_QUEUE_NAME = "secscanv4"

# Super user config. Note: This MUST BE an empty list for the default config.
SUPER_USERS = []
SUPER_USERS: List[str] = []

# Feature Flag: Whether sessions are permanent.
FEATURE_PERMANENT_SESSIONS = True
@@ -325,7 +326,7 @@ def create_transaction(db):
FEATURE_REQUIRE_ENCRYPTED_BASIC_AUTH = False

# Feature Flag: Whether to automatically replicate between storage engines.
FEATURE_STORAGE_REPLICATION = False
FEATURE_STORAGE_REPLICATION = False # Optional[bool]

# Feature Flag: Whether users can directly login to the UI.
FEATURE_DIRECT_LOGIN = True
@@ -339,7 +340,7 @@ def create_transaction(db):

# Feature Flag: Whether to restrict V1 pushes to the whitelist.
FEATURE_RESTRICTED_V1_PUSH = False
V1_PUSH_WHITELIST = []
V1_PUSH_WHITELIST: Optional[List[str]] = []

# Feature Flag: Whether or not to rotate old action logs to storage.
FEATURE_ACTION_LOG_ROTATION = False
@@ -391,16 +392,16 @@ def create_transaction(db):
# user's repository list pages, regardless of whether that user is a member of the namespace.
# Typically, this is used by an enterprise customer in configuring a set of "well-known"
# namespaces.
PUBLIC_NAMESPACES = []
PUBLIC_NAMESPACES: List[str] = []

# The namespace to use for library repositories.
# Note: This must remain 'library' until Docker removes their hard-coded namespace for libraries.
# See: https://github.com/docker/docker/blob/master/registry/session.go#L320
LIBRARY_NAMESPACE = "library"

BUILD_MANAGER = ("enterprise", {})
BUILD_MANAGER: Tuple[str, Dict[Any, Any]] = ("enterprise", {})

DISTRIBUTED_STORAGE_CONFIG = {
DISTRIBUTED_STORAGE_CONFIG: Optional[Dict[str, List[Any]]] = {
"local_eu": ["LocalStorage", {"storage_path": "test/data/registry/eu"}],
"local_us": ["LocalStorage", {"storage_path": "test/data/registry/us"}],
}
@@ -409,7 +410,7 @@ def create_transaction(db):
DISTRIBUTED_STORAGE_DEFAULT_LOCATIONS = ["local_us"]

# Health checker.
HEALTH_CHECKER = ("LocalHealthCheck", {})
HEALTH_CHECKER: Tuple[str, Dict[Any, Any]] = ("LocalHealthCheck", {})

# Userfiles
USERFILES_LOCATION = "local_us"
@@ -420,8 +421,8 @@ def create_transaction(db):
LOG_ARCHIVE_PATH = "logarchive/"

# Action logs archive
ACTION_LOG_ARCHIVE_LOCATION = "local_us"
ACTION_LOG_ARCHIVE_PATH = "actionlogarchive/"
ACTION_LOG_ARCHIVE_LOCATION: Optional[str] = "local_us"
ACTION_LOG_ARCHIVE_PATH: Optional[str] = "actionlogarchive/"
ACTION_LOG_ROTATION_THRESHOLD = "30d"

# Allow registry pulls when unable to write to the audit log
@@ -446,13 +447,13 @@ def create_transaction(db):

# Custom branding
if os.environ.get("RED_HAT_QUAY", False):
BRANDING = {
BRANDING: Dict[str, Optional[str]] = {
"logo": "/static/img/RH_Logo_Quay_Black_UX-horizontal.svg",
"footer_img": "/static/img/RedHat.svg",
"footer_url": "https://access.redhat.com/documentation/en-us/red_hat_quay/3/",
}
else:
BRANDING = {
BRANDING: Dict[str, Optional[str]] = {
"logo": "/static/img/quay-horizontal-color.svg",
"footer_img": None,
"footer_url": None,
@@ -472,7 +473,7 @@ def create_transaction(db):
SECURITY_SCANNER_ENDPOINT = None

# The endpoint for the V4 security scanner.
SECURITY_SCANNER_V4_ENDPOINT = None
SECURITY_SCANNER_V4_ENDPOINT: Optional[str] = None

# The number of seconds between indexing intervals in the security scanner
SECURITY_SCANNER_INDEXING_INTERVAL = 30
@@ -484,7 +485,7 @@ def create_transaction(db):
SECURITY_SCANNER_ENDPOINT_BATCH = None

# If specified, GET requests that return non-200 will be retried at the following instances.
SECURITY_SCANNER_READONLY_FAILOVER_ENDPOINTS = []
SECURITY_SCANNER_READONLY_FAILOVER_ENDPOINTS: Optional[List[str]] = []

# The indexing engine version running inside the security scanner.
SECURITY_SCANNER_ENGINE_VERSION_TARGET = 3
@@ -518,7 +519,7 @@ def create_transaction(db):
REPO_MIRROR_TLS_VERIFY = True

# Replaces the SERVER_HOSTNAME as the destination for mirroring.
REPO_MIRROR_SERVER_HOSTNAME = None
REPO_MIRROR_SERVER_HOSTNAME: Optional[str] = None

# JWTProxy Settings
# The address (sans schema) to proxy outgoing requests through the jwtproxy
@@ -564,16 +565,16 @@ def create_transaction(db):
INSTANCE_SERVICE_KEY_REFRESH = 55

# The whitelist of client IDs for OAuth applications that allow for direct login.
DIRECT_OAUTH_CLIENTID_WHITELIST = []
DIRECT_OAUTH_CLIENTID_WHITELIST: Optional[List[str]] = []

# URL that specifies the location of the prometheus pushgateway.
PROMETHEUS_PUSHGATEWAY_URL = "http://localhost:9091"
PROMETHEUS_PUSHGATEWAY_URL: Optional[str] = "http://localhost:9091"

# Namespace prefix for all prometheus metrics.
PROMETHEUS_NAMESPACE = "quay"

# Overridable list of reverse DNS prefixes that are reserved for internal use on labels.
LABEL_KEY_RESERVED_PREFIXES = []
LABEL_KEY_RESERVED_PREFIXES: Optional[List[str]] = []

# Delays workers from starting until a random point in time between 0 and their regular interval.
STAGGER_WORKERS = True
@@ -583,8 +584,8 @@ def create_transaction(db):

# Site key and secret key for using recaptcha.
FEATURE_RECAPTCHA = False
RECAPTCHA_SITE_KEY = None
RECAPTCHA_SECRET_KEY = None
RECAPTCHA_SITE_KEY: Optional[str] = None
RECAPTCHA_SECRET_KEY: Optional[str] = None

# Server where TUF metadata can be found
TUF_SERVER = None
@@ -637,18 +638,18 @@ def create_transaction(db):
BROWSER_API_CALLS_XHR_ONLY = True

# If set to a non-None integer value, the default number of maximum builds for a namespace.
DEFAULT_NAMESPACE_MAXIMUM_BUILD_COUNT = None
DEFAULT_NAMESPACE_MAXIMUM_BUILD_COUNT: Optional[int] = None

# If set to a non-None integer value, the default number of maximum builds for a namespace whose
# creator IP is deemed a threat.
THREAT_NAMESPACE_MAXIMUM_BUILD_COUNT = None
THREAT_NAMESPACE_MAXIMUM_BUILD_COUNT: Optional[int] = None

# The API Key to use when requesting IP information.
IP_DATA_API_KEY = None

# For Billing Support Only: The number of allowed builds on a namespace that has been billed
# successfully.
BILLED_NAMESPACE_MAXIMUM_BUILD_COUNT = None
BILLED_NAMESPACE_MAXIMUM_BUILD_COUNT: Optional[int] = None

# Configuration for the data model cache.
DATA_MODEL_CACHE_CONFIG = {
@@ -697,13 +698,13 @@ def create_transaction(db):

# Log model
LOGS_MODEL = "database"
LOGS_MODEL_CONFIG = {}
LOGS_MODEL_CONFIG: Dict[str, Any] = {}

# Namespace in which all audit logging is disabled.
DISABLED_FOR_AUDIT_LOGS = []
DISABLED_FOR_AUDIT_LOGS: List[str] = []

# Namespace in which pull audit logging is disabled.
DISABLED_FOR_PULL_LOGS = []
DISABLED_FOR_PULL_LOGS: List[str] = []

# Feature Flag: Whether pull logs are disabled for free namespace.
FEATURE_DISABLE_PULL_LOGS_FOR_FREE_NAMESPACES = False
@@ -715,7 +716,7 @@ def create_transaction(db):
# The list of domains, including subdomains, for which any *new* User with a matching
# email address will be denied creation. This option is only used if
# FEATURE_BLACKLISTED_EMAILS is enabled.
BLACKLISTED_EMAIL_DOMAINS = []
BLACKLISTED_EMAIL_DOMAINS: List[str] = []

# Feature Flag: Whether garbage collection is enabled.
FEATURE_GARBAGE_COLLECTION = True
@@ -735,10 +736,10 @@ def create_transaction(db):
APP_REGISTRY_RESULTS_LIMIT = 100

# The whitelist of namespaces whose app registry package list is cached for 1 hour.
APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST = []
APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST: Optional[List[str]] = []

# The whitelist of namespaces whose app registry show package is cached for 1 hour.
APP_REGISTRY_SHOW_PACKAGE_CACHE_WHITELIST = []
APP_REGISTRY_SHOW_PACKAGE_CACHE_WHITELIST: Optional[List[str]] = []

# The maximum size of uploaded CNR layers.
MAXIMUM_CNR_LAYER_SIZE = "2m"
@@ -764,7 +765,7 @@ def create_transaction(db):

# The set of hostnames disallowed from webhooks, beyond localhost (which will
# not work due to running inside a container).
WEBHOOK_HOSTNAME_BLACKLIST = []
WEBHOOK_HOSTNAME_BLACKLIST: Optional[List[str]] = []

# The root URL for documentation.
if os.environ.get("RED_HAT_QUAY", False):
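
One side effect of annotating class attributes throughout `DefaultConfig`, as above, is that the class now carries an `__annotations__` mapping; this is why the commit message adds `__annotations__` to `INTERNAL_ONLY_PROPERTIES`, so that schema tests walking the class namespace do not treat it as a configuration key. A minimal illustration with a hypothetical class:

    from typing import List, Optional

    class ExampleConfig:
        SUPER_USERS: List[str] = []
        MAIL_USERNAME: Optional[str] = None

    # Annotated class attributes are recorded on the class itself ...
    print(ExampleConfig.__annotations__)

    # ... so code that enumerates the class namespace to discover config keys
    # will also see '__annotations__' unless it is explicitly excluded.
    print("__annotations__" in vars(ExampleConfig))  # True
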
4 changes: 3 additions & 1 deletion data/billing.py
@@ -1,3 +1,5 @@
from __future__ import annotations
from typing import Dict
import stripe

from datetime import datetime, timedelta
@@ -383,7 +385,7 @@ class Customer(AttrDict):
}
)

ACTIVE_CUSTOMERS = {}
ACTIVE_CUSTOMERS: Dict[str, FakeStripe] = {}

@property
def card(self):
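
The `from __future__ import annotations` line added above enables postponed evaluation of annotations (PEP 563), which is what lets the `ACTIVE_CUSTOMERS: Dict[str, FakeStripe]` annotation refer to a class that, per the commit message, is declared later in the module. A self-contained sketch with illustrative names:

    from __future__ import annotations
    from typing import Dict

    # With postponed evaluation the annotation is stored as a string and never
    # evaluated eagerly, so the forward reference to LaterClass is allowed.
    REGISTRY: Dict[str, LaterClass] = {}

    class LaterClass:
        pass

    REGISTRY["example"] = LaterClass()
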
2 changes: 1 addition & 1 deletion data/cache/impl.py
@@ -170,7 +170,7 @@ def deserialize_json(key, value, flags):
if flags == _JSON_TYPE:
return json.loads(value)

raise Exception("Unknown flags for value: {1}".format(flags))
raise Exception("Unknown flags for value: {}".format(flags))

return PooledClient(
server=self.endpoint,
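
The change above fixes a latent formatting bug rather than a typing issue: `"... {1}".format(flags)` refers to a second positional argument that is never supplied, so the error path itself would raise. A quick illustration:

    flags = 99

    try:
        "Unknown flags for value: {1}".format(flags)  # index 1 expects a second argument
    except IndexError as exc:
        print(exc)

    # The corrected placeholder interpolates the single argument as intended.
    print("Unknown flags for value: {}".format(flags))
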
4 changes: 2 additions & 2 deletions data/cache/test/test_cache.py
@@ -1,5 +1,5 @@
import pytest

from typing import Dict, Any
from unittest.mock import patch, MagicMock
from rediscluster.nodemanager import NodeManager

@@ -17,7 +17,7 @@
)


DATA = {}
DATA: Dict[str, Any] = {}

TEST_CACHE_CONFIG = {
"repository_blob_cache_ttl": "240s",
2 changes: 1 addition & 1 deletion data/database.py
@@ -17,7 +17,7 @@

from enum import IntEnum, Enum, unique
from peewee import *
from peewee import __exception_wrapper__, Function
from peewee import __exception_wrapper__, Function # type: ignore
from playhouse.pool import (
PooledDatabase,
PooledMySQLDatabase,
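
The `# type: ignore` appended to the peewee import above is the targeted, per-line suppression mentioned in the commit message: it silences mypy for that one import of a private name while leaving the rest of the module fully checked. The same per-line form covers patterns mypy rejects by design, such as monkey patching; a small sketch (the FakeDate example is illustrative, not from the Quay code):

    import datetime

    class FakeDate(datetime.date):
        """Test double standing in for the real date class."""

        @classmethod
        def today(cls):
            return cls(2021, 10, 25)

    # Reassigning a module attribute like this is rejected by mypy by default,
    # so the assignment carries a per-line ignore instead of disabling the file.
    datetime.date = FakeDate  # type: ignore

    print(datetime.date.today())  # 2021-10-25
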
(Diff for the remaining changed files not shown.)
