From 4f0b90c8508f96a02ff69d708eb0ddc86691e48f Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 20 Jan 2025 15:39:17 +0100 Subject: [PATCH 01/17] Add tox generation script, but don't use it yet --- scripts/generate-test-files.sh | 17 + scripts/populate_tox/README.md | 136 ++++ scripts/populate_tox/config.py | 385 ++++++++++++ scripts/populate_tox/populate_tox.py | 502 +++++++++++++++ scripts/populate_tox/tox.jinja | 899 +++++++++++++++++++++++++++ tox.ini | 17 + 6 files changed, 1956 insertions(+) create mode 100755 scripts/generate-test-files.sh create mode 100644 scripts/populate_tox/README.md create mode 100644 scripts/populate_tox/config.py create mode 100644 scripts/populate_tox/populate_tox.py create mode 100644 scripts/populate_tox/tox.jinja diff --git a/scripts/generate-test-files.sh b/scripts/generate-test-files.sh new file mode 100755 index 0000000000..b7420b28e9 --- /dev/null +++ b/scripts/generate-test-files.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +# This script generates tox.ini and CI YAML files in one go. + +set -xe + +cd "$(dirname "$0")" + +python -m venv .venv +. .venv/bin/activate + +pip install -e .. +pip install -r populate_tox/requirements.txt +pip install -r split_tox_gh_actions/requirements.txt + +python populate_tox/populate_tox.py +python split_tox_gh_actions/split_tox_gh_actions.py diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md new file mode 100644 index 0000000000..f279dd939e --- /dev/null +++ b/scripts/populate_tox/README.md @@ -0,0 +1,136 @@ +# Populate Tox + +We integrate with a number of frameworks and libraries and have a test suite for +each. The tests run against different versions of the framework/library to make +sure we support everything we claim to. + +This `populate_tox.py` script is responsible for picking reasonable versions to +test automatically and generating parts of `tox.ini` to capture this. + +## How it works + +There is a template in this directory called `tox.jinja` which contains a +combination of hardcoded and generated entries. + +The `populate_tox.py` script fills out the auto-generated part of that template. +It does this by querying PYPI for each framework's package and its metadata and +then determining which versions make sense to test to get good coverage. + +The lowest supported and latest version of a framework are always tested, with +a number of releases in between: +- If the package has majors, we pick the highest version of each major. For the + latest major, we also pick the lowest version in that major. +- If the package doesn't have multiple majors, we pick two versions in between + lowest and highest. + +#### Caveats + +- Make sure the integration name is the same everywhere. If it consists of + multiple words, use an underscore instead of a hyphen. + +## Defining constraints + +The `TEST_SUITE_CONFIG` dictionary defines, for each integration test suite, +the main package (framework, library) to test with; any additional test +dependencies, optionally gated behind specific conditions; and optionally +the Python versions to test on. + +The format is: + +``` +integration_name: { + "package": name_of_main_package_on_pypi, + "deps": { + rule1: [package1, package2, ...], + rule2: [package3, package4, ...], + }, + "python": python_version_specifier, +} +``` + +The following can be set as a rule: + - `*`: packages will be always installed + - a version specifier on the main package (e.g. 
`<=0.32`): packages will only + be installed if the main package falls into the version bounds specified + - specific Python version(s) in the form `py3.8,py3.9`: packages will only be + installed if the Python version matches one from the list + +Rules can be used to specify version bounds on older versions of the main +package's dependencies, for example. If e.g. Flask tests generally need +Werkzeug and don't care about its version, but Flask older than 3.0 needs +a specific Werkzeug version to work, you can say: + +``` +"flask": { + "deps": { + "*": ["Werkzeug"], + "<3.0": ["Werkzeug<2.1.0"], + } +} +``` + +Sometimes, things depend on the Python version installed. If the integration +test should only run on specific Python version, e.g. if you want AIOHTTP +tests to only run on Python 3.7+, you can say: + +``` +"aiohttp": { + ... + "python": ">=3.7", +} +``` + +If, on the other hand, you need to install a specific version of a secondary +dependency on specific Python versions (so the test suite should still run on +said Python versions, just with different dependency-of-a-dependency bounds), +you can say: + +``` +"celery": { + ... + "deps": { + "*": ["newrelic", "redis"], + "py3.7": ["importlib-metadata<5.0"], + }, +}, +``` + +## How-Tos + +### Add a new test suite + +1. Add the minimum supported version of the framework/library to `_MIN_VERSIONS` + in `integrations/__init__.py`. This should be the lowest version of the + framework that we can guarantee works with the SDK. If you've just added the + integration, it's fine to set this to the latest version of the framework + at the time. +2. Add the integration and any constraints to `TEST_SUITE_CONFIG`. See the + "Defining constraints" section for the format (or copy-paste one + of the existing entries). +3. Add the integration to one of the groups in the `GROUPS` dictionary in + `scripts/split_tox_gh_actions/split_tox_gh_actions.py`. +4. Add the `TESTPATH` for the test suite in `tox.jinja`'s `setenv` section. +5. Run `scripts/generate-test-files.sh` and commit the changes. + +### Migrate a test suite to populate_tox.py + +A handful of integration test suites are still hardcoded. The goal is to migrate +them all to `populate_tox.py` over time. + +1. Remove the integration from the `IGNORE` list in `populate_tox.py`. +2. Remove the hardcoded entries for the integration from the `envlist` and `deps` sections of `tox.jinja`. +2. Run `scripts/generate-test-files.sh`. +3. Run the test suite, either locally or by creating a PR. +4. Address any test failures that happen. + +You might have to introduce additional version bounds on the dependencies of the +package. Try to determine the source of the failure and address it. + +Common scenarios: +- An old version of the tested package installs a dependency without defining + an upper version bound on it. A new version of the dependency is installed that + is incompatible with the package. In this case you need to determine which + versions of the dependency don't contain the breaking change and restrict this + in `TEST_SUITE_CONFIG`. +- Tests are failing on an old Python version. In this case first double-check + whether we were even testing them on that version in the original `tox.ini`. 
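+
+For example, a hypothetical sketch (`mypackage` and `somedep` are made-up names, not real entries): if an old version of the tested package pulls in `somedep` without an upper version bound, and newer `somedep` releases break it, the restriction in `TEST_SUITE_CONFIG` could look like this:
+
+```
+"mypackage": {
+    "package": "mypackage",
+    "deps": {
+        "*": ["pytest-asyncio"],
+        # Hypothetical: mypackage older than 2.0 needs somedep below the breaking 3.0 release
+        "<2.0": ["somedep<3.0"],
+    },
+}
+```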
diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py new file mode 100644 index 0000000000..788c7eedac --- /dev/null +++ b/scripts/populate_tox/config.py @@ -0,0 +1,385 @@ +# The TEST_SUITE_CONFIG dictionary defines, for each integration test suite, +# the main package (framework, library) to test with; any additional test +# dependencies, optionally gated behind specific conditions; and optionally +# the Python versions to test on. +# +# See scripts/populate_tox/README.md for more info on the format and examples. + +TEST_SUITE_CONFIG = { + "aiohttp": { + "package": "aiohttp", + "deps": {"*": ["pytest-aiohttp", "pytest-asyncio"]}, + "python": ">=3.7", + }, + "anthropic": { + "package": "anthropic", + "deps": { + "*": ["pytest-asyncio"], + "<=0.32": ["httpx<0.28.0"], + }, + "python": ">=3.7", + }, + "ariadne": { + "package": "ariadne", + "deps": { + "*": ["fastapi", "flask", "httpx"], + }, + "python": ">=3.8", + }, + "arq": { + "package": "arq", + "deps": { + "*": ["fakeredis>=2.2.0,<2.8", "pytest-asyncio", "async-timeout"], + "<=0.25": ["pydantic<2"], + }, + "python": ">=3.7", + }, + "asyncpg": { + "package": "asyncpg", + "deps": { + "*": ["pytest-asyncio"], + }, + "python": ">=3.7", + }, + "beam": { + "package": "apache-beam", + "deps": { + "*": [], + }, + "python": ">=3.7", + }, + "boto3": { + "package": "boto3", + "deps": { + "*": [], + }, + }, + "bottle": { + "package": "bottle", + "deps": { + "*": ["werkzeug<2.1.0"], + }, + }, + "celery": { + "package": "celery", + "deps": { + "*": ["newrelic", "redis"], + "py3.7": ["importlib-metadata<5.0"], + }, + }, + "chalice": { + "package": "chalice", + "deps": { + "*": ["pytest-chalice==0.0.5"], + }, + }, + "clickhouse_driver": { + "package": "clickhouse-driver", + "deps": { + "*": [], + }, + }, + "cohere": { + "package": "cohere", + "deps": { + "*": ["httpx"], + }, + }, + "django": { + "package": "django", + "deps": { + "*": [ + "psycopg2-binary", + "werkzeug", + ], + ">=2.0,<3.0": ["six"], + "<=3.2": [ + "werkzeug<2.1.0", + "djangorestframework>=3.0.0,<4.0.0", + "pytest-django", + ], + ">=2.0": ["channels[daphne]"], + "<=3.0": ["pytest-django<4.0"], + ">=4.0": ["djangorestframework", "pytest-asyncio"], + }, + }, + "dramatiq": { + "package": "dramatiq", + "deps": {}, + }, + "falcon": { + "package": "falcon", + "deps": {}, + "python": "<3.13", + }, + "fastapi": { + "package": "fastapi", + "deps": { + "*": [ + "httpx", + "anyio<4.0.0", + "python-multipart", + "pytest-asyncio", + "requests", + ] + }, + "python": ">=3.7", + }, + "flask": { + "package": "flask", + "deps": { + "*": ["flask-login", "werkzeug"], + "<2.0": ["werkzeug<2.1.0", "markupsafe<2.1.0"], + }, + }, + "gql": { + "package": "gql[all]", + "deps": {}, + }, + "graphene": { + "package": "graphene", + "deps": { + "*": ["blinker", "fastapi", "flask", "httpx"], + "py3.6": ["aiocontextvars"], + }, + }, + "grpc": { + "package": "grpcio", + "deps": { + "*": ["protobuf", "mypy-protobuf", "types-protobuf", "pytest-asyncio"], + }, + "python": ">=3.7", + }, + "httpx": { + "package": "httpx", + "deps": { + "*": ["anyio<4.0.0", "pytest-httpx"], + "==0.16": ["pytest-httpx==0.10.0"], + "==0.18": ["pytest-httpx==0.12.0"], + "==0.20": ["pytest-httpx==0.14.0"], + "==0.22": ["pytest-httpx==0.19.0"], + "==0.23": ["pytest-httpx==0.21.0"], + "==0.24": ["pytest-httpx==0.22.0"], + "==0.25": ["pytest-httpx==0.25.0"], + }, + }, + "huey": { + "package": "huey", + "deps": { + "*": [], + }, + }, + "huggingface_hub": { + "package": "huggingface_hub", + "deps": {"*": []}, + }, + 
"langchain": { + "package": "langchain", + "deps": { + "*": ["openai", "tiktoken", "httpx"], + ">=0.3": ["langchain-community"], + }, + }, + "langchain_notiktoken": { + "package": "langchain", + "deps": { + "*": ["openai", "httpx"], + ">=0.3": ["langchain-community"], + }, + }, + "litestar": { + "package": "litestar", + "deps": { + "*": ["pytest-asyncio", "python-multipart", "requests", "cryptography"], + "<=2.6": ["httpx<0.28"], + }, + }, + "loguru": { + "package": "loguru", + "deps": { + "*": [], + }, + }, + # XXX + # openai-latest: tiktoken~=0.6.0 + "openai": { + "package": "openai", + "deps": { + "*": ["pytest-asyncio", "tiktoken", "httpx"], + "<=1.22": ["httpx<0.28.0"], + }, + }, + "openai_notiktoken": { + "package": "openai", + "deps": { + "*": ["pytest-asyncio", "httpx"], + "<=1.22": ["httpx<0.28.0"], + }, + }, + "openfeature": { + "package": "openfeature-sdk", + "deps": { + "*": [], + }, + }, + "launchdarkly": { + "package": "launchdarkly-server-sdk", + "deps": { + "*": [], + }, + }, + "opentelemetry": { + "package": "opentelemetry-distro", + "deps": { + "*": [], + }, + }, + "pure_eval": { + "package": "pure_eval", + "deps": { + "*": [], + }, + }, + "pymongo": { + "package": "pymongo", + "deps": { + "*": ["mockupdb"], + }, + }, + "pyramid": { + "package": "pyramid", + "deps": { + "*": ["werkzeug<2.1.0"], + }, + }, + "quart": { + "package": "quart", + "deps": { + "*": [ + "quart-auth", + "pytest-asyncio", + "werkzeug", + ], + "<=0.19": [ + "blinker<1.6", + "jinja2<3.1.0", + "Werkzeug<2.1.0", + "hypercorn<0.15.0", + ], + "py3.8": ["taskgroup==0.0.0a4"], + }, + }, + "ray": { + "package": "ray", + "deps": {}, + }, + "redis": { + "package": "redis", + "deps": { + "*": ["fakeredis!=1.7.4", "pytest<8.0.0", "pytest-asyncio"], + "py3.6,py3.7": [ + "fakeredis!=2.26.0" + ], # https://github.com/cunla/fakeredis-py/issues/341 + }, + }, + "redis_py_cluster_legacy": { + "package": "redis-py-cluster", + "deps": {}, + }, + "requests": { + "package": "requests", + "deps": {}, + }, + "rq": { + "package": "rq", + "deps": { + "*": ["fakeredis"], + "<0.13": [ + "fakeredis<1.0", + "redis<3.2.2", + ], # https://github.com/jamesls/fakeredis/issues/245 + ">=0.13,<=1.10": ["fakeredis>=1.0,<1.7.4"], + "py3.6,py3.7": [ + "fakeredis!=2.26.0" + ], # https://github.com/cunla/fakeredis-py/issues/341 + }, + }, + "sanic": { + "package": "sanic", + "deps": { + "*": ["websockets<11.0", "aiohttp", "sanic_testing"], + ">=22.0": ["sanic_testing"], + "py3.6": ["aiocontextvars==0.2.1"], + }, + }, + "spark": { + "package": "pyspark", + "deps": {}, + "python": ">=3.8", + }, + "starlette": { + "package": "starlette", + "deps": { + "*": [ + "pytest-asyncio", + "python-multipart", + "requests", + "anyio<4.0.0", + "jinja2", + "httpx", + ], + "<=0.36": ["httpx<0.28.0"], + "<0.15": ["jinja2<3.1"], + "py3.6": ["aiocontextvars"], + }, + }, + "starlite": { + "package": "starlite", + "deps": { + "*": [ + "pytest-asyncio", + "python-multipart", + "requests", + "cryptography", + "pydantic<2.0.0", + "httpx<0.28", + ], + }, + "python": "<=3.11", + }, + "sqlalchemy": { + "package": "sqlalchemy", + "deps": {}, + }, + "strawberry": { + "package": "strawberry-graphql[fastapi,flask]", + "deps": { + "*": ["fastapi", "flask", "httpx"], + }, + }, + "tornado": { + "package": "tornado", + "deps": { + "*": ["pytest"], + "<=6.4.1": [ + "pytest<8.2" + ], # https://github.com/tornadoweb/tornado/pull/3382 + "py3.6": ["aiocontextvars"], + }, + }, + "trytond": { + "package": "trytond", + "deps": { + "*": ["werkzeug"], + "<=5.0": ["werkzeug<1.0"], + }, + 
}, + "typer": { + "package": "typer", + "deps": {}, + }, + "unleash": { + "package": "UnleashClient", + "deps": {}, + }, +} diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py new file mode 100644 index 0000000000..7337a4c9f8 --- /dev/null +++ b/scripts/populate_tox/populate_tox.py @@ -0,0 +1,502 @@ +""" +This script populates tox.ini automatically using release data from PYPI. +""" + +import functools +import hashlib +import os +import sys +import time +from bisect import bisect_left +from collections import defaultdict +from datetime import datetime, timedelta +from packaging.specifiers import SpecifierSet +from packaging.version import Version +from pathlib import Path +from typing import Optional, Union + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + +import requests +from jinja2 import Environment, FileSystemLoader +from sentry_sdk.integrations import _MIN_VERSIONS + +from config import TEST_SUITE_CONFIG +from split_tox_gh_actions.split_tox_gh_actions import GROUPS + + +# Only consider package versions going back this far +CUTOFF = datetime.now() - timedelta(days=365 * 5) + +TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini" +ENV = Environment( + loader=FileSystemLoader(Path(__file__).resolve().parent), + trim_blocks=True, + lstrip_blocks=True, +) + +PYPI_PROJECT_URL = "https://pypi.python.org/pypi/{project}/json" +PYPI_VERSION_URL = "https://pypi.python.org/pypi/{project}/{version}/json" +CLASSIFIER_PREFIX = "Programming Language :: Python :: " + + +IGNORE = { + # Do not try auto-generating the tox entries for these. They will be + # hardcoded in tox.ini. + # + # This set should be getting smaller over time as we migrate more test + # suites over to this script. Some entries will probably stay forever + # as they don't fit the mold (e.g. common, asgi, which don't have a 3rd party + # pypi package to install in different versions). 
+ "common", + "gevent", + "opentelemetry", + "potel", + "aiohttp", + "anthropic", + "ariadne", + "arq", + "asgi", + "asyncpg", + "aws_lambda", + "beam", + "boto3", + "bottle", + "celery", + "chalice", + "clickhouse_driver", + "cohere", + "cloud_resource_context", + "cohere", + "django", + "dramatiq", + "falcon", + "fastapi", + "flask", + "gcp", + "gql", + "graphene", + "grpc", + "httpx", + "huey", + "huggingface_hub", + "langchain", + "langchain_notiktoken", + "launchdarkly", + "litestar", + "loguru", + "openai", + "openai_notiktoken", + "openfeature", + "pure_eval", + "pymongo", + "pyramid", + "quart", + "ray", + "redis", + "redis_py_cluster_legacy", + "requests", + "rq", + "sanic", + "spark", + "starlette", + "starlite", + "sqlalchemy", + "strawberry", + "tornado", + "trytond", + "typer", + "unleash", +} + + +@functools.cache +def fetch_package(package: str) -> dict: + """Fetch package metadata from PYPI.""" + url = PYPI_PROJECT_URL.format(project=package) + pypi_data = requests.get(url) + + if pypi_data.status_code != 200: + print(f"{package} not found") + + return pypi_data.json() + + +@functools.cache +def fetch_release(package: str, version: Version) -> dict: + url = PYPI_VERSION_URL.format(project=package, version=version) + pypi_data = requests.get(url) + + if pypi_data.status_code != 200: + print(f"{package} not found") + + return pypi_data.json() + + +def _prefilter_releases(integration: str, releases: dict[str, dict]) -> list[Version]: + """Drop versions that are unsupported without making additional API calls.""" + min_supported = _MIN_VERSIONS.get(integration) + if min_supported: + min_supported = Version(".".join(map(str, min_supported))) + else: + print( + f" {integration} doesn't have a minimum version defined in sentry_sdk/integrations/__init__.py. Consider defining one" + ) + + filtered_releases = [] + + for release, metadata in releases.items(): + if not metadata: + continue + + meta = metadata[0] + if datetime.fromisoformat(meta["upload_time"]) < CUTOFF: + continue + + if meta["yanked"]: + continue + + version = Version(release) + + if min_supported and version < min_supported: + continue + + if version.is_prerelease or version.is_postrelease: + # TODO: consider the newest prerelease unless obsolete + continue + + for i, saved_version in enumerate(filtered_releases): + if ( + version.major == saved_version.major + and version.minor == saved_version.minor + and version.micro > saved_version.micro + ): + # Don't save all patch versions of a release, just the newest one + filtered_releases[i] = version + break + else: + filtered_releases.append(version) + + return sorted(filtered_releases) + + +def get_supported_releases(integration: str, pypi_data: dict) -> list[Version]: + """ + Get a list of releases that are currently supported by the SDK. + + This takes into account a handful of parameters (Python support, the lowest + version we've defined for the framework, the date of the release). 
+ """ + package = pypi_data["info"]["name"] + + # Get a consolidated list without taking into account Python support yet + # (because that might require an additional API call for some + # of the releases) + releases = _prefilter_releases(integration, pypi_data["releases"]) + + # Determine Python support + expected_python_versions = TEST_SUITE_CONFIG[integration].get("python") + if expected_python_versions: + expected_python_versions = SpecifierSet(expected_python_versions) + else: + expected_python_versions = SpecifierSet(f">={MIN_PYTHON_VERSION}") + + def _supports_lowest(release: Version) -> bool: + time.sleep(0.1) # don't DoS PYPI + py_versions = determine_python_versions(fetch_release(package, release)) + target_python_versions = TEST_SUITE_CONFIG[integration].get("python") + if target_python_versions: + target_python_versions = SpecifierSet(target_python_versions) + return bool(supported_python_versions(py_versions, target_python_versions)) + + if not _supports_lowest(releases[0]): + i = bisect_left(releases, True, key=_supports_lowest) + if i != len(releases) and _supports_lowest(releases[i]): + # we found the lowest version that supports at least some Python + # version(s) that we do, cut off the rest + releases = releases[i:] + + return releases + + +def pick_releases_to_test(releases: list[Version]) -> list[Version]: + """Pick a handful of releases from a list of supported releases.""" + # If the package has majors (or major-like releases, even if they don't do + # semver), we want to make sure we're testing them all. If not, we just pick + # the oldest, the newest, and a couple in between. + has_majors = len(set([v.major for v in releases])) > 1 + filtered_releases = set() + + if has_majors: + # Always check the very first supported release + filtered_releases.add(releases[0]) + + # Find out the min and max release by each major + releases_by_major = {} + for release in releases: + if release.major not in releases_by_major: + releases_by_major[release.major] = [release, release] + if release < releases_by_major[release.major][0]: + releases_by_major[release.major][0] = release + if release > releases_by_major[release.major][1]: + releases_by_major[release.major][1] = release + + for i, (min_version, max_version) in enumerate(releases_by_major.values()): + filtered_releases.add(max_version) + if i == len(releases_by_major) - 1: + # If this is the latest major release, also check the lowest + # version of this version + filtered_releases.add(min_version) + + else: + indexes = [ + 0, # oldest version supported + len(releases) // 3, + len(releases) // 3 * 2, + -1, # latest + ] + + for i in indexes: + try: + filtered_releases.add(releases[i]) + except IndexError: + pass + + return sorted(filtered_releases) + + +def supported_python_versions( + package_python_versions: Union[SpecifierSet, list[Version]], + custom_supported_versions: Optional[SpecifierSet] = None, +) -> list[Version]: + """Get an intersection of python_versions and Python versions supported in the SDK.""" + supported = [] + + curr = MIN_PYTHON_VERSION + while curr <= MAX_PYTHON_VERSION: + if curr in package_python_versions: + if not custom_supported_versions or curr in custom_supported_versions: + supported.append(curr) + + next = [int(v) for v in str(curr).split(".")] + next[1] += 1 + curr = Version(".".join(map(str, next))) + + return supported + + +def pick_python_versions_to_test(python_versions: list[Version]) -> list[Version]: + filtered_python_versions = { + python_versions[0], + } + + 
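+    # Besides the lowest supported Python version (added above), also test the
+    # highest and, if available, the second-highest supported version.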
filtered_python_versions.add(python_versions[-1]) + try: + filtered_python_versions.add(python_versions[-2]) + except IndexError: + pass + + return sorted(filtered_python_versions) + + +def determine_python_versions(pypi_data: dict) -> Union[SpecifierSet, list[Version]]: + try: + classifiers = pypi_data["info"]["classifiers"] + except (AttributeError, KeyError): + # This function assumes `pypi_data` contains classifiers. This is the case + # for the most recent release in the /{project} endpoint or for any release + # fetched via the /{project}/{version} endpoint. + return [] + + python_versions = [] + for classifier in classifiers: + if classifier.startswith(CLASSIFIER_PREFIX): + python_version = classifier[len(CLASSIFIER_PREFIX) :] + if "." in python_version: + # We don't care about stuff like + # Programming Language :: Python :: 3 :: Only, + # Programming Language :: Python :: 3, + # etc., we're only interested in specific versions, like 3.13 + python_versions.append(Version(python_version)) + + if python_versions: + python_versions.sort() + return python_versions + + # We only use `requires_python` if there are no classifiers. This is because + # `requires_python` doesn't tell us anything about the upper bound, which + # depends on when the release first came out + try: + requires_python = pypi_data["info"]["requires_python"] + except (AttributeError, KeyError): + pass + + if requires_python: + return SpecifierSet(requires_python) + + return [] + + +def _render_python_versions(python_versions: list[Version]) -> str: + return ( + "{" + + ",".join(f"py{version.major}.{version.minor}" for version in python_versions) + + "}" + ) + + +def _render_dependencies(integration: str, releases: list[Version]) -> list[str]: + rendered = [] + for constraint, deps in TEST_SUITE_CONFIG[integration]["deps"].items(): + if constraint == "*": + for dep in deps: + rendered.append(f"{integration}: {dep}") + elif constraint.startswith("py3"): + for dep in deps: + rendered.append(f"{constraint}-{integration}: {dep}") + else: + restriction = SpecifierSet(constraint) + for release in releases: + if release in restriction: + for dep in deps: + rendered.append(f"{integration}-v{release}: {dep}") + + return rendered + + +def write_tox_file(packages: dict) -> None: + template = ENV.get_template("tox.jinja") + + context = {"groups": {}} + for group, integrations in packages.items(): + context["groups"][group] = [] + for integration in integrations: + context["groups"][group].append( + { + "name": integration["name"], + "package": integration["package"], + "extra": integration["extra"], + "releases": integration["releases"], + "dependencies": _render_dependencies( + integration["name"], integration["releases"] + ), + } + ) + + rendered = template.render(context) + + with open(TOX_FILE, "w") as file: + file.write(rendered) + file.write("\n") + + +def _get_tox_hash(): + hasher = hashlib.md5() + with open(TOX_FILE, "rb") as f: + buf = f.read() + hasher.update(buf) + + return hasher.hexdigest() + + +def main(fail_on_changes: bool = False) -> None: + print("Finding out the lowest and highest Python version supported by the SDK...") + global MIN_PYTHON_VERSION, MAX_PYTHON_VERSION + sdk_python_versions = determine_python_versions(fetch_package("sentry_sdk")) + MIN_PYTHON_VERSION = sdk_python_versions[0] + MAX_PYTHON_VERSION = sdk_python_versions[-1] + print( + f"The SDK supports Python versions {MIN_PYTHON_VERSION} - {MAX_PYTHON_VERSION}." 
+ ) + + packages = defaultdict(list) + + for group, integrations in GROUPS.items(): + for integration in integrations: + if integration in IGNORE: + continue + + print(f"Processing {integration}...") + + # Figure out the actual main package + package = TEST_SUITE_CONFIG[integration]["package"] + extra = None + if "[" in package: + extra = package[package.find("[") + 1 : package.find("]")] + package = package[: package.find("[")] + + # Fetch data for the main package + pypi_data = fetch_package(package) + + # Get the list of all supported releases + releases = get_supported_releases(integration, pypi_data) + if not releases: + print(" Found no supported releases.") + continue + + defined_min_version = _MIN_VERSIONS.get(integration) + if defined_min_version: + defined_min_version = Version( + ".".join([str(v) for v in defined_min_version]) + ) + if ( + defined_min_version.major != releases[0].major + or defined_min_version.minor != releases[0].minor + ): + print( + f" Integration defines {defined_min_version} as minimum version, but the effective minimum version is {releases[0]}." + ) + + # Pick a handful of the supported releases to actually test against + # and fetch the PYPI data for each to determine which Python versions + # to test it on + test_releases = pick_releases_to_test(releases) + + for release in test_releases: + target_python_versions = TEST_SUITE_CONFIG[integration].get("python") + if target_python_versions: + target_python_versions = SpecifierSet(target_python_versions) + release_pypi_data = fetch_release(package, release) + release.python_versions = pick_python_versions_to_test( + supported_python_versions( + determine_python_versions(release_pypi_data), + target_python_versions, + ) + ) + if not release.python_versions: + print(f" Release {release} has no Python versions, skipping.") + release.rendered_python_versions = _render_python_versions( + release.python_versions + ) + + time.sleep(0.1) # give PYPI some breathing room + + test_releases = [ + release for release in test_releases if release.python_versions + ] + if test_releases: + packages[group].append( + { + "name": integration, + "package": package, + "extra": extra, + "releases": test_releases, + } + ) + + old_hash = _get_tox_hash() + write_tox_file(packages) + new_hash = _get_tox_hash() + if fail_on_changes and old_hash != new_hash: + raise RuntimeError( + "There are unexpected changes in tox.ini. tox.ini is not meant to " + "be edited directly. It's generated from a template located in " + "scripts/populate_tox/tox.jinja. " + "Please make sure that both the template and the tox generation " + "script in scripts/populate_tox/populate_tox.py are updated as well." + ) + + +if __name__ == "__main__": + fail_on_changes = len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes" + main(fail_on_changes) diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja new file mode 100644 index 0000000000..b60c6f137a --- /dev/null +++ b/scripts/populate_tox/tox.jinja @@ -0,0 +1,899 @@ +# Tox (http://codespeak.net/~hpk/tox/) is a tool for running tests +# in multiple virtualenvs. This configuration file will run the +# test suite on all supported python versions. To use it, "pip install tox" +# and then run "tox" from this directory. +# +# This file has been generated from a template +# by "scripts/populate_tox/populate_tox.py". Any changes to the file should +# be made in the template (if you want to change a hardcoded part of the file) +# or in the script (if you want to change the auto-generated part). 
+# The file (and all resulting CI YAMLs) then need to be regenerated via +# "scripts/generate-test-files.sh". + +[tox] +requires = + # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. + virtualenv<20.26.3 +envlist = + # === Common === + {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common + + # === Gevent === + {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + + # === Integrations === + # General format is {pythonversion}-{integrationname}-v{frameworkversion} + # 1 blank line between different integrations + # Each framework version should only be mentioned once. I.e: + # {py3.7,py3.10}-django-v{3.2} + # {py3.10}-django-v{4.0} + # instead of: + # {py3.7}-django-v{3.2} + # {py3.7,py3.10}-django-v{3.2,4.0} + # + # At a minimum, we should test against at least the lowest + # and the latest supported version of a framework. + + # AIOHTTP + {py3.7}-aiohttp-v{3.4} + {py3.7,py3.9,py3.11}-aiohttp-v{3.8} + {py3.8,py3.12,py3.13}-aiohttp-latest + + # Anthropic + {py3.8,py3.11,py3.12}-anthropic-v{0.16,0.28,0.40} + {py3.7,py3.11,py3.12}-anthropic-latest + + # Ariadne + {py3.8,py3.11}-ariadne-v{0.20} + {py3.8,py3.12,py3.13}-ariadne-latest + + # Arq + {py3.7,py3.11}-arq-v{0.23} + {py3.7,py3.12,py3.13}-arq-latest + + # Asgi + {py3.7,py3.12,py3.13}-asgi + + # asyncpg + {py3.7,py3.10}-asyncpg-v{0.23} + {py3.8,py3.11,py3.12}-asyncpg-latest + + # AWS Lambda + # The aws_lambda tests deploy to the real AWS and have their own + # matrix of Python versions to run the test lambda function in. + # see `lambda_runtime` fixture in tests/integrations/aws_lambda.py + {py3.9}-aws_lambda + + # Beam + {py3.7}-beam-v{2.12} + {py3.8,py3.11}-beam-latest + + # Boto3 + {py3.6,py3.7}-boto3-v{1.12} + {py3.7,py3.11,py3.12}-boto3-v{1.23} + {py3.11,py3.12}-boto3-v{1.34} + {py3.11,py3.12,py3.13}-boto3-latest + + # Bottle + {py3.6,py3.9}-bottle-v{0.12} + {py3.6,py3.12,py3.13}-bottle-latest + + # Celery + {py3.6,py3.8}-celery-v{4} + {py3.6,py3.8}-celery-v{5.0} + {py3.7,py3.10}-celery-v{5.1,5.2} + {py3.8,py3.11,py3.12}-celery-v{5.3,5.4,5.5} + {py3.8,py3.12,py3.13}-celery-latest + + # Chalice + {py3.6,py3.9}-chalice-v{1.16} + {py3.8,py3.12,py3.13}-chalice-latest + + # Clickhouse Driver + {py3.8,py3.11}-clickhouse_driver-v{0.2.0} + {py3.8,py3.12,py3.13}-clickhouse_driver-latest + + # Cloud Resource Context + {py3.6,py3.12,py3.13}-cloud_resource_context + + # Cohere + {py3.9,py3.11,py3.12}-cohere-v5 + {py3.9,py3.11,py3.12}-cohere-latest + + # Django + # - Django 1.x + {py3.6,py3.7}-django-v{1.11} + # - Django 2.x + {py3.6,py3.7}-django-v{2.0} + {py3.6,py3.9}-django-v{2.2} + # - Django 3.x + {py3.6,py3.9}-django-v{3.0} + {py3.6,py3.9,py3.11}-django-v{3.2} + # - Django 4.x + {py3.8,py3.11,py3.12}-django-v{4.0,4.1,4.2} + # - Django 5.x + {py3.10,py3.11,py3.12}-django-v{5.0,5.1} + {py3.10,py3.12,py3.13}-django-latest + + # dramatiq + {py3.6,py3.9}-dramatiq-v{1.13} + {py3.7,py3.10,py3.11}-dramatiq-v{1.15} + {py3.8,py3.11,py3.12}-dramatiq-v{1.17} + {py3.8,py3.11,py3.12}-dramatiq-latest + + # Falcon + {py3.6,py3.7}-falcon-v{1,1.4,2} + {py3.6,py3.11,py3.12}-falcon-v{3} + {py3.8,py3.11,py3.12}-falcon-v{4} + {py3.7,py3.11,py3.12}-falcon-latest + + # FastAPI + {py3.7,py3.10}-fastapi-v{0.79} + {py3.8,py3.12,py3.13}-fastapi-latest + + # Flask + {py3.6,py3.8}-flask-v{1} + {py3.8,py3.11,py3.12}-flask-v{2} + {py3.10,py3.11,py3.12}-flask-v{3} + {py3.10,py3.12,py3.13}-flask-latest + + # GCP + {py3.7}-gcp + + # GQL + {py3.7,py3.11}-gql-v{3.4} + {py3.7,py3.12,py3.13}-gql-latest + + # Graphene + 
{py3.7,py3.11}-graphene-v{3.3} + {py3.7,py3.12,py3.13}-graphene-latest + + # gRPC + {py3.7,py3.9}-grpc-v{1.39} + {py3.7,py3.10}-grpc-v{1.49} + {py3.7,py3.11}-grpc-v{1.59} + {py3.8,py3.11,py3.12}-grpc-latest + + # HTTPX + {py3.6,py3.9}-httpx-v{0.16,0.18} + {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} + {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} + {py3.9,py3.12,py3.13}-httpx-latest + + # Huey + {py3.6,py3.11,py3.12}-huey-v{2.0} + {py3.6,py3.12,py3.13}-huey-latest + + # Huggingface Hub + {py3.9,py3.12,py3.13}-huggingface_hub-{v0.22} + {py3.9,py3.12,py3.13}-huggingface_hub-latest + + # Langchain + {py3.9,py3.11,py3.12}-langchain-v0.1 + {py3.9,py3.11,py3.12}-langchain-v0.3 + {py3.9,py3.11,py3.12}-langchain-latest + {py3.9,py3.11,py3.12}-langchain-notiktoken + + # LaunchDarkly + {py3.8,py3.12,py3.13}-launchdarkly-v9.8.0 + {py3.8,py3.12,py3.13}-launchdarkly-latest + + # Litestar + {py3.8,py3.11}-litestar-v{2.0} + {py3.8,py3.11,py3.12}-litestar-v{2.6} + {py3.8,py3.11,py3.12}-litestar-v{2.12} + {py3.8,py3.11,py3.12}-litestar-latest + + # Loguru + {py3.6,py3.11,py3.12}-loguru-v{0.5} + {py3.6,py3.12,py3.13}-loguru-latest + + # OpenAI + {py3.9,py3.11,py3.12}-openai-v1.0 + {py3.9,py3.11,py3.12}-openai-v1.22 + {py3.9,py3.11,py3.12}-openai-v1.55 + {py3.9,py3.11,py3.12}-openai-latest + {py3.9,py3.11,py3.12}-openai-notiktoken + + # OpenFeature + {py3.8,py3.12,py3.13}-openfeature-v0.7 + {py3.8,py3.12,py3.13}-openfeature-latest + + # OpenTelemetry (OTel) + {py3.7,py3.9,py3.12,py3.13}-opentelemetry + + # OpenTelemetry Experimental (POTel) + {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel + + # pure_eval + {py3.6,py3.12,py3.13}-pure_eval + + # PyMongo (Mongo DB) + {py3.6}-pymongo-v{3.1} + {py3.6,py3.9}-pymongo-v{3.12} + {py3.6,py3.11}-pymongo-v{4.0} + {py3.7,py3.11,py3.12}-pymongo-v{4.3,4.7} + {py3.7,py3.12,py3.13}-pymongo-latest + + # Pyramid + {py3.6,py3.11}-pyramid-v{1.6} + {py3.6,py3.11,py3.12}-pyramid-v{1.10} + {py3.6,py3.11,py3.12}-pyramid-v{2.0} + {py3.6,py3.11,py3.12}-pyramid-latest + + # Quart + {py3.7,py3.11}-quart-v{0.16} + {py3.8,py3.11,py3.12}-quart-v{0.19} + {py3.8,py3.12,py3.13}-quart-latest + + # Ray + {py3.10,py3.11}-ray-v{2.34} + {py3.10,py3.11}-ray-latest + + # Redis + {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8,py3.11}-redis-v{4} + {py3.7,py3.11,py3.12}-redis-v{5} + {py3.7,py3.12,py3.13}-redis-latest + + # Redis Cluster + {py3.6,py3.8}-redis_py_cluster_legacy-v{1,2} + # no -latest, not developed anymore + + # Requests + {py3.6,py3.8,py3.12,py3.13}-requests + + # RQ (Redis Queue) + {py3.6}-rq-v{0.6} + {py3.6,py3.9}-rq-v{0.13,1.0} + {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.11,py3.12}-rq-v{1.15,1.16} + {py3.7,py3.12,py3.13}-rq-latest + + # Sanic + {py3.6,py3.7}-sanic-v{0.8} + {py3.6,py3.8}-sanic-v{20} + {py3.8,py3.11,py3.12}-sanic-v{24.6} + {py3.9,py3.12,py3.13}-sanic-latest + + # Spark + {py3.8,py3.10,py3.11}-spark-v{3.1,3.3,3.5} + {py3.8,py3.10,py3.11,py3.12}-spark-latest + + # Starlette + {py3.7,py3.10}-starlette-v{0.19} + {py3.7,py3.11}-starlette-v{0.24,0.28} + {py3.8,py3.11,py3.12}-starlette-v{0.32,0.36,0.40} + {py3.8,py3.12,py3.13}-starlette-latest + + # Starlite + {py3.8,py3.11}-starlite-v{1.48,1.51} + # 1.51.14 is the last starlite version; the project continues as litestar + + # SQL Alchemy + {py3.6,py3.9}-sqlalchemy-v{1.2,1.4} + {py3.7,py3.11}-sqlalchemy-v{2.0} + {py3.7,py3.12,py3.13}-sqlalchemy-latest + + # Strawberry + {py3.8,py3.11}-strawberry-v{0.209} + {py3.8,py3.11,py3.12}-strawberry-v{0.222} + {py3.8,py3.12,py3.13}-strawberry-latest + + # Tornado + 
{py3.8,py3.11,py3.12}-tornado-v{6.0} + {py3.8,py3.11,py3.12}-tornado-v{6.2} + {py3.8,py3.11,py3.12}-tornado-latest + + # Trytond + {py3.6}-trytond-v{4} + {py3.6,py3.8}-trytond-v{5} + {py3.6,py3.11}-trytond-v{6} + {py3.8,py3.11,py3.12}-trytond-v{7} + {py3.8,py3.12,py3.13}-trytond-latest + + # Typer + {py3.7,py3.12,py3.13}-typer-v{0.15} + {py3.7,py3.12,py3.13}-typer-latest + + # Unleash + {py3.8,py3.12,py3.13}-unleash-v6.0.1 + {py3.8,py3.12,py3.13}-unleash-latest + + # === Integrations - Auto-generated === + # These come from the populate_tox.py script. Eventually we should move all + # integration tests there. + + {% for group, integrations in groups.items() %} + # ~~~ {{ group }} ~~~ + {% for integration in integrations %} + {% for release in integration.releases %} + {{ release.rendered_python_versions }}-{{ integration.name }}-v{{ release }} + {% endfor %} + + {% endfor %} + + {% endfor %} + +[testenv] +deps = + # if you change requirements-testing.txt and your change is not being reflected + # in what's installed by tox (when running tox locally), try running tox + # with the -r flag + -r requirements-testing.txt + + linters: -r requirements-linting.txt + linters: werkzeug<2.3.0 + + # === Common === + py3.8-common: hypothesis + common: pytest-asyncio + # See https://github.com/pytest-dev/pytest/issues/9621 + # and https://github.com/pytest-dev/pytest-forked/issues/67 + # for justification of the upper bound on pytest + {py3.6,py3.7}-common: pytest<7.0.0 + {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest + + # === Gevent === + {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 + {py3.12}-gevent: gevent + # See https://github.com/pytest-dev/pytest/issues/9621 + # and https://github.com/pytest-dev/pytest-forked/issues/67 + # for justification of the upper bound on pytest + {py3.6,py3.7}-gevent: pytest<7.0.0 + {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest + + # === Integrations === + + # AIOHTTP + aiohttp-v3.4: aiohttp~=3.4.0 + aiohttp-v3.8: aiohttp~=3.8.0 + aiohttp-latest: aiohttp + aiohttp: pytest-aiohttp + aiohttp-v3.8: pytest-asyncio + aiohttp-latest: pytest-asyncio + + # Anthropic + anthropic: pytest-asyncio + anthropic-v{0.16,0.28}: httpx<0.28.0 + anthropic-v0.16: anthropic~=0.16.0 + anthropic-v0.28: anthropic~=0.28.0 + anthropic-v0.40: anthropic~=0.40.0 + anthropic-latest: anthropic + + # Ariadne + ariadne-v0.20: ariadne~=0.20.0 + ariadne-latest: ariadne + ariadne: fastapi + ariadne: flask + ariadne: httpx + + # Arq + arq-v0.23: arq~=0.23.0 + arq-v0.23: pydantic<2 + arq-latest: arq + arq: fakeredis>=2.2.0,<2.8 + arq: pytest-asyncio + arq: async-timeout + + # Asgi + asgi: pytest-asyncio + asgi: async-asgi-testclient + + # Asyncpg + asyncpg-v0.23: asyncpg~=0.23.0 + asyncpg-latest: asyncpg + asyncpg: pytest-asyncio + + # AWS Lambda + aws_lambda: boto3 + + # Beam + beam-v2.12: apache-beam~=2.12.0 + beam-latest: apache-beam + + # Boto3 + boto3-v1.12: boto3~=1.12.0 + boto3-v1.23: boto3~=1.23.0 + boto3-v1.34: boto3~=1.34.0 + boto3-latest: boto3 + + # Bottle + bottle: Werkzeug<2.1.0 + bottle-v0.12: bottle~=0.12.0 + bottle-latest: bottle + + # Celery + celery: redis + celery-v4: Celery~=4.0 + celery-v5.0: Celery~=5.0.0 + celery-v5.1: Celery~=5.1.0 + celery-v5.2: Celery~=5.2.0 + celery-v5.3: Celery~=5.3.0 + celery-v5.4: Celery~=5.4.0 + # TODO: update when stable is out + celery-v5.5: Celery==5.5.0rc4 + celery-latest: Celery + + celery: newrelic + {py3.7}-celery: importlib-metadata<5.0 + + # Chalice + chalice: pytest-chalice==0.0.5 + chalice-v1.16: 
chalice~=1.16.0 + chalice-latest: chalice + + # Clickhouse Driver + clickhouse_driver-v0.2.0: clickhouse_driver~=0.2.0 + clickhouse_driver-latest: clickhouse_driver + + # Cohere + cohere-v5: cohere~=5.3.3 + cohere-latest: cohere + + # Django + django: psycopg2-binary + django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 + django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0,5.1}: channels[daphne] + django-v{2.2,3.0}: six + django-v{1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0 + django-v{1.11,2.0,2.2,3.0}: pytest-django<4.0 + django-v{3.2,4.0,4.1,4.2,5.0,5.1}: pytest-django + django-v{4.0,4.1,4.2,5.0,5.1}: djangorestframework + django-v{4.0,4.1,4.2,5.0,5.1}: pytest-asyncio + django-v{4.0,4.1,4.2,5.0,5.1}: Werkzeug + django-latest: djangorestframework + django-latest: pytest-asyncio + django-latest: pytest-django + django-latest: Werkzeug + django-latest: channels[daphne] + + django-v1.11: Django~=1.11.0 + django-v2.0: Django~=2.0.0 + django-v2.2: Django~=2.2.0 + django-v3.0: Django~=3.0.0 + django-v3.2: Django~=3.2.0 + django-v4.0: Django~=4.0.0 + django-v4.1: Django~=4.1.0 + django-v4.2: Django~=4.2.0 + django-v5.0: Django~=5.0.0 + django-v5.1: Django==5.1rc1 + django-latest: Django + + # dramatiq + dramatiq-v1.13: dramatiq>=1.13,<1.14 + dramatiq-v1.15: dramatiq>=1.15,<1.16 + dramatiq-v1.17: dramatiq>=1.17,<1.18 + dramatiq-latest: dramatiq + + # Falcon + falcon-v1.4: falcon~=1.4.0 + falcon-v1: falcon~=1.0 + falcon-v2: falcon~=2.0 + falcon-v3: falcon~=3.0 + falcon-v4: falcon~=4.0 + falcon-latest: falcon + + # FastAPI + fastapi: httpx + # (this is a dependency of httpx) + fastapi: anyio<4.0.0 + fastapi: pytest-asyncio + fastapi: python-multipart + fastapi: requests + fastapi-v{0.79}: fastapi~=0.79.0 + fastapi-latest: fastapi + + # Flask + flask: flask-login + flask-v{1,2.0}: Werkzeug<2.1.0 + flask-v{1,2.0}: markupsafe<2.1.0 + flask-v{3}: Werkzeug + flask-v1: Flask~=1.0 + flask-v2: Flask~=2.0 + flask-v3: Flask~=3.0 + flask-latest: Flask + + # GQL + gql-v{3.4}: gql[all]~=3.4.0 + gql-latest: gql[all] + + # Graphene + graphene: blinker + graphene: fastapi + graphene: flask + graphene: httpx + graphene-v{3.3}: graphene~=3.3.0 + graphene-latest: graphene + + # gRPC + grpc: protobuf + grpc: mypy-protobuf + grpc: types-protobuf + grpc: pytest-asyncio + grpc-v1.39: grpcio~=1.39.0 + grpc-v1.49: grpcio~=1.49.1 + grpc-v1.59: grpcio~=1.59.0 + grpc-latest: grpcio + + # HTTPX + httpx-v0.16: pytest-httpx==0.10.0 + httpx-v0.18: pytest-httpx==0.12.0 + httpx-v0.20: pytest-httpx==0.14.0 + httpx-v0.22: pytest-httpx==0.19.0 + httpx-v0.23: pytest-httpx==0.21.0 + httpx-v0.24: pytest-httpx==0.22.0 + httpx-v0.25: pytest-httpx==0.25.0 + httpx: pytest-httpx + # anyio is a dep of httpx + httpx: anyio<4.0.0 + httpx-v0.16: httpx~=0.16.0 + httpx-v0.18: httpx~=0.18.0 + httpx-v0.20: httpx~=0.20.0 + httpx-v0.22: httpx~=0.22.0 + httpx-v0.23: httpx~=0.23.0 + httpx-v0.24: httpx~=0.24.0 + httpx-v0.25: httpx~=0.25.0 + httpx-v0.27: httpx~=0.27.0 + httpx-latest: httpx + + # Huey + huey-v2.0: huey~=2.0.0 + huey-latest: huey + + # Huggingface Hub + huggingface_hub-v0.22: huggingface_hub~=0.22.2 + huggingface_hub-latest: huggingface_hub + + # Langchain + langchain-v0.1: openai~=1.0.0 + langchain-v0.1: langchain~=0.1.11 + langchain-v0.1: tiktoken~=0.6.0 + langchain-v0.1: httpx<0.28.0 + langchain-v0.3: langchain~=0.3.0 + langchain-v0.3: langchain-community + langchain-v0.3: tiktoken + langchain-v0.3: openai + langchain-{latest,notiktoken}: langchain + langchain-{latest,notiktoken}: langchain-openai + langchain-{latest,notiktoken}: 
openai>=1.6.1 + langchain-latest: tiktoken~=0.6.0 + + # Litestar + litestar: pytest-asyncio + litestar: python-multipart + litestar: requests + litestar: cryptography + litestar-v{2.0,2.6}: httpx<0.28 + litestar-v2.0: litestar~=2.0.0 + litestar-v2.6: litestar~=2.6.0 + litestar-v2.12: litestar~=2.12.0 + litestar-latest: litestar + + # Loguru + loguru-v0.5: loguru~=0.5.0 + loguru-latest: loguru + + # OpenAI + openai: pytest-asyncio + openai-v1.0: openai~=1.0.0 + openai-v1.0: tiktoken + openai-v1.0: httpx<0.28.0 + openai-v1.22: openai~=1.22.0 + openai-v1.22: tiktoken + openai-v1.22: httpx<0.28.0 + openai-v1.55: openai~=1.55.0 + openai-v1.55: tiktoken + openai-latest: openai + openai-latest: tiktoken~=0.6.0 + openai-notiktoken: openai + + # OpenFeature + openfeature-v0.7: openfeature-sdk~=0.7.1 + openfeature-latest: openfeature-sdk + + # LaunchDarkly + launchdarkly-v9.8.0: launchdarkly-server-sdk~=9.8.0 + launchdarkly-latest: launchdarkly-server-sdk + + # Unleash + unleash-v6.0.1: UnleashClient~=6.0.1 + unleash-latest: UnleashClient + + # OpenTelemetry (OTel) + opentelemetry: opentelemetry-distro + + # OpenTelemetry Experimental (POTel) + potel: -e .[opentelemetry-experimental] + + # pure_eval + pure_eval: pure_eval + + # PyMongo (MongoDB) + pymongo: mockupdb + pymongo-v3.1: pymongo~=3.1.0 + pymongo-v3.13: pymongo~=3.13.0 + pymongo-v4.0: pymongo~=4.0.0 + pymongo-v4.3: pymongo~=4.3.0 + pymongo-v4.7: pymongo~=4.7.0 + pymongo-latest: pymongo + + # Pyramid + pyramid: Werkzeug<2.1.0 + pyramid-v1.6: pyramid~=1.6.0 + pyramid-v1.10: pyramid~=1.10.0 + pyramid-v2.0: pyramid~=2.0.0 + pyramid-latest: pyramid + + # Quart + quart: quart-auth + quart: pytest-asyncio + quart-v0.16: blinker<1.6 + quart-v0.16: jinja2<3.1.0 + quart-v0.16: Werkzeug<2.1.0 + quart-v0.16: hypercorn<0.15.0 + quart-v0.16: quart~=0.16.0 + quart-v0.19: Werkzeug>=3.0.0 + quart-v0.19: quart~=0.19.0 + {py3.8}-quart: taskgroup==0.0.0a4 + quart-latest: quart + + # Ray + ray-v2.34: ray~=2.34.0 + ray-latest: ray + + # Redis + redis: fakeredis!=1.7.4 + redis: pytest<8.0.0 + {py3.6,py3.7}-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio + redis-v3: redis~=3.0 + redis-v4: redis~=4.0 + redis-v5: redis~=5.0 + redis-latest: redis + + # Redis Cluster + redis_py_cluster_legacy-v1: redis-py-cluster~=1.0 + redis_py_cluster_legacy-v2: redis-py-cluster~=2.0 + + # Requests + requests: requests>=2.0 + + # RQ (Redis Queue) + # https://github.com/jamesls/fakeredis/issues/245 + rq-v{0.6}: fakeredis<1.0 + rq-v{0.6}: redis<3.2.2 + rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 + rq-v{1.15,1.16}: fakeredis + {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + rq-latest: fakeredis + {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + rq-v0.6: rq~=0.6.0 + rq-v0.13: rq~=0.13.0 + rq-v1.0: rq~=1.0.0 + rq-v1.5: rq~=1.5.0 + rq-v1.10: rq~=1.10.0 + rq-v1.15: rq~=1.15.0 + rq-v1.16: rq~=1.16.0 + rq-latest: rq + + # Sanic + sanic: websockets<11.0 + sanic: aiohttp + sanic-v{24.6}: sanic_testing + sanic-latest: sanic_testing + {py3.6}-sanic: aiocontextvars==0.2.1 + sanic-v0.8: sanic~=0.8.0 + sanic-v20: sanic~=20.0 + sanic-v24.6: sanic~=24.6.0 + sanic-latest: sanic + + # Spark + spark-v3.1: pyspark~=3.1.0 + spark-v3.3: pyspark~=3.3.0 + spark-v3.5: pyspark~=3.5.0 + # TODO: update to ~=4.0.0 once stable is out + spark-v4.0: pyspark==4.0.0.dev2 + spark-latest: pyspark + + # Starlette + 
starlette: pytest-asyncio + starlette: python-multipart + starlette: requests + # (this is a dependency of httpx) + starlette: anyio<4.0.0 + starlette: jinja2 + starlette-v{0.19,0.24,0.28,0.32,0.36}: httpx<0.28.0 + starlette-v0.40: httpx + starlette-latest: httpx + starlette-v0.19: starlette~=0.19.0 + starlette-v0.24: starlette~=0.24.0 + starlette-v0.28: starlette~=0.28.0 + starlette-v0.32: starlette~=0.32.0 + starlette-v0.36: starlette~=0.36.0 + starlette-v0.40: starlette~=0.40.0 + starlette-latest: starlette + + # Starlite + starlite: pytest-asyncio + starlite: python-multipart + starlite: requests + starlite: cryptography + starlite: pydantic<2.0.0 + starlite: httpx<0.28 + starlite-v{1.48}: starlite~=1.48.0 + starlite-v{1.51}: starlite~=1.51.0 + + # SQLAlchemy + sqlalchemy-v1.2: sqlalchemy~=1.2.0 + sqlalchemy-v1.4: sqlalchemy~=1.4.0 + sqlalchemy-v2.0: sqlalchemy~=2.0.0 + sqlalchemy-latest: sqlalchemy + + # Strawberry + strawberry: fastapi + strawberry: flask + strawberry: httpx + strawberry-v0.209: strawberry-graphql[fastapi,flask]~=0.209.0 + strawberry-v0.222: strawberry-graphql[fastapi,flask]~=0.222.0 + strawberry-latest: strawberry-graphql[fastapi,flask] + + # Tornado + # Tornado <6.4.1 is incompatible with Pytest ≥8.2 + # See https://github.com/tornadoweb/tornado/pull/3382. + tornado-{v6.0,v6.2}: pytest<8.2 + tornado-v6.0: tornado~=6.0.0 + tornado-v6.2: tornado~=6.2.0 + tornado-latest: tornado + + # Trytond + trytond: werkzeug + trytond-v4: werkzeug<1.0 + trytond-v4: trytond~=4.0 + trytond-v5: trytond~=5.0 + trytond-v6: trytond~=6.0 + trytond-v7: trytond~=7.0 + trytond-latest: trytond + + # Typer + typer-v0.15: typer~=0.15.0 + typer-latest: typer + + # === Integrations - Auto-generated === + # These come from the populate_tox.py script. Eventually we should move all + # integration tests there. 
+ + {% for group, integrations in groups.items() %} + # ~~~ {{ group }} ~~~ + {% for integration in integrations %} + {% for release in integration.releases %} + {% if integration.extra %} + {{ integration.name }}-v{{ release }}: {{ integration.package }}[{{ integration.extra }}]=={{ release }} + {% else %} + {{ integration.name }}-v{{ release }}: {{ integration.package }}=={{ release }} + {% endif %} + {% endfor %} + {% for dep in integration.dependencies %} + {{ dep }} + {% endfor %} + + {% endfor %} + + {% endfor %} + +setenv = + PYTHONDONTWRITEBYTECODE=1 + OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES + COVERAGE_FILE=.coverage-sentry-{envname} + py3.6: COVERAGE_RCFILE=.coveragerc36 + + django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings + + common: TESTPATH=tests + gevent: TESTPATH=tests + aiohttp: TESTPATH=tests/integrations/aiohttp + anthropic: TESTPATH=tests/integrations/anthropic + ariadne: TESTPATH=tests/integrations/ariadne + arq: TESTPATH=tests/integrations/arq + asgi: TESTPATH=tests/integrations/asgi + asyncpg: TESTPATH=tests/integrations/asyncpg + aws_lambda: TESTPATH=tests/integrations/aws_lambda + beam: TESTPATH=tests/integrations/beam + boto3: TESTPATH=tests/integrations/boto3 + bottle: TESTPATH=tests/integrations/bottle + celery: TESTPATH=tests/integrations/celery + chalice: TESTPATH=tests/integrations/chalice + clickhouse_driver: TESTPATH=tests/integrations/clickhouse_driver + cohere: TESTPATH=tests/integrations/cohere + cloud_resource_context: TESTPATH=tests/integrations/cloud_resource_context + django: TESTPATH=tests/integrations/django + dramatiq: TESTPATH=tests/integrations/dramatiq + falcon: TESTPATH=tests/integrations/falcon + fastapi: TESTPATH=tests/integrations/fastapi + flask: TESTPATH=tests/integrations/flask + gcp: TESTPATH=tests/integrations/gcp + gql: TESTPATH=tests/integrations/gql + graphene: TESTPATH=tests/integrations/graphene + grpc: TESTPATH=tests/integrations/grpc + httpx: TESTPATH=tests/integrations/httpx + huey: TESTPATH=tests/integrations/huey + huggingface_hub: TESTPATH=tests/integrations/huggingface_hub + langchain: TESTPATH=tests/integrations/langchain + launchdarkly: TESTPATH=tests/integrations/launchdarkly + litestar: TESTPATH=tests/integrations/litestar + loguru: TESTPATH=tests/integrations/loguru + openai: TESTPATH=tests/integrations/openai + openfeature: TESTPATH=tests/integrations/openfeature + opentelemetry: TESTPATH=tests/integrations/opentelemetry + potel: TESTPATH=tests/integrations/opentelemetry + pure_eval: TESTPATH=tests/integrations/pure_eval + pymongo: TESTPATH=tests/integrations/pymongo + pyramid: TESTPATH=tests/integrations/pyramid + quart: TESTPATH=tests/integrations/quart + ray: TESTPATH=tests/integrations/ray + redis: TESTPATH=tests/integrations/redis + redis_py_cluster_legacy: TESTPATH=tests/integrations/redis_py_cluster_legacy + requests: TESTPATH=tests/integrations/requests + rq: TESTPATH=tests/integrations/rq + sanic: TESTPATH=tests/integrations/sanic + spark: TESTPATH=tests/integrations/spark + starlette: TESTPATH=tests/integrations/starlette + starlite: TESTPATH=tests/integrations/starlite + sqlalchemy: TESTPATH=tests/integrations/sqlalchemy + strawberry: TESTPATH=tests/integrations/strawberry + tornado: TESTPATH=tests/integrations/tornado + trytond: TESTPATH=tests/integrations/trytond + typer: TESTPATH=tests/integrations/typer + unleash: TESTPATH=tests/integrations/unleash + socket: TESTPATH=tests/integrations/socket + +passenv = + SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID + 
SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY + SENTRY_PYTHON_TEST_POSTGRES_HOST + SENTRY_PYTHON_TEST_POSTGRES_USER + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD + SENTRY_PYTHON_TEST_POSTGRES_NAME + +usedevelop = True + +extras = + bottle: bottle + falcon: falcon + flask: flask + pymongo: pymongo + +basepython = + py3.6: python3.6 + py3.7: python3.7 + py3.8: python3.8 + py3.9: python3.9 + py3.10: python3.10 + py3.11: python3.11 + py3.12: python3.12 + py3.13: python3.13 + + # Python version is pinned here because flake8 actually behaves differently + # depending on which version is used. You can patch this out to point to + # some random Python 3 binary, but then you get guaranteed mismatches with + # CI. Other tools such as mypy and black have options that pin the Python + # version. + linters: python3.12 + +commands = + {py3.7,py3.8}-boto3: pip install urllib3<2.0.0 + + ; https://github.com/pallets/flask/issues/4455 + {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1" + + ; Running `pytest` as an executable suffers from an import error + ; when loading tests in scenarios. In particular, django fails to + ; load the settings from the test module. + python -m pytest {env:TESTPATH} -o junit_suite_name={envname} {posargs} + +[testenv:linters] +commands = + flake8 tests sentry_sdk + black --check tests sentry_sdk + mypy sentry_sdk diff --git a/tox.ini b/tox.ini index 3cab20a1f1..c82d7d9159 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,13 @@ # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. +# +# This file has been generated from a template +# by "scripts/populate_tox/populate_tox.py". Any changes to the file should +# be made in the template (if you want to change a hardcoded part of the file) +# or in the script (if you want to change the auto-generated part). +# The file (and all resulting CI YAMLs) then need to be regenerated via +# "scripts/generate-test-files.sh". [tox] requires = @@ -294,6 +301,11 @@ envlist = {py3.8,py3.12,py3.13}-unleash-v6.0.1 {py3.8,py3.12,py3.13}-unleash-latest + # === Integrations - Auto-generated === + # These come from the populate_tox.py script. Eventually we should move all + # integration tests there. + + [testenv] deps = # if you change requirements-testing.txt and your change is not being reflected @@ -738,6 +750,11 @@ deps = typer-v0.15: typer~=0.15.0 typer-latest: typer + # === Integrations - Auto-generated === + # These come from the populate_tox.py script. Eventually we should move all + # integration tests there. 
+ + setenv = PYTHONDONTWRITEBYTECODE=1 OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES From c2c645077568e7195144132ead05f17c2528e2a1 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 20 Jan 2025 15:50:43 +0100 Subject: [PATCH 02/17] add missing file --- scripts/populate_tox/requirements.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 scripts/populate_tox/requirements.txt diff --git a/scripts/populate_tox/requirements.txt b/scripts/populate_tox/requirements.txt new file mode 100644 index 0000000000..0402fac5ab --- /dev/null +++ b/scripts/populate_tox/requirements.txt @@ -0,0 +1,3 @@ +jinja2 +packaging +requests From 6b9417f466de80442fcd0b0656f6b38e403af64d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 20 Jan 2025 16:54:09 +0100 Subject: [PATCH 03/17] capitalization --- scripts/populate_tox/README.md | 2 +- scripts/populate_tox/populate_tox.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index f279dd939e..5e990dfaab 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -13,7 +13,7 @@ There is a template in this directory called `tox.jinja` which contains a combination of hardcoded and generated entries. The `populate_tox.py` script fills out the auto-generated part of that template. -It does this by querying PYPI for each framework's package and its metadata and +It does this by querying PyPI for each framework's package and its metadata and then determining which versions make sense to test to get good coverage. The lowest supported and latest version of a framework are always tested, with diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 7337a4c9f8..cc259a2928 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -112,7 +112,7 @@ @functools.cache def fetch_package(package: str) -> dict: - """Fetch package metadata from PYPI.""" + """Fetch package metadata from PyPI.""" url = PYPI_PROJECT_URL.format(project=package) pypi_data = requests.get(url) From c97198174fb5b4b117721af9e77c7c6d8f4c570d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 20 Jan 2025 17:07:18 +0100 Subject: [PATCH 04/17] comment --- scripts/populate_tox/populate_tox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index cc259a2928..6953829c81 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -252,7 +252,7 @@ def pick_releases_to_test(releases: list[Version]) -> list[Version]: indexes = [ 0, # oldest version supported len(releases) // 3, - len(releases) // 3 * 2, + len(releases) // 3 * 2, # two releases in between, roughly evenly spaced -1, # latest ] From c2d59aea4117141f6365d539acc7dcf699c3deef Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 20 Jan 2025 17:12:03 +0100 Subject: [PATCH 05/17] explicit none check --- scripts/populate_tox/populate_tox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 6953829c81..e26d6a580f 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -136,7 +136,7 @@ def fetch_release(package: str, version: Version) -> dict: def _prefilter_releases(integration: str, releases: dict[str, dict]) -> list[Version]: """Drop versions that are unsupported without making additional API 
calls.""" min_supported = _MIN_VERSIONS.get(integration) - if min_supported: + if min_supported is not None: min_supported = Version(".".join(map(str, min_supported))) else: print( From efaa55fd4d8d164eee177d9c65b4088c73885869 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 22 Jan 2025 09:55:35 +0100 Subject: [PATCH 06/17] Apply suggestions from code review Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- scripts/populate_tox/README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index 5e990dfaab..64bb8d747c 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -102,11 +102,10 @@ you can say: 1. Add the minimum supported version of the framework/library to `_MIN_VERSIONS` in `integrations/__init__.py`. This should be the lowest version of the framework that we can guarantee works with the SDK. If you've just added the - integration, it's fine to set this to the latest version of the framework + integration, you should generally set this to the latest version of the framework at the time. 2. Add the integration and any constraints to `TEST_SUITE_CONFIG`. See the - "Defining constraints" section for the format (or copy-paste one - of the existing entries). + "Defining constraints" section for the format. 3. Add the integration to one of the groups in the `GROUPS` dictionary in `scripts/split_tox_gh_actions/split_tox_gh_actions.py`. 4. Add the `TESTPATH` for the test suite in `tox.jinja`'s `setenv` section. From 474c67965a72f5a076e2838c2b04f86030f6cfba Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 22 Jan 2025 10:15:13 +0100 Subject: [PATCH 07/17] better readme structure --- scripts/populate_tox/README.md | 62 +++++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 20 deletions(-) diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index 64bb8d747c..1c119bcfa4 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -48,7 +48,20 @@ integration_name: { } ``` -The following can be set as a rule: +### `package` + +The name of the third party package as it's listed on PyPI. The script will +be picking different versions of this package to test. + +This key is mandatory. + +### `deps` + +The test dependencies of the test suite. They're defined as a dictionary of +`rule: [package1, package2, ...]` key-value pairs. All packages +in the package list of a rule will be installed as long as the rule applies. + +`rule`s are predefined. Each `rule` must be one of the following: - `*`: packages will be always installed - a version specifier on the main package (e.g. `<=0.32`): packages will only be installed if the main package falls into the version bounds specified @@ -60,41 +73,50 @@ package's dependencies, for example. If e.g. Flask tests generally need Werkzeug and don't care about its version, but Flask older than 3.0 needs a specific Werkzeug version to work, you can say: -``` +```python "flask": { "deps": { "*": ["Werkzeug"], "<3.0": ["Werkzeug<2.1.0"], - } -} -``` - -Sometimes, things depend on the Python version installed. If the integration -test should only run on specific Python version, e.g. if you want AIOHTTP -tests to only run on Python 3.7+, you can say: - -``` -"aiohttp": { + }, ... 
- "python": ">=3.7", } ``` -If, on the other hand, you need to install a specific version of a secondary -dependency on specific Python versions (so the test suite should still run on -said Python versions, just with different dependency-of-a-dependency bounds), -you can say: +If you need to install a specific version of a secondary dependency on specific +Python versions, you can say: -``` +```python "celery": { - ... "deps": { "*": ["newrelic", "redis"], "py3.7": ["importlib-metadata<5.0"], }, -}, + ... +} +``` + +### `python` + +Sometimes, the whole test suite should only run on specific Python versions. +This can be achieved via the `python` key, which expects a version specifier. + +For example, if you want AIOHTTP tests to only run on Python 3.7+, you can say: + +```python +"aiohttp": { + "python": ">=3.7", + ... +} ``` +Specifying `python` is discouraged as the script itself finds out which +Python versions are supported by the package. However, if a package has broken +metadata or the SDK is explicitly not supporting some packages on specific +Python versions (because of, for example, broken context vars), the `python` +key can be used. + + ## How-Tos ### Add a new test suite From 8904a1bd53fb3c3dccebd82b99501ccdefc9d763 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 22 Jan 2025 10:21:45 +0100 Subject: [PATCH 08/17] remove test suite config --- scripts/populate_tox/config.py | 379 +-------------------------------- 1 file changed, 1 insertion(+), 378 deletions(-) diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index 788c7eedac..9e1366c25b 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -5,381 +5,4 @@ # # See scripts/populate_tox/README.md for more info on the format and examples. 
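To make the `deps` rule semantics from the README above concrete, here is an illustrative, standalone resolver for a rule dictionary. This is not how `populate_tox.py` consumes the rules (the script renders them into tox.ini conditionals rather than resolving them at runtime); it only demonstrates which extra packages a given rule set selects for one concrete environment.

```python
# Illustrative only: resolve a "deps" rule set for one main-package version and
# one Python environment. The rule kinds ("*", version specifiers, "pyX.Y"
# lists) follow the README; the resolution logic itself is a simplified sketch.
from packaging.specifiers import SpecifierSet
from packaging.version import Version


def resolve_deps(deps: dict, main_version: Version, py_env: str) -> list[str]:
    resolved = []
    for rule, packages in deps.items():
        if rule == "*":
            resolved.extend(packages)  # always installed
        elif rule.startswith("py"):
            # rule like "py3.7" or "py3.6,py3.7": match the interpreter
            if py_env in {part.strip() for part in rule.split(",")}:
                resolved.extend(packages)
        elif main_version in SpecifierSet(rule):
            # rule like "<2.0": match the main package's version bounds
            resolved.extend(packages)
    return resolved


flask_deps = {"*": ["Werkzeug"], "<2.0": ["werkzeug<2.1.0", "markupsafe<2.1.0"]}
print(resolve_deps(flask_deps, Version("1.1.4"), "py3.8"))
# ['Werkzeug', 'werkzeug<2.1.0', 'markupsafe<2.1.0']
print(resolve_deps(flask_deps, Version("3.0.0"), "py3.12"))
# ['Werkzeug']
```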
-TEST_SUITE_CONFIG = { - "aiohttp": { - "package": "aiohttp", - "deps": {"*": ["pytest-aiohttp", "pytest-asyncio"]}, - "python": ">=3.7", - }, - "anthropic": { - "package": "anthropic", - "deps": { - "*": ["pytest-asyncio"], - "<=0.32": ["httpx<0.28.0"], - }, - "python": ">=3.7", - }, - "ariadne": { - "package": "ariadne", - "deps": { - "*": ["fastapi", "flask", "httpx"], - }, - "python": ">=3.8", - }, - "arq": { - "package": "arq", - "deps": { - "*": ["fakeredis>=2.2.0,<2.8", "pytest-asyncio", "async-timeout"], - "<=0.25": ["pydantic<2"], - }, - "python": ">=3.7", - }, - "asyncpg": { - "package": "asyncpg", - "deps": { - "*": ["pytest-asyncio"], - }, - "python": ">=3.7", - }, - "beam": { - "package": "apache-beam", - "deps": { - "*": [], - }, - "python": ">=3.7", - }, - "boto3": { - "package": "boto3", - "deps": { - "*": [], - }, - }, - "bottle": { - "package": "bottle", - "deps": { - "*": ["werkzeug<2.1.0"], - }, - }, - "celery": { - "package": "celery", - "deps": { - "*": ["newrelic", "redis"], - "py3.7": ["importlib-metadata<5.0"], - }, - }, - "chalice": { - "package": "chalice", - "deps": { - "*": ["pytest-chalice==0.0.5"], - }, - }, - "clickhouse_driver": { - "package": "clickhouse-driver", - "deps": { - "*": [], - }, - }, - "cohere": { - "package": "cohere", - "deps": { - "*": ["httpx"], - }, - }, - "django": { - "package": "django", - "deps": { - "*": [ - "psycopg2-binary", - "werkzeug", - ], - ">=2.0,<3.0": ["six"], - "<=3.2": [ - "werkzeug<2.1.0", - "djangorestframework>=3.0.0,<4.0.0", - "pytest-django", - ], - ">=2.0": ["channels[daphne]"], - "<=3.0": ["pytest-django<4.0"], - ">=4.0": ["djangorestframework", "pytest-asyncio"], - }, - }, - "dramatiq": { - "package": "dramatiq", - "deps": {}, - }, - "falcon": { - "package": "falcon", - "deps": {}, - "python": "<3.13", - }, - "fastapi": { - "package": "fastapi", - "deps": { - "*": [ - "httpx", - "anyio<4.0.0", - "python-multipart", - "pytest-asyncio", - "requests", - ] - }, - "python": ">=3.7", - }, - "flask": { - "package": "flask", - "deps": { - "*": ["flask-login", "werkzeug"], - "<2.0": ["werkzeug<2.1.0", "markupsafe<2.1.0"], - }, - }, - "gql": { - "package": "gql[all]", - "deps": {}, - }, - "graphene": { - "package": "graphene", - "deps": { - "*": ["blinker", "fastapi", "flask", "httpx"], - "py3.6": ["aiocontextvars"], - }, - }, - "grpc": { - "package": "grpcio", - "deps": { - "*": ["protobuf", "mypy-protobuf", "types-protobuf", "pytest-asyncio"], - }, - "python": ">=3.7", - }, - "httpx": { - "package": "httpx", - "deps": { - "*": ["anyio<4.0.0", "pytest-httpx"], - "==0.16": ["pytest-httpx==0.10.0"], - "==0.18": ["pytest-httpx==0.12.0"], - "==0.20": ["pytest-httpx==0.14.0"], - "==0.22": ["pytest-httpx==0.19.0"], - "==0.23": ["pytest-httpx==0.21.0"], - "==0.24": ["pytest-httpx==0.22.0"], - "==0.25": ["pytest-httpx==0.25.0"], - }, - }, - "huey": { - "package": "huey", - "deps": { - "*": [], - }, - }, - "huggingface_hub": { - "package": "huggingface_hub", - "deps": {"*": []}, - }, - "langchain": { - "package": "langchain", - "deps": { - "*": ["openai", "tiktoken", "httpx"], - ">=0.3": ["langchain-community"], - }, - }, - "langchain_notiktoken": { - "package": "langchain", - "deps": { - "*": ["openai", "httpx"], - ">=0.3": ["langchain-community"], - }, - }, - "litestar": { - "package": "litestar", - "deps": { - "*": ["pytest-asyncio", "python-multipart", "requests", "cryptography"], - "<=2.6": ["httpx<0.28"], - }, - }, - "loguru": { - "package": "loguru", - "deps": { - "*": [], - }, - }, - # XXX - # openai-latest: tiktoken~=0.6.0 
- "openai": { - "package": "openai", - "deps": { - "*": ["pytest-asyncio", "tiktoken", "httpx"], - "<=1.22": ["httpx<0.28.0"], - }, - }, - "openai_notiktoken": { - "package": "openai", - "deps": { - "*": ["pytest-asyncio", "httpx"], - "<=1.22": ["httpx<0.28.0"], - }, - }, - "openfeature": { - "package": "openfeature-sdk", - "deps": { - "*": [], - }, - }, - "launchdarkly": { - "package": "launchdarkly-server-sdk", - "deps": { - "*": [], - }, - }, - "opentelemetry": { - "package": "opentelemetry-distro", - "deps": { - "*": [], - }, - }, - "pure_eval": { - "package": "pure_eval", - "deps": { - "*": [], - }, - }, - "pymongo": { - "package": "pymongo", - "deps": { - "*": ["mockupdb"], - }, - }, - "pyramid": { - "package": "pyramid", - "deps": { - "*": ["werkzeug<2.1.0"], - }, - }, - "quart": { - "package": "quart", - "deps": { - "*": [ - "quart-auth", - "pytest-asyncio", - "werkzeug", - ], - "<=0.19": [ - "blinker<1.6", - "jinja2<3.1.0", - "Werkzeug<2.1.0", - "hypercorn<0.15.0", - ], - "py3.8": ["taskgroup==0.0.0a4"], - }, - }, - "ray": { - "package": "ray", - "deps": {}, - }, - "redis": { - "package": "redis", - "deps": { - "*": ["fakeredis!=1.7.4", "pytest<8.0.0", "pytest-asyncio"], - "py3.6,py3.7": [ - "fakeredis!=2.26.0" - ], # https://github.com/cunla/fakeredis-py/issues/341 - }, - }, - "redis_py_cluster_legacy": { - "package": "redis-py-cluster", - "deps": {}, - }, - "requests": { - "package": "requests", - "deps": {}, - }, - "rq": { - "package": "rq", - "deps": { - "*": ["fakeredis"], - "<0.13": [ - "fakeredis<1.0", - "redis<3.2.2", - ], # https://github.com/jamesls/fakeredis/issues/245 - ">=0.13,<=1.10": ["fakeredis>=1.0,<1.7.4"], - "py3.6,py3.7": [ - "fakeredis!=2.26.0" - ], # https://github.com/cunla/fakeredis-py/issues/341 - }, - }, - "sanic": { - "package": "sanic", - "deps": { - "*": ["websockets<11.0", "aiohttp", "sanic_testing"], - ">=22.0": ["sanic_testing"], - "py3.6": ["aiocontextvars==0.2.1"], - }, - }, - "spark": { - "package": "pyspark", - "deps": {}, - "python": ">=3.8", - }, - "starlette": { - "package": "starlette", - "deps": { - "*": [ - "pytest-asyncio", - "python-multipart", - "requests", - "anyio<4.0.0", - "jinja2", - "httpx", - ], - "<=0.36": ["httpx<0.28.0"], - "<0.15": ["jinja2<3.1"], - "py3.6": ["aiocontextvars"], - }, - }, - "starlite": { - "package": "starlite", - "deps": { - "*": [ - "pytest-asyncio", - "python-multipart", - "requests", - "cryptography", - "pydantic<2.0.0", - "httpx<0.28", - ], - }, - "python": "<=3.11", - }, - "sqlalchemy": { - "package": "sqlalchemy", - "deps": {}, - }, - "strawberry": { - "package": "strawberry-graphql[fastapi,flask]", - "deps": { - "*": ["fastapi", "flask", "httpx"], - }, - }, - "tornado": { - "package": "tornado", - "deps": { - "*": ["pytest"], - "<=6.4.1": [ - "pytest<8.2" - ], # https://github.com/tornadoweb/tornado/pull/3382 - "py3.6": ["aiocontextvars"], - }, - }, - "trytond": { - "package": "trytond", - "deps": { - "*": ["werkzeug"], - "<=5.0": ["werkzeug<1.0"], - }, - }, - "typer": { - "package": "typer", - "deps": {}, - }, - "unleash": { - "package": "UnleashClient", - "deps": {}, - }, -} +TEST_SUITE_CONFIG = {} From 85724894bf02493caad70c10a3f56d1561a32836 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 22 Jan 2025 10:50:06 +0100 Subject: [PATCH 09/17] dont require deps --- scripts/populate_tox/populate_tox.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index e26d6a580f..53ad9f01b4 100644 --- 
a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -347,6 +347,10 @@ def _render_python_versions(python_versions: list[Version]) -> str: def _render_dependencies(integration: str, releases: list[Version]) -> list[str]: rendered = [] + + if TEST_SUITE_CONFIG[integration].get("deps") is None: + return rendered + for constraint, deps in TEST_SUITE_CONFIG[integration]["deps"].items(): if constraint == "*": for dep in deps: From 3e1c28d574771b3f415a70b539f7afa6a1307420 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 22 Jan 2025 11:11:34 +0100 Subject: [PATCH 10/17] remove fail-on-changes --- scripts/populate_tox/populate_tox.py | 25 ++----------------------- 1 file changed, 2 insertions(+), 23 deletions(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 53ad9f01b4..2e44aea405 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -3,7 +3,6 @@ """ import functools -import hashlib import os import sys import time @@ -394,16 +393,7 @@ def write_tox_file(packages: dict) -> None: file.write("\n") -def _get_tox_hash(): - hasher = hashlib.md5() - with open(TOX_FILE, "rb") as f: - buf = f.read() - hasher.update(buf) - - return hasher.hexdigest() - - -def main(fail_on_changes: bool = False) -> None: +def main() -> None: print("Finding out the lowest and highest Python version supported by the SDK...") global MIN_PYTHON_VERSION, MAX_PYTHON_VERSION sdk_python_versions = determine_python_versions(fetch_package("sentry_sdk")) @@ -488,19 +478,8 @@ def main(fail_on_changes: bool = False) -> None: } ) - old_hash = _get_tox_hash() write_tox_file(packages) - new_hash = _get_tox_hash() - if fail_on_changes and old_hash != new_hash: - raise RuntimeError( - "There are unexpected changes in tox.ini. tox.ini is not meant to " - "be edited directly. It's generated from a template located in " - "scripts/populate_tox/tox.jinja. " - "Please make sure that both the template and the tox generation " - "script in scripts/populate_tox/populate_tox.py are updated as well." 
- ) if __name__ == "__main__": - fail_on_changes = len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes" - main(fail_on_changes) + main() From c96bf0c32b384d62381a1fc5c5b4bdeb716aa6de Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Wed, 22 Jan 2025 14:07:52 +0100 Subject: [PATCH 11/17] put more stuff into functions --- scripts/populate_tox/populate_tox.py | 83 +++++++++++++++++----------- 1 file changed, 50 insertions(+), 33 deletions(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 2e44aea405..6bd9723d3a 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -12,7 +12,7 @@ from packaging.specifiers import SpecifierSet from packaging.version import Version from pathlib import Path -from typing import Optional, Union +from typing import Optional, Tuple, Union sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) @@ -393,6 +393,50 @@ def write_tox_file(packages: dict) -> None: file.write("\n") +def _get_package_name(integration: str) -> Tuple[str, Optional[str]]: + package = TEST_SUITE_CONFIG[integration]["package"] + extra = None + if "[" in package: + extra = package[package.find("[") + 1 : package.find("]")] + package = package[: package.find("[")] + + return package, extra + + +def _compare_min_version_with_defined( + integration: str, releases: list[Version] +) -> None: + defined_min_version = _MIN_VERSIONS.get(integration) + if defined_min_version: + defined_min_version = Version(".".join([str(v) for v in defined_min_version])) + if ( + defined_min_version.major != releases[0].major + or defined_min_version.minor != releases[0].minor + ): + print( + f" Integration defines {defined_min_version} as minimum " + f"version, but the effective minimum version is {releases[0]}." + ) + + +def _add_python_versions_to_release(integration: str, package: str, release: Version): + release_pypi_data = fetch_release(package, release) + time.sleep(0.1) # give PYPI some breathing room + + target_python_versions = TEST_SUITE_CONFIG[integration].get("python") + if target_python_versions: + target_python_versions = SpecifierSet(target_python_versions) + + release.python_versions = pick_python_versions_to_test( + supported_python_versions( + determine_python_versions(release_pypi_data), + target_python_versions, + ) + ) + + release.rendered_python_versions = _render_python_versions(release.python_versions) + + def main() -> None: print("Finding out the lowest and highest Python version supported by the SDK...") global MIN_PYTHON_VERSION, MAX_PYTHON_VERSION @@ -413,11 +457,7 @@ def main() -> None: print(f"Processing {integration}...") # Figure out the actual main package - package = TEST_SUITE_CONFIG[integration]["package"] - extra = None - if "[" in package: - extra = package[package.find("[") + 1 : package.find("]")] - package = package[: package.find("[")] + package, extra = _get_package_name(integration) # Fetch data for the main package pypi_data = fetch_package(package) @@ -428,18 +468,7 @@ def main() -> None: print(" Found no supported releases.") continue - defined_min_version = _MIN_VERSIONS.get(integration) - if defined_min_version: - defined_min_version = Version( - ".".join([str(v) for v in defined_min_version]) - ) - if ( - defined_min_version.major != releases[0].major - or defined_min_version.minor != releases[0].minor - ): - print( - f" Integration defines {defined_min_version} as minimum version, but the effective minimum version is {releases[0]}." 
- ) + _compare_min_version_with_defined(integration, releases) # Pick a handful of the supported releases to actually test against # and fetch the PYPI data for each to determine which Python versions @@ -447,23 +476,11 @@ def main() -> None: test_releases = pick_releases_to_test(releases) for release in test_releases: - target_python_versions = TEST_SUITE_CONFIG[integration].get("python") - if target_python_versions: - target_python_versions = SpecifierSet(target_python_versions) - release_pypi_data = fetch_release(package, release) - release.python_versions = pick_python_versions_to_test( - supported_python_versions( - determine_python_versions(release_pypi_data), - target_python_versions, - ) + py_versions = _add_python_versions_to_release( + integration, package, release ) - if not release.python_versions: + if not py_versions: print(f" Release {release} has no Python versions, skipping.") - release.rendered_python_versions = _render_python_versions( - release.python_versions - ) - - time.sleep(0.1) # give PYPI some breathing room test_releases = [ release for release in test_releases if release.python_versions From 2d272607a92a80119d30669b16cd0e136d0cd7f2 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 3 Feb 2025 10:03:16 +0100 Subject: [PATCH 12/17] fix numbering, env name --- scripts/generate-test-files.sh | 4 ++-- scripts/populate_tox/README.md | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/generate-test-files.sh b/scripts/generate-test-files.sh index b7420b28e9..40e279cdf4 100755 --- a/scripts/generate-test-files.sh +++ b/scripts/generate-test-files.sh @@ -6,8 +6,8 @@ set -xe cd "$(dirname "$0")" -python -m venv .venv -. .venv/bin/activate +python -m venv toxgen.venv +. toxgen.venv/bin/activate pip install -e .. pip install -r populate_tox/requirements.txt diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index 1c119bcfa4..b68e74bd26 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -140,9 +140,9 @@ them all to `populate_tox.py` over time. 1. Remove the integration from the `IGNORE` list in `populate_tox.py`. 2. Remove the hardcoded entries for the integration from the `envlist` and `deps` sections of `tox.jinja`. -2. Run `scripts/generate-test-files.sh`. -3. Run the test suite, either locally or by creating a PR. -4. Address any test failures that happen. +3. Run `scripts/generate-test-files.sh`. +4. Run the test suite, either locally or by creating a PR. +5. Address any test failures that happen. You might have to introduce additional version bounds on the dependencies of the package. Try to determine the source of the failure and address it. 
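The `_get_package_name` helper factored out in patch 11 splits a PyPI spec such as `gql[all]` or `strawberry-graphql[fastapi,flask]` into the package name and its extras. A standalone restatement of that split, shown only to illustrate the expected input/output behaviour:

```python
# Standalone restatement of the name/extra split performed by
# _get_package_name() in patch 11; names here are illustrative.
from typing import Optional


def split_package_spec(spec: str) -> tuple[str, Optional[str]]:
    if "[" not in spec:
        return spec, None
    return spec[: spec.find("[")], spec[spec.find("[") + 1 : spec.find("]")]


assert split_package_spec("celery") == ("celery", None)
assert split_package_spec("gql[all]") == ("gql", "all")
assert split_package_spec("strawberry-graphql[fastapi,flask]") == (
    "strawberry-graphql",
    "fastapi,flask",
)
```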
From 286e76f305c95b495437ec97fa80e8a331df8b0e Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 3 Feb 2025 10:39:47 +0100 Subject: [PATCH 13/17] rename var --- scripts/populate_tox/populate_tox.py | 37 +++++++++++++++++----------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 6bd9723d3a..59a4c5a0fe 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -9,6 +9,7 @@ from bisect import bisect_left from collections import defaultdict from datetime import datetime, timedelta +from importlib.metadata import metadata from packaging.specifiers import SpecifierSet from packaging.version import Version from pathlib import Path @@ -144,11 +145,11 @@ def _prefilter_releases(integration: str, releases: dict[str, dict]) -> list[Ver filtered_releases = [] - for release, metadata in releases.items(): - if not metadata: + for release, data in releases.items(): + if not data: continue - meta = metadata[0] + meta = data[0] if datetime.fromisoformat(meta["upload_time"]) < CUTOFF: continue @@ -298,15 +299,7 @@ def pick_python_versions_to_test(python_versions: list[Version]) -> list[Version return sorted(filtered_python_versions) -def determine_python_versions(pypi_data: dict) -> Union[SpecifierSet, list[Version]]: - try: - classifiers = pypi_data["info"]["classifiers"] - except (AttributeError, KeyError): - # This function assumes `pypi_data` contains classifiers. This is the case - # for the most recent release in the /{project} endpoint or for any release - # fetched via the /{project}/{version} endpoint. - return [] - +def _parse_python_versions_from_classifiers(classifiers: list[str]) -> list[Version]: python_versions = [] for classifier in classifiers: if classifier.startswith(CLASSIFIER_PREFIX): @@ -322,6 +315,21 @@ def determine_python_versions(pypi_data: dict) -> Union[SpecifierSet, list[Versi python_versions.sort() return python_versions + +def determine_python_versions(pypi_data: dict) -> Union[SpecifierSet, list[Version]]: + try: + classifiers = pypi_data["info"]["classifiers"] + except (AttributeError, KeyError): + # This function assumes `pypi_data` contains classifiers. This is the case + # for the most recent release in the /{project} endpoint or for any release + # fetched via the /{project}/{version} endpoint. + return [] + + # Try parsing classifiers + python_versions = _parse_python_versions_from_classifiers(classifiers) + if python_versions: + return python_versions + # We only use `requires_python` if there are no classifiers. 
This is because # `requires_python` doesn't tell us anything about the upper bound, which # depends on when the release first came out @@ -438,9 +446,10 @@ def _add_python_versions_to_release(integration: str, package: str, release: Ver def main() -> None: - print("Finding out the lowest and highest Python version supported by the SDK...") global MIN_PYTHON_VERSION, MAX_PYTHON_VERSION - sdk_python_versions = determine_python_versions(fetch_package("sentry_sdk")) + sdk_python_versions = _parse_python_versions_from_classifiers( + metadata("sentry-sdk").get_all("Classifier") + ) MIN_PYTHON_VERSION = sdk_python_versions[0] MAX_PYTHON_VERSION = sdk_python_versions[-1] print( From 0ccb44396e60219dcbac4fe76ea7dca933fcc69e Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 3 Feb 2025 12:27:06 +0100 Subject: [PATCH 14/17] Apply suggestions from code review Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- scripts/populate_tox/README.md | 7 ++++--- scripts/populate_tox/populate_tox.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index b68e74bd26..32dc3c76cf 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -95,7 +95,7 @@ Python versions, you can say: ... } ``` - +This key is optional. ### `python` Sometimes, the whole test suite should only run on specific Python versions. @@ -110,8 +110,9 @@ For example, if you want AIOHTTP tests to only run on Python 3.7+, you can say: } ``` -Specifying `python` is discouraged as the script itself finds out which -Python versions are supported by the package. However, if a package has broken +The `python` key is optional, and when possible, it should be omitted. The script +should automatically detect which Python versions the package supports. +However, if a package has broken metadata or the SDK is explicitly not supporting some packages on specific Python versions (because of, for example, broken context vars), the `python` key can be used. diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 59a4c5a0fe..ae4a260956 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -401,7 +401,7 @@ def write_tox_file(packages: dict) -> None: file.write("\n") -def _get_package_name(integration: str) -> Tuple[str, Optional[str]]: +def _get_package_name(integration: str) -> tuple[str, Optional[str]]: package = TEST_SUITE_CONFIG[integration]["package"] extra = None if "[" in package: From 42d6bba37034cebbebbf913738fb9a7e7a6d435a Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 3 Feb 2025 12:27:59 +0100 Subject: [PATCH 15/17] Update scripts/populate_tox/README.md Co-authored-by: Daniel Szoke <7881302+szokeasaurusrex@users.noreply.github.com> --- scripts/populate_tox/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index 32dc3c76cf..6c428a93c3 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -35,7 +35,7 @@ the main package (framework, library) to test with; any additional test dependencies, optionally gated behind specific conditions; and optionally the Python versions to test on. -The format is: +Constraints are defined using the format specified below. The following sections describe each key. 
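Patch 13 above switches the SDK's own Python version detection to trove classifiers read via `importlib.metadata`. The snippet below is a rough, hedged illustration of that idea; the exact filtering inside `_parse_python_versions_from_classifiers` is not fully visible in the diff, so the suffix filter used here is an assumption.

```python
# Rough illustration of classifier-based Python version detection. Only the
# overall approach mirrors patch 13; the suffix filter is an assumption.
from importlib.metadata import metadata

from packaging.version import Version

CLASSIFIER_PREFIX = "Programming Language :: Python :: "


def python_versions_from_classifiers(classifiers: list[str]) -> list[Version]:
    versions = []
    for classifier in classifiers:
        if classifier.startswith(CLASSIFIER_PREFIX):
            suffix = classifier[len(CLASSIFIER_PREFIX) :]
            # Keep concrete versions like "3.8" or "3.12"; skip bare "3" and
            # entries such as "3 :: Only" or "Implementation :: CPython".
            if "." in suffix and "::" not in suffix:
                versions.append(Version(suffix))
    return sorted(versions)


# Requires sentry-sdk to be installed in the current environment.
print(python_versions_from_classifiers(metadata("sentry-sdk").get_all("Classifier") or []))
```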
``` integration_name: { From 82de68c4899ddbbf299f0d48964432876a586d5d Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 3 Feb 2025 12:56:01 +0100 Subject: [PATCH 16/17] docstring fix --- scripts/populate_tox/populate_tox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index ae4a260956..e2bd93b100 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -269,7 +269,7 @@ def supported_python_versions( package_python_versions: Union[SpecifierSet, list[Version]], custom_supported_versions: Optional[SpecifierSet] = None, ) -> list[Version]: - """Get an intersection of python_versions and Python versions supported in the SDK.""" + """Get an intersection of package_python_versions and Python versions supported in the SDK.""" supported = [] curr = MIN_PYTHON_VERSION From de3ccef8affed8223986724c5a7dc4519b633ab3 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Mon, 3 Feb 2025 16:10:41 +0100 Subject: [PATCH 17/17] add comment to path extension --- scripts/populate_tox/populate_tox.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index e2bd93b100..e562871429 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -13,8 +13,10 @@ from packaging.specifiers import SpecifierSet from packaging.version import Version from pathlib import Path -from typing import Optional, Tuple, Union +from typing import Optional, Union +# Adding the scripts directory to PATH. This is necessary in order to be able +# to import stuff from the split_tox_gh_actions script sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) import requests
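Patch 16 above clarifies that `supported_python_versions` intersects the package's supported Python versions with the range supported by the SDK. A hedged sketch of that intersection follows; the hardcoded SDK range and the SpecifierSet-only signature are simplifications for illustration, since the script derives its bounds from sentry-sdk's classifiers and also accepts a plain version list.

```python
# Hedged sketch of the intersection described by the corrected docstring in
# patch 16. The SDK range below is hardcoded for illustration only.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

SDK_PYTHON_VERSIONS = [Version(f"3.{minor}") for minor in range(6, 14)]  # 3.6-3.13


def supported_python_versions_sketch(package_specifiers: SpecifierSet) -> list[Version]:
    # Keep only the interpreters that both the SDK and the package support.
    return [v for v in SDK_PYTHON_VERSIONS if v in package_specifiers]


# A package declaring requires_python=">=3.8,<3.13" intersected with the SDK range:
print(supported_python_versions_sketch(SpecifierSet(">=3.8,<3.13")))
# -> [3.8, 3.9, 3.10, 3.11, 3.12]
```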