PK!M XXpoetry/__init__.pyimport os import sys _ROOT = os.path.dirname(os.path.realpath(__file__)) _VENDOR = os.path.join(_ROOT, "_vendor") _CURRENT_VENDOR = os.path.join( _VENDOR, "py{}".format(".".join(str(v) for v in sys.version_info[:2])) ) # Add vendored dependencies to path. sys.path.insert(0, _CURRENT_VENDOR) from .__version__ import __version__ # noqa PK!);[[poetry/__main__.pyimport sys if __name__ == "__main__": from .console import main sys.exit(main()) PK!poetry/__version__.py__version__ = "0.12.11a" PK!뚩poetry/_vendor/.gitignore* !.gitignore PK!9[# poetry/config.pyfrom __future__ import absolute_import import io import os from typing import Any from tomlkit import document from tomlkit import table from .locations import CONFIG_DIR from .utils._compat import Path from .utils.toml_file import TomlFile class Config: def __init__(self, file): # type: (TomlFile) -> None self._file = file if not self._file.exists(): self._content = document() else: self._content = file.read() @property def name(self): return str(self._file.path) @property def file(self): return self._file @property def content(self): return self._content def setting(self, setting_name, default=None): # type: (str) -> Any """ Retrieve a setting value. 
""" keys = setting_name.split(".") config = self._content for key in keys: if key not in config: return default config = config[key] return config def add_property(self, key, value): keys = key.split(".") config = self._content for i, key in enumerate(keys): if key not in config and i < len(keys) - 1: config[key] = table() if i == len(keys) - 1: config[key] = value break config = config[key] self.dump() def remove_property(self, key): keys = key.split(".") config = self._content for i, key in enumerate(keys): if key not in config: return if i == len(keys) - 1: del config[key] break config = config[key] self.dump() def dump(self): # Ensuring the file is only readable and writable # by the current user mode = 0o600 umask = 0o777 ^ mode if self._file.exists(): # If the file already exists, remove it # if the permissions are higher than what we want current_mode = os.stat(str(self._file)).st_mode & 0o777 if current_mode != 384: os.remove(str(self._file)) if self._file.exists(): fd = str(self._file) else: umask_original = os.umask(umask) try: fd = os.open(str(self._file), os.O_WRONLY | os.O_CREAT, mode) finally: os.umask(umask_original) with io.open(fd, "w", encoding="utf-8") as f: f.write(self._content.as_string()) @classmethod def create(cls, file, base_dir=None): # type: (...) 
-> Config if base_dir is None: base_dir = CONFIG_DIR file = TomlFile(Path(base_dir) / file) return cls(file) PK!RRpoetry/console/__init__.pyfrom .application import Application def main(): return Application().run() PK!èot!!poetry/console/application.pyimport os import re import sys import traceback from cleo import Application as BaseApplication from cleo.formatters import Formatter from cleo.inputs import ArgvInput from cleo.outputs import ConsoleOutput from cleo.outputs import Output from poetry import __version__ from poetry.io.raw_argv_input import RawArgvInput from .commands import AboutCommand from .commands import AddCommand from .commands import BuildCommand from .commands import CheckCommand from .commands import ConfigCommand from .commands import DevelopCommand from .commands import InitCommand from .commands import InstallCommand from .commands import LockCommand from .commands import NewCommand from .commands import PublishCommand from .commands import RemoveCommand from .commands import RunCommand from .commands import ScriptCommand from .commands import SearchCommand from .commands import ShellCommand from .commands import ShowCommand from .commands import UpdateCommand from .commands import VersionCommand from .commands.cache import CacheClearCommand from .commands.debug import DebugInfoCommand from .commands.debug import DebugResolveCommand from .commands.self import SelfUpdateCommand class Application(BaseApplication): def __init__(self): super(Application, self).__init__("Poetry", __version__) self._poetry = None self._skip_io_configuration = False self._formatter = Formatter(True) self._formatter.add_style("error", "red", options=["bold"]) @property def poetry(self): from poetry.poetry import Poetry if self._poetry is not None: return self._poetry self._poetry = Poetry.create(os.getcwd()) return self._poetry def reset_poetry(self): # type: () -> None self._poetry = None def run(self, i=None, o=None): # type: (...) 
-> int if i is None: i = ArgvInput() if o is None: o = ConsoleOutput() self._formatter.with_colors(o.is_decorated()) o.set_formatter(self._formatter) name = i.get_first_argument() if name in ["run", "script"]: self._skip_io_configuration = True i = RawArgvInput() return super(Application, self).run(i, o) def do_run(self, i, o): name = self.get_command_name(i) if name not in ["run", "script"]: return super(Application, self).do_run(i, o) command = self.find(name) self._running_command = command status_code = command.run(i, o) self._running_command = None return status_code def configure_io(self, i, o): if self._skip_io_configuration: return super(Application, self).configure_io(i, o) def get_default_commands(self): # type: () -> list commands = super(Application, self).get_default_commands() commands += [ AboutCommand(), AddCommand(), BuildCommand(), CheckCommand(), ConfigCommand(), DevelopCommand(), InitCommand(), InstallCommand(), LockCommand(), NewCommand(), PublishCommand(), RemoveCommand(), RunCommand(), ScriptCommand(), SearchCommand(), ShellCommand(), ShowCommand(), UpdateCommand(), VersionCommand(), ] # Cache commands commands += [CacheClearCommand()] # Debug commands commands += [DebugInfoCommand(), DebugResolveCommand()] # Self commands commands += [SelfUpdateCommand()] return commands def render_exception(self, e, o): tb = traceback.extract_tb(sys.exc_info()[2]) title = "[%s] " % e.__class__.__name__ l = len(title) width = self._terminal.width if not width: width = sys.maxsize formatter = o.get_formatter() lines = [] for line in re.split(r"\r?\n", str(e)): for splitline in [ line[x : x + (width - 4)] for x in range(0, len(line), width - 4) ]: line_length = ( len(re.sub(r"\[[^m]*m", "", formatter.format(splitline))) + 4 ) lines.append((splitline, line_length)) l = max(line_length, l) messages = [] empty_line = formatter.format("%s" % (" " * l)) messages.append(empty_line) messages.append( formatter.format("%s%s" % (title, " " * max(0, l - len(title)))) ) 
for line in lines: messages.append( formatter.format( "%s %s" % (line[0], " " * (l - line[1])) ) ) messages.append(empty_line) o.writeln(messages, Output.OUTPUT_RAW) if Output.VERBOSITY_VERBOSE <= o.get_verbosity(): o.writeln("Exception trace:") for exc_info in tb: file_ = exc_info[0] line_number = exc_info[1] function = exc_info[2] line = exc_info[3] o.writeln( " %s in %s() " "at line %s" % (file_, function, line_number) ) o.writeln(" %s" % line) o.writeln("") if self._running_command is not None: o.writeln("%s" % self._running_command.get_synopsis()) o.writeln("") PK!BF3,hh#poetry/console/commands/__init__.pyfrom .about import AboutCommand from .add import AddCommand from .build import BuildCommand from .check import CheckCommand from .config import ConfigCommand from .develop import DevelopCommand from .init import InitCommand from .install import InstallCommand from .lock import LockCommand from .new import NewCommand from .publish import PublishCommand from .remove import RemoveCommand from .run import RunCommand from .script import ScriptCommand from .search import SearchCommand from .shell import ShellCommand from .show import ShowCommand from .update import UpdateCommand from .version import VersionCommand PK!Rs߯ poetry/console/commands/about.pyfrom .command import Command class AboutCommand(Command): """ Short information about Poetry. about """ def handle(self): self.line( """Poetry - Package Management for Python Poetry is a dependency manager tracking local dependencies of your projects and libraries. See https://github.com/sdispater/poetry for more information. """ ) PK!==poetry/console/commands/add.pyfrom .init import InitCommand from .env_command import EnvCommand class AddCommand(EnvCommand, InitCommand): """ Add a new dependency to pyproject.toml. add { name* : Packages to add. } { --D|dev : Add package as development dependency. } { --git= : The url of the Git repository. } { --path= : The path to a dependency. 
} { --E|extras=* : Extras to activate for the dependency. } { --optional : Add as an optional dependency. } { --python= : Python version( for which the dependencies must be installed. } { --platform= : Platforms for which the dependencies must be installed. } { --allow-prereleases : Accept prereleases. } { --dry-run : Outputs the operations but will not execute anything (implicitly enables --verbose). } """ help = """The add command adds required packages to your pyproject.toml and installs them. If you do not specify a version constraint, poetry will choose a suitable one based on the available package versions. """ _loggers = ["poetry.repositories.pypi_repository"] def handle(self): from poetry.installation import Installer from poetry.semver import parse_constraint from tomlkit import inline_table packages = self.argument("name") is_dev = self.option("dev") if (self.option("git") or self.option("path") or self.option("extras")) and len( packages ) > 1: raise ValueError( "You can only specify one package " "when using the --git or --path options" ) if self.option("git") and self.option("path"): raise RuntimeError("--git and --path cannot be used at the same time") section = "dependencies" if is_dev: section = "dev-dependencies" original_content = self.poetry.file.read() content = self.poetry.file.read() poetry_content = content["tool"]["poetry"] if section not in poetry_content: poetry_content[section] = {} for name in packages: for key in poetry_content[section]: if key.lower() == name.lower(): raise ValueError("Package {} is already present".format(name)) if self.option("git") or self.option("path"): requirements = {packages[0]: ""} else: requirements = self._determine_requirements( packages, allow_prereleases=self.option("allow-prereleases") ) requirements = self._format_requirements(requirements) # validate requirements format for constraint in requirements.values(): parse_constraint(constraint) for name, _constraint in requirements.items(): constraint = 
inline_table() constraint["version"] = _constraint if self.option("git"): del constraint["version"] constraint["git"] = self.option("git") elif self.option("path"): del constraint["version"] constraint["path"] = self.option("path") if self.option("optional"): constraint["optional"] = True if self.option("allow-prereleases"): constraint["allows-prereleases"] = True if self.option("extras"): extras = [] for extra in self.option("extras"): if " " in extra: extras += [e.strip() for e in extra.split(" ")] else: extras.append(extra) constraint["extras"] = self.option("extras") if self.option("python"): constraint["python"] = self.option("python") if self.option("platform"): constraint["platform"] = self.option("platform") if len(constraint) == 1 and "version" in constraint: constraint = constraint["version"] poetry_content[section][name] = constraint # Write new content self.poetry.file.write(content) # Cosmetic new line self.line("") # Update packages self.reset_poetry() installer = Installer( self.output, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool, ) installer.dry_run(self.option("dry-run")) installer.update(True) installer.whitelist(requirements) try: status = installer.run() except Exception: self.poetry.file.write(original_content) raise if status != 0 or self.option("dry-run"): # Revert changes if not self.option("dry-run"): self.error( "\n" "Addition failed, reverting pyproject.toml " "to its original content." ) self.poetry.file.write(original_content) return status PK!cP9 poetry/console/commands/build.pyfrom .env_command import EnvCommand class BuildCommand(EnvCommand): """ Builds a package, as a tarball and a wheel by default. build { --f|format= : Limit the format to either wheel or sdist. 
} """ def handle(self): from poetry.masonry import Builder fmt = "all" if self.option("format"): fmt = self.option("format") package = self.poetry.package self.line( "Building {} ({})".format( package.pretty_name, package.version ) ) builder = Builder(self.poetry, self.env, self.output) builder.build(fmt) PK!Hu%%)poetry/console/commands/cache/__init__.pyfrom .clear import CacheClearCommand PK! 0w &poetry/console/commands/cache/clear.pyimport os from ..command import Command class CacheClearCommand(Command): """ Clears poetry's cache. cache:clear { cache : The name of the cache to clear. } { --all : Clear all caches. } """ def handle(self): from cachy import CacheManager from poetry.locations import CACHE_DIR from poetry.utils._compat import Path cache = self.argument("cache") parts = cache.split(":") root = parts[0] base_cache = Path(CACHE_DIR) / "cache" / "repositories" cache_dir = base_cache / root try: cache_dir.relative_to(base_cache) except ValueError: raise ValueError("{} is not a valid repository cache".format(root)) cache = CacheManager( { "default": parts[0], "serializer": "json", "stores": {parts[0]: {"driver": "file", "path": str(cache_dir)}}, } ) if len(parts) == 1: if not self.option("all"): raise RuntimeError( "Add the --all option if you want to clear all " "{} caches".format(parts[0]) ) if not os.path.exists(str(cache_dir)): self.line("No cache entries for {}".format(parts[0])) return 0 # Calculate number of entries entries_count = 0 for path, dirs, files in os.walk(str(cache_dir)): entries_count += len(files) delete = self.confirm( "Delete {} entries?".format(entries_count) ) if not delete: return 0 cache.flush() elif len(parts) == 2: raise RuntimeError( "Only specifying the package name is not yet supported. 
" "Add a specific version to clear" ) elif len(parts) == 3: package = parts[1] version = parts[2] if not cache.has("{}:{}".format(package, version)): self.line("No cache entries for {}:{}".format(package, version)) return 0 delete = self.confirm("Delete cache entry {}:{}".format(package, version)) if not delete: return 0 cache.forget("{}:{}".format(package, version)) else: raise ValueError("Invalid cache key") PK!/MҞ poetry/console/commands/check.pyfrom .command import Command class CheckCommand(Command): """ Checks the validity of the pyproject.toml file. check """ def handle(self): # Load poetry and display errors, if any check_result = self.poetry.check(self.poetry.local_config, strict=True) if not check_result["errors"] and not check_result["warnings"]: self.info("All set!") return 0 for error in check_result["errors"]: self.error("Error: {}".format(error)) for error in check_result["warnings"]: self.line("Warning: {}".format(error)) return 1 PK! "poetry/console/commands/command.pyimport logging from cleo import Command as BaseCommand from ..styles.poetry import PoetryStyle class CommandFormatter(logging.Formatter): _colors = { "error": "fg=red", "warning": "fg=yellow", "debug": "debug", "info": "fg=blue", } def format(self, record): if not record.exc_info: level = record.levelname.lower() msg = record.msg if level in self._colors: msg = "<{}>{}".format(self._colors[level], msg) return msg return super(CommandFormatter, self).format(record) class CommandHandler(logging.Handler): def __init__(self, command): self._command = command output = self._command.output level = logging.WARNING if output.is_debug(): level = logging.DEBUG elif output.is_very_verbose() or output.is_verbose(): level = logging.INFO super(CommandHandler, self).__init__(level) def emit(self, record): try: msg = self.format(record) level = record.levelname.lower() err = level in ("warning", "error", "exception", "critical") if err: self._command.output.write_error(msg, newline=True) else: 
self._command.line(msg) except Exception: self.handleError(record) class Command(BaseCommand): _loggers = [] @property def poetry(self): return self.get_application().poetry def reset_poetry(self): # type: () -> None self.get_application().reset_poetry() def run(self, i, o): # type: () -> int """ Initialize command. """ self.input = i self.output = PoetryStyle(i, o) for logger in self._loggers: self.register_logger(logging.getLogger(logger)) return super(BaseCommand, self).run(i, o) def register_logger(self, logger): """ Register a new logger. """ handler = CommandHandler(self) handler.setFormatter(CommandFormatter()) logger.handlers = [handler] logger.propagate = False output = self.output level = logging.WARNING if output.is_debug(): level = logging.DEBUG elif output.is_very_verbose() or output.is_verbose(): level = logging.INFO logger.setLevel(level) PK!k'k'!poetry/console/commands/config.pyimport json import re from .command import Command TEMPLATE = """[settings] [repositories] """ AUTH_TEMPLATE = """[http-basic] """ class ConfigCommand(Command): """ Sets/Gets config options. config { key : Setting key. } { value?* : Setting value. } { --list : List configuration settings } { --unset : Unset configuration setting } """ help = """This command allows you to edit the poetry config settings and repositories. 
To add a repository: poetry config repositories.foo https://bar.com/simple/ To remove a repository (repo is a short alias for repositories): poetry config --unset repo.foo """ def __init__(self): from poetry.config import Config super(ConfigCommand, self).__init__() self._config = Config.create("config.toml") self._auth_config = Config.create("auth.toml") @property def unique_config_values(self): from poetry.locations import CACHE_DIR from poetry.utils._compat import Path boolean_validator = lambda val: val in {"true", "false", "1", "0"} boolean_normalizer = lambda val: True if val in ["true", "1"] else False unique_config_values = { "settings.virtualenvs.create": ( boolean_validator, boolean_normalizer, True, ), "settings.virtualenvs.in-project": ( boolean_validator, boolean_normalizer, False, ), "settings.virtualenvs.path": ( str, lambda val: str(Path(val)), str(Path(CACHE_DIR) / "virtualenvs"), ), } return unique_config_values def initialize(self, i, o): from poetry.utils._compat import decode super(ConfigCommand, self).initialize(i, o) # Create config file if it does not exist if not self._config.file.exists(): self._config.file.parent.mkdir(parents=True, exist_ok=True) with self._config.file.open("w", encoding="utf-8") as f: f.write(decode(TEMPLATE)) if not self._auth_config.file.exists(): self._auth_config.file.parent.mkdir(parents=True, exist_ok=True) with self._auth_config.file.open("w", encoding="utf-8") as f: f.write(decode(AUTH_TEMPLATE)) def handle(self): if self.option("list"): self._list_configuration(self._config.content) return 0 setting_key = self.argument("key") if not setting_key: return 0 if self.argument("value") and self.option("unset"): raise RuntimeError("You can not combine a setting value with --unset") # show the value if no value is provided if not self.argument("value") and not self.option("unset"): m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key")) if m: if not m.group(1): value = {} if 
self._config.setting("repositories") is not None: value = self._config.setting("repositories") else: repo = self._config.setting("repositories.{}".format(m.group(1))) if repo is None: raise ValueError( "There is no {} repository defined".format(m.group(1)) ) value = repo self.line(str(value)) else: values = self.unique_config_values if setting_key not in values: raise ValueError("There is no {} setting.".format(setting_key)) values = self._get_setting( self._config.content, setting_key, default=values[setting_key][-1] ) for value in values: self.line(value[1]) return 0 values = self.argument("value") unique_config_values = self.unique_config_values if setting_key in unique_config_values: if self.option("unset"): return self._remove_single_value(setting_key) return self._handle_single_value( setting_key, unique_config_values[setting_key], values ) # handle repositories m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key")) if m: if not m.group(1): raise ValueError("You cannot remove the [repositories] section") if self.option("unset"): repo = self._config.setting("repositories.{}".format(m.group(1))) if repo is None: raise ValueError( "There is no {} repository defined".format(m.group(1)) ) self._config.remove_property("repositories.{}".format(m.group(1))) return 0 if len(values) == 1: url = values[0] self._config.add_property("repositories.{}.url".format(m.group(1)), url) return 0 raise ValueError( "You must pass the url. 
" "Example: poetry config repositories.foo https://bar.com" ) # handle auth m = re.match(r"^(http-basic)\.(.+)", self.argument("key")) if m: if self.option("unset"): if not self._auth_config.setting( "{}.{}".format(m.group(1), m.group(2)) ): raise ValueError( "There is no {} {} defined".format(m.group(2), m.group(1)) ) self._auth_config.remove_property( "{}.{}".format(m.group(1), m.group(2)) ) return 0 if m.group(1) == "http-basic": if len(values) == 1: username = values[0] # Only username, so we prompt for password password = self.secret("Password:") elif len(values) != 2: raise ValueError( "Expected one or two arguments " "(username, password), got {}".format(len(values)) ) else: username = values[0] password = values[1] self._auth_config.add_property( "{}.{}".format(m.group(1), m.group(2)), {"username": username, "password": password}, ) return 0 raise ValueError("Setting {} does not exist".format(self.argument("key"))) def _handle_single_value(self, key, callbacks, values): validator, normalizer, _ = callbacks if len(values) > 1: raise RuntimeError("You can only pass one value.") value = values[0] if not validator(value): raise RuntimeError('"{}" is an invalid value for {}'.format(value, key)) self._config.add_property(key, normalizer(value)) return 0 def _remove_single_value(self, key): self._config.remove_property(key) return 0 def _list_configuration(self, contents): if "settings" not in contents: settings = {} else: settings = contents["settings"] for setting_key, value in sorted(self.unique_config_values.items()): self._list_setting( settings, setting_key.replace("settings.", ""), "settings.", default=value[-1], ) repositories = contents.get("repositories") if not repositories: self.line("repositories = {}") else: self._list_setting(repositories, k="repositories.") def _list_setting(self, contents, setting=None, k=None, default=None): values = self._get_setting(contents, setting, k, default) for value in values: self.line( "{} = {}".format(value[0], 
value[1]) ) def _get_setting(self, contents, setting=None, k=None, default=None): orig_k = k if setting and setting.split(".")[0] not in contents: value = json.dumps(default) return [((k or "") + setting, value)] else: values = [] for key, value in contents.items(): if k is None and key not in ["config", "repositories", "settings"]: continue if setting and key != setting.split(".")[0]: continue if isinstance(value, dict) or key == "repositories" and k is None: if k is None: k = "" k += re.sub(r"^config\.", "", key + ".") if setting and len(setting) > 1: setting = ".".join(setting.split(".")[1:]) values += self._get_setting( value, k=k, setting=setting, default=default ) k = orig_k continue if isinstance(value, list): value = [ json.dumps(val) if isinstance(val, list) else val for val in value ] value = "[{}]".format(", ".join(value)) value = json.dumps(value) values.append(((k or "") + key, value)) return values def _get_formatted_value(self, value): if isinstance(value, list): value = [json.dumps(val) if isinstance(val, list) else val for val in value] value = "[{}]".format(", ".join(value)) return json.dumps(value) PK!LL)poetry/console/commands/debug/__init__.pyfrom .info import DebugInfoCommand from .resolve import DebugResolveCommand PK!vD%poetry/console/commands/debug/info.pyimport os import sys from ..command import Command class DebugInfoCommand(Command): """ Shows debug information. 
debug:info """ def handle(self): from ....utils.env import Env poetry = self.poetry env = Env.get(poetry.file.parent) poetry_python_version = ".".join(str(s) for s in sys.version_info[:3]) self.output.title("Poetry") self.output.listing( [ "Version: {}".format(poetry.VERSION), "Python: {}".format(poetry_python_version), ] ) self.line("") env_python_version = ".".join(str(s) for s in env.version_info[:3]) self.output.title("Virtualenv") listing = [ "Python: {}".format(env_python_version), "Implementation: {}".format( env.python_implementation ), "Path: {}".format( env.path if env.is_venv() else "NA" ), ] if env.is_venv(): listing.append( "Valid: <{tag}>{is_valid}".format( tag="comment" if env.is_sane() else "error", is_valid=env.is_sane() ) ) self.output.listing(listing) self.line("") self.output.title("System") self.output.listing( [ "Platform: {}".format(sys.platform), "OS: {}".format(os.name), "Python: {}".format(env.base), ] ) self.line("") PK!y Hjj(poetry/console/commands/debug/resolve.pyimport re from typing import List from ..command import Command class DebugResolveCommand(Command): """ Debugs dependency resolution. debug:resolve { package?* : packages to resolve. } { --E|extras=* : Extras to activate for the dependency. } { --python= : Python version(s) to use for resolution. } { --tree : Displays the dependency tree. } { --install : Show what would be installed for the current system. 
} """ _loggers = ["poetry.repositories.pypi_repository"] def handle(self): from poetry.packages import ProjectPackage from poetry.puzzle import Solver from poetry.repositories.repository import Repository from poetry.semver import parse_constraint from poetry.utils.env import Env packages = self.argument("package") if not packages: package = self.poetry.package else: package = ProjectPackage( self.poetry.package.name, self.poetry.package.version ) requirements = self._format_requirements(packages) for name, constraint in requirements.items(): dep = package.add_dependency(name, constraint) extras = [] for extra in self.option("extras"): if " " in extra: extras += [e.strip() for e in extra.split(" ")] else: extras.append(extra) for ex in extras: dep.extras.append(ex) package.python_versions = self.option("python") or ( self.poetry.package.python_versions ) pool = self.poetry.pool solver = Solver(package, pool, Repository(), Repository(), self.output) ops = solver.solve() self.line("") self.line("Resolution results:") self.line("") if self.option("tree"): show_command = self.get_application().find("show") show_command.output = self.output show_command.init_styles() packages = [op.package for op in ops] repo = Repository(packages) requires = package.requires + package.dev_requires for pkg in repo.packages: for require in requires: if pkg.name == require.name: show_command.display_package_tree(pkg, repo) break return 0 env = Env.get(self.poetry.file.parent) current_python_version = parse_constraint( ".".join(str(v) for v in env.version_info) ) for op in ops: pkg = op.package if self.option("install"): if not pkg.python_constraint.allows( current_python_version ) or not env.is_valid_for_marker(pkg.marker): continue self.line( " - {} ({})".format( pkg.name, pkg.version ) ) if not pkg.python_constraint.is_any(): self.line(" - python: {}".format(pkg.python_versions)) if not pkg.marker.is_any(): self.line(" - marker: {}".format(pkg.marker)) def _determine_requirements(self, 
requires): # type: (List[str]) -> List[str] from poetry.semver import parse_constraint if not requires: return [] requires = self._parse_name_version_pairs(requires) for requirement in requires: if "version" in requirement: parse_constraint(requirement["version"]) return requires def _parse_name_version_pairs(self, pairs): # type: (list) -> list result = [] for i in range(len(pairs)): if pairs[i].startswith("git+https://"): url = pairs[i].lstrip("git+") rev = None if "@" in url: url, rev = url.split("@") pair = {"name": url.split("/")[-1].rstrip(".git"), "git": url} if rev: pair["rev"] = rev result.append(pair) continue pair = re.sub("^([^=: ]+)[=: ](.*)$", "\\1 \\2", pairs[i].strip()) pair = pair.strip() if " " in pair: name, version = pair.split(" ", 2) result.append({"name": name, "version": version}) else: result.append({"name": pair, "version": "*"}) return result def _format_requirements(self, requirements): # type: (List[str]) -> dict requires = {} requirements = self._determine_requirements(requirements) for requirement in requirements: name = requirement.pop("name") requires[name] = requirement return requires PK!@"poetry/console/commands/develop.pyfrom .env_command import EnvCommand class DevelopCommand(EnvCommand): """ Installs the current project in development mode. (Deprecated) develop """ help = """\ The develop command is deprecated. Please use install instead. """ def handle(self): self.line("develop is deprecated use install instead.") self.line("") return self.call("install") PK! 
FF&poetry/console/commands/env_command.pyfrom .command import Command class EnvCommand(Command): def __init__(self): self._env = None super(EnvCommand, self).__init__() def initialize(self, i, o): from poetry.semver import parse_constraint from poetry.utils.env import Env super(EnvCommand, self).initialize(i, o) # Checking compatibility of the current environment with # the python dependency specified in pyproject.toml current_env = Env.get(self.poetry.file.parent) supported_python = self.poetry.package.python_constraint current_python = parse_constraint( ".".join(str(v) for v in current_env.version_info[:3]) ) if not supported_python.allows(current_python): raise RuntimeError( "The current Python version ({}) is not supported by the project ({})\n" "Please activate a compatible Python version.".format( current_python, self.poetry.package.python_versions ) ) self._env = Env.create_venv( self.poetry.file.parent, o, self.poetry.package.name ) if self._env.is_venv() and o.is_verbose(): o.writeln("Using virtualenv: {}".format(self._env.path)) @property def env(self): return self._env PK!Ȏ**poetry/console/commands/init.py# -*- coding: utf-8 -*- from __future__ import unicode_literals import re from typing import List from typing import Tuple from .command import Command from .env_command import EnvCommand class InitCommand(Command): """ Creates a basic pyproject.toml file in the current directory. init {--name= : Name of the package} {--description= : Description of the package} {--author= : Author name of the package} {--dependency=* : Package to require with an optional version constraint, e.g. requests:^2.10.0 or requests=2.11.1} {--dev-dependency=* : Package to require for development with an optional version constraint, e.g. requests:^2.10.0 or requests=2.11.1} {--l|license= : License of the package} """ help = """\ The init command creates a basic pyproject.toml file in the current directory. 
""" def __init__(self): super(InitCommand, self).__init__() self._pool = None def handle(self): from poetry.layouts import layout from poetry.utils._compat import Path from poetry.utils.env import Env from poetry.vcs.git import GitConfig if (Path.cwd() / "pyproject.toml").exists(): self.error("A pyproject.toml file already exists.") return 1 vcs_config = GitConfig() self.line( [ "", "This command will guide you through creating your pyproject.toml config.", "", ] ) name = self.option("name") if not name: name = Path.cwd().name.lower() question = self.create_question( "Package name [{}]: ".format(name), default=name ) name = self.ask(question) version = "0.1.0" question = self.create_question( "Version [{}]: ".format(version), default=version ) version = self.ask(question) description = self.option("description") or "" question = self.create_question( "Description [{}]: ".format(description), default=description, ) description = self.ask(question) author = self.option("author") if not author and vcs_config and vcs_config.get("user.name"): author = vcs_config["user.name"] author_email = vcs_config.get("user.email") if author_email: author += " <{}>".format(author_email) question = self.create_question( "Author [{}, n to skip]: ".format(author), default=author ) question.validator = lambda v: self._validate_author(v, author) author = self.ask(question) if not author: authors = [] else: authors = [author] license = self.option("license") or "" question = self.create_question( "License [{}]: ".format(license), default=license ) question.validator = self._validate_license license = self.ask(question) current_env = Env.get(Path.cwd()) default_python = "^{}".format( ".".join(str(v) for v in current_env.version_info[:2]) ) question = self.create_question( "Compatible Python versions [{}]: ".format( default_python ), default=default_python, ) python = self.ask(question) self.line("") requirements = {} question = ( "Would you like to define your dependencies" " (require) 
interactively?" ) if self.confirm(question, True): requirements = self._format_requirements( self._determine_requirements(self.option("dependency")) ) dev_requirements = {} question = ( "Would you like to define your dev dependencies" " (require-dev) interactively" ) if self.confirm(question, True): dev_requirements = self._format_requirements( self._determine_requirements(self.option("dev-dependency")) ) layout_ = layout("standard")( name, version, description=description, author=authors[0] if authors else None, license=license, python=python, dependencies=requirements, dev_dependencies=dev_requirements, ) content = layout_.generate_poetry_content() if self.input.is_interactive(): self.line("Generated file") self.line(["", content, ""]) if not self.confirm("Do you confirm generation?", True): self.line("Command aborted") return 1 with (Path.cwd() / "pyproject.toml").open("w") as f: f.write(content) def _determine_requirements( self, requires, allow_prereleases=False # type: List[str] # type: bool ): # type: (...) 
-> List[str] if not requires: requires = [] package = self.ask("Search for package:") while package is not None: matches = self._get_pool().search(package) if not matches: self.line("Unable to find package") package = False else: choices = [] for found_package in matches: choices.append(found_package.pretty_name) self.line( "Found {} packages matching {}".format( len(matches), package ) ) package = self.choice( "\nEnter package # to add, or the complete package name if it is not listed", choices, attempts=3, ) # no constraint yet, determine the best version automatically if package is not False and " " not in package: question = self.create_question( "Enter the version constraint to require " "(or leave blank to use the latest version):" ) question.attempts = 3 question.validator = lambda x: (x or "").strip() or False constraint = self.ask(question) if constraint is False: _, constraint = self._find_best_version_for_package(package) self.line( "Using version {} for {}".format( constraint, package ) ) package += " {}".format(constraint) if package is not False: requires.append(package) package = self.ask("\nSearch for a package:") return requires requires = self._parse_name_version_pairs(requires) result = [] for requirement in requires: if "version" not in requirement: # determine the best version automatically name, version = self._find_best_version_for_package( requirement["name"], allow_prereleases=allow_prereleases ) requirement["version"] = version requirement["name"] = name self.line( "Using version {} for {}".format(version, name) ) else: # check that the specified version/constraint exists # before we proceed name, _ = self._find_best_version_for_package( requirement["name"], requirement["version"], allow_prereleases=allow_prereleases, ) requirement["name"] = name result.append("{} {}".format(requirement["name"], requirement["version"])) return result def _find_best_version_for_package( self, name, required_version=None, allow_prereleases=False ): # type: 
(...) -> Tuple[str, str] from poetry.version.version_selector import VersionSelector selector = VersionSelector(self._get_pool()) package = selector.find_best_candidate( name, required_version, allow_prereleases=allow_prereleases ) if not package: # TODO: find similar raise ValueError( "Could not find a matching version of package {}".format(name) ) return (package.pretty_name, selector.find_recommended_require_version(package)) def _parse_name_version_pairs(self, pairs): # type: (list) -> list result = [] for i in range(len(pairs)): pair = re.sub("^([^=: ]+)[=: ](.*)$", "\\1 \\2", pairs[i].strip()) pair = pair.strip() if " " in pair: name, version = pair.split(" ", 2) result.append({"name": name, "version": version}) else: result.append({"name": pair}) return result def _format_requirements(self, requirements): # type: (List[str]) -> dict requires = {} requirements = self._parse_name_version_pairs(requirements) for requirement in requirements: requires[requirement["name"]] = requirement["version"] return requires def _validate_author(self, author, default): from poetry.packages.package import AUTHOR_REGEX author = author or default if author in ["n", "no"]: return m = AUTHOR_REGEX.match(author) if not m: raise ValueError( "Invalid author string. Must be in the format: " "John Smith " ) return author def _validate_license(self, license): from poetry.spdx import license_by_id if license: license_by_id(license) return license def _get_pool(self): from poetry.repositories import Pool from poetry.repositories.pypi_repository import PyPiRepository if isinstance(self, EnvCommand): return self.poetry.pool if self._pool is None: self._pool = Pool() self._pool.add_repository(PyPiRepository()) return self._pool PK!8 "poetry/console/commands/install.pyimport os from .env_command import EnvCommand class InstallCommand(EnvCommand): """ Installs the project dependencies. install { --no-dev : Do not install dev dependencies. 
} { --dry-run : Outputs the operations but will not execute anything (implicitly enables --verbose). } { --E|extras=* : Extra sets of dependencies to install. } { --develop=* : Install given packages in development mode. } """ help = """The install command reads the poetry.lock file from the current directory, processes it, and downloads and installs all the libraries and dependencies outlined in that file. If the file does not exist it will look for pyproject.toml and do the same. poetry install """ _loggers = ["poetry.repositories.pypi_repository"] def handle(self): from poetry.installation import Installer from poetry.io import NullIO from poetry.masonry.builders import SdistBuilder from poetry.masonry.utils.module import ModuleOrPackageNotFound from poetry.utils._compat import decode from poetry.utils.env import NullEnv installer = Installer( self.output, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool, ) extras = [] for extra in self.option("extras"): if " " in extra: extras += [e.strip() for e in extra.split(" ")] else: extras.append(extra) installer.extras(extras) installer.dev_mode(not self.option("no-dev")) installer.develop(self.option("develop")) installer.dry_run(self.option("dry-run")) installer.verbose(self.option("verbose")) return_code = installer.run() if return_code != 0: return return_code try: builder = SdistBuilder(self.poetry, NullEnv(), NullIO()) except ModuleOrPackageNotFound: # This is likely due to the fact that the project is an application # not following the structure expected by Poetry # If this is a true error it will be picked up later by build anyway. return 0 self.line( " - Installing {} ({})".format( self.poetry.package.pretty_name, self.poetry.package.pretty_version ) ) if self.option("dry-run"): return 0 setup = self.poetry.file.parent / "setup.py" has_setup = setup.exists() if has_setup: self.line("A setup.py file already exists. 
Using it.") else: with setup.open("w", encoding="utf-8") as f: f.write(decode(builder.build_setup())) try: self.env.run("pip", "install", "-e", str(setup.parent), "--no-deps") finally: if not has_setup: os.remove(str(setup)) PK!nBpoetry/console/commands/lock.pyfrom .env_command import EnvCommand class LockCommand(EnvCommand): """ Locks the project dependencies. lock """ help = """The lock command reads the pyproject.toml file from the current directory, processes it, and locks the depdencies in the poetry.lock file. poetry lock """ _loggers = ["poetry.repositories.pypi_repository"] def handle(self): from poetry.installation import Installer installer = Installer( self.output, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool, ) installer.lock() return installer.run() PK!CCpoetry/console/commands/new.pyfrom .command import Command class NewCommand(Command): """ Creates a new Python project at new { path : The path to create the project at. } { --name= : Set the resulting package name. } { --src : Use the src layout for the project. } """ def handle(self): from poetry.layouts import layout from poetry.utils._compat import Path from poetry.utils.env import Env from poetry.vcs.git import GitConfig if self.option("src"): layout_ = layout("src") else: layout_ = layout("standard") path = Path.cwd() / Path(self.argument("path")) name = self.option("name") if not name: name = path.name if path.exists(): if list(path.glob("*")): # Directory is not empty. Aborting. 
raise RuntimeError( "Destination {} " "exists and is not empty".format(path) ) readme_format = "rst" config = GitConfig() author = None if config.get("user.name"): author = config["user.name"] author_email = config.get("user.email") if author_email: author += " <{}>".format(author_email) current_env = Env.get(Path.cwd()) default_python = "^{}".format( ".".join(str(v) for v in current_env.version_info[:2]) ) layout_ = layout_( name, "0.1.0", author=author, readme_format=readme_format, python=default_python, ) layout_.create(path) self.line( "Created package {} in {}".format( name, path.relative_to(Path.cwd()) ) ) PK!2"poetry/console/commands/publish.pyfrom .command import Command class PublishCommand(Command): """ Publishes a package to a remote repository. publish { --r|repository= : The repository to publish the package to. } { --u|username= : The username to access the repository. } { --p|password= : The password to access the repository. } { --build : Build the package before publishing. } """ help = """The publish command builds and uploads the package to a remote repository. By default, it will upload to PyPI but if you pass the --repository option it will upload to it instead. The --repository option should match the name of a configured repository using the config command. """ def handle(self): from poetry.masonry.publishing.publisher import Publisher publisher = Publisher(self.poetry, self.output) # Building package first, if told if self.option("build"): if publisher.files: if not self.confirm( "There are {} files ready for publishing. " "Build anyway?".format(len(publisher.files)) ): self.line_error("Aborted!") return 1 self.call("build") files = publisher.files if not files: self.line_error( "No files to publish. " "Run poetry build first or use the --build option." 
) return 1 self.line("") publisher.publish( self.option("repository"), self.option("username"), self.option("password") ) PK!5 !poetry/console/commands/remove.pyfrom .env_command import EnvCommand class RemoveCommand(EnvCommand): """ Removes a package from the project dependencies. remove { packages* : Packages that should be removed. } {--D|dev : Removes a package from the development dependencies. } {--dry-run : Outputs the operations but will not execute anything (implicitly enables --verbose). } """ help = """The remove command removes a package from the current list of installed packages poetry remove""" _loggers = ["poetry.repositories.pypi_repository"] def handle(self): from poetry.installation import Installer packages = self.argument("packages") is_dev = self.option("dev") original_content = self.poetry.file.read() content = self.poetry.file.read() poetry_content = content["tool"]["poetry"] section = "dependencies" if is_dev: section = "dev-dependencies" # Deleting entries requirements = {} for name in packages: found = False for key in poetry_content[section]: if key.lower() == name.lower(): found = True requirements[key] = poetry_content[section][key] break if not found: raise ValueError("Package {} not found".format(name)) for key in requirements: del poetry_content[section][key] # Write the new content back self.poetry.file.write(content) # Update packages self.reset_poetry() installer = Installer( self.output, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool, ) installer.dry_run(self.option("dry-run")) installer.update(True) installer.whitelist(requirements) try: status = installer.run() except Exception: self.poetry.file.write(original_content) raise if status != 0 or self.option("dry-run"): # Revert changes if not self.option("dry-run"): self.error( "\n" "Removal failed, reverting pyproject.toml " "to its original content." ) self.poetry.file.write(original_content) return status PK! 
|  poetry/console/commands/run.pyfrom .env_command import EnvCommand class RunCommand(EnvCommand): """ Runs a command in the appropriate environment. run { args* : The command and arguments/options to run. } """ def handle(self): args = self.argument("args") script = args[0] scripts = self.poetry.local_config.get("scripts") if scripts and script in scripts: return self.run_script(scripts[script], args) return self.env.execute(*args) def run_script(self, script, args): if isinstance(script, dict): script = script["callable"] module, callable_ = script.split(":") src_in_sys_path = "sys.path.append('src'); " if self._module.is_in_src() else "" cmd = ["python", "-c"] cmd += [ '"import sys; ' "from importlib import import_module; " "sys.argv = {!r}; {}" "import_module('{}').{}()\"".format( args, src_in_sys_path, module, callable_ ) ] return self.env.run(*cmd, shell=True, call=True) @property def _module(self): from ...masonry.utils.module import Module poetry = self.poetry package = poetry.package path = poetry.file.parent module = Module(package.name, path.as_posix(), package.packages) return module def merge_application_definition(self, merge_args=True): if self._application is None or ( self._application_definition_merged and (self._application_definition_merged_with_args or not merge_args) ): return if merge_args: current_arguments = self._definition.get_arguments() self._definition.set_arguments( self._application.get_definition().get_arguments() ) self._definition.add_arguments(current_arguments) self._application_definition_merged = True if merge_args: self._application_definition_merged_with_args = True PK!2- - !poetry/console/commands/script.pyfrom .env_command import EnvCommand class ScriptCommand(EnvCommand): """ Executes a script defined in pyproject.toml. (Deprecated) script { script-name : The name of the script to execute } { args?* : The command and arguments/options to pass to the script. } """ help = """The script command is deprecated. 
Please use run instead. """ def handle(self): self.line("script is deprecated use run instead.") self.line("") script = self.argument("script-name") argv = [script] + self.argument("args") scripts = self.poetry.local_config.get("scripts") if not scripts: raise RuntimeError("No scripts defined in pyproject.toml") if script not in scripts: raise ValueError("Script {} is not defined".format(script)) module, callable_ = scripts[script].split(":") src_in_sys_path = "sys.path.append('src'); " if self._module.is_in_src() else "" cmd = ["python", "-c"] cmd += [ '"import sys; ' "from importlib import import_module; " "sys.argv = {!r}; {}" "import_module('{}').{}()\"".format( argv, src_in_sys_path, module, callable_ ) ] self.env.run(*cmd, shell=True, call=True) @property def _module(self): from ...masonry.utils.module import Module poetry = self.poetry package = poetry.package path = poetry.file.parent module = Module(package.name, path.as_posix()) return module def merge_application_definition(self, merge_args=True): if self._application is None or ( self._application_definition_merged and (self._application_definition_merged_with_args or not merge_args) ): return if merge_args: current_arguments = self._definition.get_arguments() self._definition.set_arguments( self._application.get_definition().get_arguments() ) self._definition.add_arguments(current_arguments) self._application_definition_merged = True if merge_args: self._application_definition_merged_with_args = True PK!EE!poetry/console/commands/search.pyfrom .command import Command class SearchCommand(Command): """ Searches for packages on remote repositories. search { tokens* : The tokens to search for. } { --N|only-name : Search only in name. 
} """ def handle(self): from poetry.repositories.pypi_repository import PyPiRepository flags = PyPiRepository.SEARCH_FULLTEXT if self.option("only-name"): flags = PyPiRepository.SEARCH_NAME results = PyPiRepository().search(self.argument("tokens"), flags) for result in results: self.line("") name = "{}".format(result.name) name += " ({})".format(result.version) self.line(name) if result.description: self.line(" {}".format(result.description)) PK!Z&&(poetry/console/commands/self/__init__.pyfrom .update import SelfUpdateCommand PK!Q&poetry/console/commands/self/update.pyimport hashlib import os import shutil import subprocess import sys import tarfile from functools import cmp_to_key from gzip import GzipFile try: from urllib.error import HTTPError from urllib.request import urlopen except ImportError: from urllib2 import HTTPError from urllib2 import urlopen from ..command import Command class SelfUpdateCommand(Command): """ Updates poetry to the latest version. self:update { version? : The version to update to. } { --preview : Install prereleases. } """ BASE_URL = "https://github.com/sdispater/poetry/releases/download" @property def home(self): from poetry.utils._compat import Path from poetry.utils.appdirs import expanduser home = Path(expanduser("~")) return home / ".poetry" @property def lib(self): return self.home / "lib" @property def lib_backup(self): return self.home / "lib-backup" def handle(self): from poetry.__version__ import __version__ from poetry.repositories.pypi_repository import PyPiRepository from poetry.semver import Version from poetry.utils._compat import Path from poetry.utils._compat import decode current = Path(__file__) try: current.relative_to(self.home) except ValueError: raise RuntimeError( "Poetry was not installed with the recommended installer. " "Cannot update automatically." 
) version = self.argument("version") if not version: version = ">=" + __version__ repo = PyPiRepository(fallback=False) packages = repo.find_packages( "poetry", version, allow_prereleases=self.option("preview") ) if not packages: self.line("No release found for the specified version") return packages.sort( key=cmp_to_key( lambda x, y: 0 if x.version == y.version else int(x.version < y.version or -1) ) ) release = None for package in packages: if package.is_prerelease(): if self.option("preview"): release = package break continue release = package break if release is None: self.line("No new release found") return if release.version == Version.parse(__version__): self.line("You are using the latest version") return try: self.update(release) except subprocess.CalledProcessError as e: self.line("") self.output.block( [ "[CalledProcessError]", "An error has occured: {}".format(str(e)), decode(e.output), ], style="error", ) return e.returncode def update(self, release): version = release.version self.line("Updating to {}".format(version)) if self.lib_backup.exists(): shutil.rmtree(str(self.lib_backup)) # Backup the current installation if self.lib.exists(): shutil.copytree(str(self.lib), str(self.lib_backup)) shutil.rmtree(str(self.lib)) try: self._update(version) except Exception: if not self.lib_backup.exists(): raise shutil.copytree(str(self.lib_backup), str(self.lib)) shutil.rmtree(str(self.lib_backup)) raise finally: if self.lib_backup.exists(): shutil.rmtree(str(self.lib_backup)) self.line("") self.line("") self.line( "Poetry ({}) is installed now. 
Great!".format( version ) ) def _update(self, version): from poetry.utils.helpers import temporary_directory platform = sys.platform if platform == "linux2": platform = "linux" checksum = "poetry-{}-{}.sha256sum".format(version, platform) try: r = urlopen(self.BASE_URL + "/{}/{}".format(version, checksum)) except HTTPError as e: if e.code == 404: raise RuntimeError("Could not find {} file".format(checksum)) raise checksum = r.read().decode() # We get the payload from the remote host name = "poetry-{}-{}.tar.gz".format(version, platform) try: r = urlopen(self.BASE_URL + "/{}/{}".format(version, name)) except HTTPError as e: if e.code == 404: raise RuntimeError("Could not find {} file".format(name)) raise meta = r.info() size = int(meta["Content-Length"]) current = 0 block_size = 8192 bar = self.progress_bar(max=size) bar.set_format(" - Downloading {} %percent%%".format(name)) bar.start() sha = hashlib.sha256() with temporary_directory(prefix="poetry-updater-") as dir_: tar = os.path.join(dir_, name) with open(tar, "wb") as f: while True: buffer = r.read(block_size) if not buffer: break current += len(buffer) f.write(buffer) sha.update(buffer) bar.set_progress(current) bar.finish() # Checking hashes if checksum != sha.hexdigest(): raise RuntimeError( "Hashes for {} do not match: {} != {}".format( name, checksum, sha.hexdigest() ) ) gz = GzipFile(tar, mode="rb") try: with tarfile.TarFile(tar, fileobj=gz, format=tarfile.PAX_FORMAT) as f: f.extractall(str(self.lib)) finally: gz.close() def process(self, *args): return subprocess.check_output(list(args), stderr=subprocess.STDOUT) def _bin_path(self, base_path, bin): if sys.platform == "win32": return (base_path / "Scripts" / bin).with_suffix(".exe") return base_path / "bin" / bin PK!Sy poetry/console/commands/shell.pyimport sys from os import environ from distutils.util import strtobool from .env_command import EnvCommand class ShellCommand(EnvCommand): """ Spawns a shell within the virtual environment. 
shell [options] """ help = """The shell command spawns a shell, according to the $SHELL environment variable, within the virtual environment. If one doesn't exist yet, it will be created. """ def handle(self): from poetry.utils.shell import Shell # Check if it's already activated or doesn't exist and won't be created venv_activated = strtobool(environ.get("POETRY_ACTIVE", "0")) or getattr( sys, "real_prefix", sys.prefix ) == str(self.env.path) if venv_activated: self.line( "Virtual environment already activated: " "{}".format(self.env.path) ) return self.line("Spawning shell within {}".format(self.env.path)) # Setting this to avoid spawning unnecessary nested shells environ["POETRY_ACTIVE"] = "1" shell = Shell.get() self.env.execute(shell.path) environ.pop("POETRY_ACTIVE") PK!Q,,poetry/console/commands/show.py# -*- coding: utf-8 -*- import sys from .env_command import EnvCommand class ShowCommand(EnvCommand): """ Shows information about packages. show { package? : Package to inspect. } { --no-dev : Do not list the dev dependencies. } { --t|tree : List the dependencies as a tree. } { --l|latest : Show the latest version. } { --o|outdated : Show the latest version but only for packages that are outdated. } { --a|all : Show all packages (even those not compatible with current system). 
} """ help = """The show command displays detailed information about a package, or lists all packages available.""" colors = ["green", "yellow", "cyan", "magenta", "blue"] def handle(self): from poetry.repositories.installed_repository import InstalledRepository from poetry.semver import Version package = self.argument("package") if self.option("tree"): self.init_styles() if self.option("outdated"): self.input.set_option("latest", True) locked_repo = self.poetry.locker.locked_repository(not self.option("no-dev")) # Show tree view if requested if self.option("tree") and not package: requires = self.poetry.package.requires + self.poetry.package.dev_requires packages = locked_repo.packages for package in packages: for require in requires: if package.name == require.name: self.display_package_tree(package, locked_repo) break return 0 table = self.table(style="compact") table.get_style().set_vertical_border_char("") locked_packages = locked_repo.packages if package: pkg = None for locked in locked_packages: if package.lower() == locked.name: pkg = locked break if not pkg: raise ValueError("Package {} not found".format(package)) if self.option("tree"): self.display_package_tree(pkg, locked_repo) return 0 rows = [ ["name", " : {}".format(pkg.pretty_name)], ["version", " : {}".format(pkg.pretty_version)], ["description", " : {}".format(pkg.description)], ] table.add_rows(rows) table.render() if pkg.requires: self.line("") self.line("dependencies") for dependency in pkg.requires: self.line( " - {} {}".format( dependency.pretty_name, dependency.pretty_constraint ) ) return 0 show_latest = self.option("latest") show_all = self.option("all") terminal = self.get_application().terminal width = terminal.width name_length = version_length = latest_length = 0 latest_packages = {} installed_repo = InstalledRepository.load(self.env) skipped = [] python = Version.parse(".".join([str(i) for i in self.env.version_info[:3]])) # Computing widths for locked in locked_packages: 
python_constraint = locked.python_constraint if not python_constraint.allows(python) or not self.env.is_valid_for_marker( locked.marker ): skipped.append(locked) if not show_all: continue current_length = len(locked.pretty_name) if not self.output.is_decorated(): installed_status = self.get_installed_status(locked, installed_repo) if installed_status == "not-installed": current_length += 4 name_length = max(name_length, current_length) version_length = max(version_length, len(locked.full_pretty_version)) if show_latest: latest = self.find_latest_package(locked) if not latest: latest = locked latest_packages[locked.pretty_name] = latest latest_length = max(latest_length, len(latest.full_pretty_version)) write_version = name_length + version_length + 3 <= width write_latest = name_length + version_length + latest_length + 3 <= width write_description = name_length + version_length + latest_length + 24 <= width for locked in locked_packages: color = "green" name = locked.pretty_name install_marker = "" if locked in skipped: if not show_all: continue color = "black;options=bold" else: installed_status = self.get_installed_status(locked, installed_repo) if installed_status == "not-installed": color = "red" if not self.output.is_decorated(): # Non installed in non decorated mode install_marker = " (!)" line = "{:{}}{}".format( color, name, name_length - len(install_marker), install_marker ) if write_version: line += " {:{}}".format( locked.full_pretty_version, version_length ) if show_latest and write_latest: latest = latest_packages[locked.pretty_name] update_status = self.get_update_status(latest, locked) color = "green" if update_status == "semver-safe-update": color = "red" elif update_status == "update-possible": color = "yellow" line += " {:{}}".format( color, latest.full_pretty_version, latest_length ) if self.option("outdated") and update_status == "up-to-date": continue if write_description: description = locked.description remaining = width - name_length - 
version_length - 4 if show_latest: remaining -= latest_length if len(locked.description) > remaining: description = description[: remaining - 3] + "..." line += " " + description self.line(line) def display_package_tree(self, package, installed_repo): self.write("{}".format(package.pretty_name)) description = "" if package.description: description = " " + package.description self.line(" {}{}".format(package.pretty_version, description)) dependencies = package.requires dependencies = sorted(dependencies, key=lambda x: x.name) tree_bar = "├" j = 0 total = len(dependencies) for dependency in dependencies: j += 1 if j == total: tree_bar = "└" level = 1 color = self.colors[level] info = "{tree_bar}── <{color}>{name} {constraint}".format( tree_bar=tree_bar, color=color, name=dependency.name, constraint=dependency.pretty_constraint, ) self._write_tree_line(info) tree_bar = tree_bar.replace("└", " ") packages_in_tree = [package.name, dependency.name] self._display_tree( dependency, installed_repo, packages_in_tree, tree_bar, level + 1 ) def _display_tree( self, dependency, installed_repo, packages_in_tree, previous_tree_bar="├", level=1, ): previous_tree_bar = previous_tree_bar.replace("├", "│") dependencies = [] for package in installed_repo.packages: if package.name == dependency.name: dependencies = package.requires break dependencies = sorted(dependencies, key=lambda x: x.name) tree_bar = previous_tree_bar + " ├" i = 0 total = len(dependencies) for dependency in dependencies: i += 1 current_tree = packages_in_tree if i == total: tree_bar = previous_tree_bar + " └" color_ident = level % len(self.colors) color = self.colors[color_ident] circular_warn = "" if dependency.name in current_tree: circular_warn = "(circular dependency aborted here)" info = "{tree_bar}── <{color}>{name} {constraint} {warn}".format( tree_bar=tree_bar, color=color, name=dependency.name, constraint=dependency.pretty_constraint, warn=circular_warn, ) self._write_tree_line(info) tree_bar = 
tree_bar.replace("└", " ") if dependency.name not in current_tree: current_tree.append(dependency.name) self._display_tree( dependency, installed_repo, current_tree, tree_bar, level + 1 ) def _write_tree_line(self, line): if not self.output.is_decorated(): line = line.replace("└", "`-") line = line.replace("├", "|-") line = line.replace("──", "-") line = line.replace("│", "|") self.line(line) def init_styles(self): for color in self.colors: self.set_style(color, color) def find_latest_package(self, package): from poetry.io import NullIO from poetry.puzzle.provider import Provider from poetry.version.version_selector import VersionSelector # find the latest version allowed in this pool if package.source_type == "git": for dep in self.poetry.package.requires: if dep.name == package.name and dep.is_vcs(): return Provider( self.poetry.package, self.poetry.pool, NullIO() ).search_for_vcs(dep)[0] name = package.name selector = VersionSelector(self.poetry.pool) return selector.find_best_candidate(name, ">={}".format(package.pretty_version)) def get_update_status(self, latest, package): from poetry.semver import parse_constraint if latest.full_pretty_version == package.full_pretty_version: return "up-to-date" constraint = parse_constraint("^" + package.pretty_version) if latest.version and constraint.allows(latest.version): # It needs an immediate semver-compliant upgrade return "semver-safe-update" # it needs an upgrade but has potential BC breaks so is not urgent return "update-possible" def get_installed_status(self, locked, installed_repo): for package in installed_repo.packages: if locked.name == package.name: return "installed" return "not-installed" PK!!poetry/console/commands/update.pyfrom .env_command import EnvCommand class UpdateCommand(EnvCommand): """ Update dependencies as according to the pyproject.toml file. update { packages?* : The packages to update. } { --no-dev : Do not install dev dependencies. 
} { --dry-run : Outputs the operations but will not execute anything (implicitly enables --verbose). } { --lock : Do not perform install (only update the lockfile). } """ _loggers = ["poetry.repositories.pypi_repository"] def handle(self): from poetry.installation import Installer packages = self.argument("packages") installer = Installer( self.output, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool, ) if packages: installer.whitelist({name: "*" for name in packages}) installer.dev_mode(not self.option("no-dev")) installer.dry_run(self.option("dry-run")) installer.execute_operations(not self.option("lock")) # Force update installer.update(True) return installer.run() PK!pyproject.toml. The new version should ideally be a valid semver string or a valid bump rule: patch, minor, major, prepatch, preminor, premajor, prerelease. """ RESERVED = { "major", "minor", "patch", "premajor", "preminor", "prepatch", "prerelease", } def handle(self): version = self.argument("version") version = self.increment_version(self.poetry.package.pretty_version, version) self.line( "Bumping version from {} to {}".format( self.poetry.package.pretty_version, version ) ) content = self.poetry.file.read() poetry_content = content["tool"]["poetry"] poetry_content["version"] = version.text self.poetry.file.write(content) def increment_version(self, version, rule): from poetry.semver import Version try: version = Version.parse(version) except ValueError: raise ValueError("The project's version doesn't seem to follow semver") if rule in {"major", "premajor"}: new = version.next_major if rule == "premajor": new = new.first_prerelease elif rule in {"minor", "preminor"}: new = version.next_minor if rule == "preminor": new = new.first_prerelease elif rule in {"patch", "prepatch"}: new = version.next_patch if rule == "prepatch": new = new.first_prerelease elif rule == "prerelease": if version.is_prerelease(): pre = version.prerelease new_prerelease = int(pre[1]) + 1 new = 
Version.parse( "{}.{}.{}-{}".format( version.major, version.minor, version.patch, ".".join([pre[0], str(new_prerelease)]), ) ) else: new = version.next_patch.first_prerelease else: new = Version.parse(rule) return new PK!!poetry/console/styles/__init__.pyPK!33poetry/console/styles/poetry.pyfrom cleo.styles import CleoStyle from cleo.styles import OutputStyle class PoetryStyle(CleoStyle): def __init__(self, i, o): super(PoetryStyle, self).__init__(i, o) self.output.get_formatter().add_style("error", "red") self.output.get_formatter().add_style("warning", "yellow") self.output.get_formatter().add_style("question", "blue") self.output.get_formatter().add_style("comment", "cyan") self.output.get_formatter().add_style("debug", "black", options=["bold"]) def writeln( self, messages, type=OutputStyle.OUTPUT_NORMAL, verbosity=OutputStyle.VERBOSITY_NORMAL, ): if self.output.verbosity >= verbosity: super(PoetryStyle, self).writeln(messages, type=type) def write( self, messages, newline=False, type=OutputStyle.OUTPUT_NORMAL, verbosity=OutputStyle.VERBOSITY_NORMAL, ): if self.output.verbosity >= verbosity: super(PoetryStyle, self).write(messages, newline=newline, type=type) PK!=N2ccpoetry/exceptions.pyclass PoetryException(Exception): pass class InvalidProjectFile(PoetryException): pass PK!!!poetry/installation/__init__.pyfrom .installer import Installer PK!q %poetry/installation/base_installer.pyclass BaseInstaller: def install(self, package): raise NotImplementedError def update(self, source, target): raise NotImplementedError def remove(self, package): raise NotImplementedError PK!_HH poetry/installation/installer.pyimport sys from typing import List from typing import Union from poetry.io import NullIO from poetry.packages import Dependency from poetry.packages import Locker from poetry.packages import Package from poetry.packages.constraints import parse_constraint as parse_generic_constraint from poetry.puzzle import Solver from poetry.puzzle.operations import Install 
from poetry.puzzle.operations import Uninstall from poetry.puzzle.operations import Update from poetry.puzzle.operations.operation import Operation from poetry.repositories import Pool from poetry.repositories import Repository from poetry.repositories.installed_repository import InstalledRepository from poetry.semver import parse_constraint from poetry.semver import Version from poetry.utils.helpers import canonicalize_name from .base_installer import BaseInstaller from .pip_installer import PipInstaller class Installer: def __init__( self, io, env, package, # type: Package locker, # type: Locker pool, # type: Pool installed=None, # type: (Union[InstalledRepository, None]) ): self._io = io self._env = env self._package = package self._locker = locker self._pool = pool self._dry_run = False self._update = False self._verbose = False self._write_lock = True self._dev_mode = True self._develop = [] self._execute_operations = True self._lock = False self._whitelist = [] self._extras = [] self._installer = self._get_installer() if installed is None: installed = self._get_installed() self._installed_repository = installed @property def installer(self): return self._installer def run(self): # Force update if there is no lock file present if not self._update and not self._locker.is_locked(): self._update = True if self.is_dry_run(): self.verbose(True) self._write_lock = False self._execute_operations = False local_repo = Repository() self._do_install(local_repo) return 0 def dry_run(self, dry_run=True): # type: (bool) -> Installer self._dry_run = dry_run return self def is_dry_run(self): # type: () -> bool return self._dry_run def verbose(self, verbose=True): # type: (bool) -> Installer self._verbose = verbose return self def is_verbose(self): # type: () -> bool return self._verbose def dev_mode(self, dev_mode=True): # type: (bool) -> Installer self._dev_mode = dev_mode return self def is_dev_mode(self): # type: () -> bool return self._dev_mode def develop(self, 
packages): # type: (dict) -> Installer self._develop = [canonicalize_name(p) for p in packages] return self def update(self, update=True): # type: (bool) -> Installer self._update = update return self def lock(self): # type: () -> Installer """ Prepare the installer for locking only. """ self.update() self.execute_operations(False) self._lock = True return self def is_updating(self): # type: () -> bool return self._update def execute_operations(self, execute=True): # type: (bool) -> Installer self._execute_operations = execute return self def whitelist(self, packages): # type: (dict) -> Installer self._whitelist = [canonicalize_name(p) for p in packages] return self def extras(self, extras): # type: (list) -> Installer self._extras = extras return self def _do_install(self, local_repo): locked_repository = Repository() if self._update: if self._locker.is_locked(): locked_repository = self._locker.locked_repository(True) # If no packages have been whitelisted (The ones we want to update), # we whitelist every package in the lock file. if not self._whitelist: for pkg in locked_repository.packages: self._whitelist.append(pkg.name) # Checking extras for extra in self._extras: if extra not in self._package.extras: raise ValueError("Extra [{}] is not specified.".format(extra)) self._io.writeln("Updating dependencies") solver = Solver( self._package, self._pool, self._installed_repository, locked_repository, self._io, ) ops = solver.solve(use_latest=self._whitelist) else: self._io.writeln("Installing dependencies from lock file") locked_repository = self._locker.locked_repository(True) if not self._locker.is_fresh(): self._io.writeln( "" "Warning: The lock file is not up to date with " "the latest changes in pyproject.toml. " "You may be getting outdated dependencies. " "Run update to update them." 
"" ) for extra in self._extras: if extra not in self._locker.lock_data.get("extras", {}): raise ValueError("Extra [{}] is not specified.".format(extra)) # If we are installing from lock # Filter the operations by comparing it with what is # currently installed ops = self._get_operations_from_lock(locked_repository) self._populate_local_repo(local_repo, ops, locked_repository) if self._lock: # If we are only in lock mode, no need to go any further self._write_lock_file(local_repo) return 0 root = self._package if not self.is_dev_mode(): root = root.clone() del root.dev_requires[:] with root.with_python_versions( ".".join([str(i) for i in self._env.version_info[:3]]) ): # We resolve again by only using the lock file pool = Pool() # Making a new repo containing the packages # newly resolved and the ones from the current lock file locked_repository = self._locker.locked_repository(True) repo = Repository() for package in local_repo.packages + locked_repository.packages: if not repo.has_package(package): repo.add_package(package) pool.add_repository(repo) # We whitelist all packages to be sure # that the latest ones are picked up whitelist = [] for pkg in locked_repository.packages: whitelist.append(pkg.name) solver = Solver( root, pool, self._installed_repository, locked_repository, NullIO() ) ops = solver.solve(use_latest=whitelist) # We need to filter operations so that packages # not compatible with the current system, # or optional and not requested, are dropped self._filter_operations(ops, local_repo) self._io.new_line() # Execute operations actual_ops = [op for op in ops if not op.skipped] if not actual_ops and (self._execute_operations or self._dry_run): self._io.writeln("Nothing to install or update") if actual_ops and (self._execute_operations or self._dry_run): installs = [] updates = [] uninstalls = [] skipped = [] for op in ops: if op.skipped: skipped.append(op) continue if op.job_type == "install": installs.append( "{}:{}".format( op.package.pretty_name, 
op.package.full_pretty_version ) ) elif op.job_type == "update": updates.append( "{}:{}".format( op.target_package.pretty_name, op.target_package.full_pretty_version, ) ) elif op.job_type == "uninstall": uninstalls.append(op.package.pretty_name) self._io.new_line() self._io.writeln( "Package operations: " "{} install{}, " "{} update{}, " "{} removal{}" "{}".format( len(installs), "" if len(installs) == 1 else "s", len(updates), "" if len(updates) == 1 else "s", len(uninstalls), "" if len(uninstalls) == 1 else "s", ", {} skipped".format(len(skipped)) if skipped and self.is_verbose() else "", ) ) # Writing lock before installing self._write_lock_file(local_repo) self._io.writeln("") for op in ops: self._execute(op) def _write_lock_file(self, repo): # type: (Repository) -> None if self._update and self._write_lock: updated_lock = self._locker.set_lock_data(self._package, repo.packages) if updated_lock: self._io.writeln("") self._io.writeln("Writing lock file") def _execute(self, operation): # type: (Operation) -> None """ Execute a given operation. 
""" method = operation.job_type getattr(self, "_execute_{}".format(method))(operation) def _execute_install(self, operation): # type: (Install) -> None if operation.skipped: if self.is_verbose() and (self._execute_operations or self.is_dry_run()): self._io.writeln( " - Skipping {} ({}) {}".format( operation.package.pretty_name, operation.package.full_pretty_version, operation.skip_reason, ) ) return if self._execute_operations or self.is_dry_run(): self._io.writeln( " - Installing {} ({})".format( operation.package.pretty_name, operation.package.full_pretty_version ) ) if not self._execute_operations: return self._installer.install(operation.package) def _execute_update(self, operation): # type: (Update) -> None source = operation.initial_package target = operation.target_package if operation.skipped: if self.is_verbose() and (self._execute_operations or self.is_dry_run()): self._io.writeln( " - Skipping {} ({}) {}".format( target.pretty_name, target.full_pretty_version, operation.skip_reason, ) ) return if self._execute_operations or self.is_dry_run(): self._io.writeln( " - Updating {} ({} -> {})".format( target.pretty_name, source.full_pretty_version, target.full_pretty_version, ) ) if not self._execute_operations: return self._installer.update(source, target) def _execute_uninstall(self, operation): # type: (Uninstall) -> None if operation.skipped: if self.is_verbose() and (self._execute_operations or self.is_dry_run()): self._io.writeln( " - Not removing {} ({}) {}".format( operation.package.pretty_name, operation.package.full_pretty_version, operation.skip_reason, ) ) return if self._execute_operations or self.is_dry_run(): self._io.writeln( " - Removing {} ({})".format( operation.package.pretty_name, operation.package.full_pretty_version ) ) if not self._execute_operations: return self._installer.remove(operation.package) def _populate_local_repo(self, local_repo, ops, locked_repository): # We walk through all operations and add/remove/update accordingly for 
op in ops: if isinstance(op, Update): package = op.target_package else: package = op.package acted_on = False for pkg in locked_repository.packages: if pkg.name == package.name: # The package we operate on is in the local repo if op.job_type == "update": if pkg.version == package.version: break local_repo.remove_package(pkg) local_repo.add_package(op.target_package) elif op.job_type == "uninstall": local_repo.remove_package(op.package) else: # Even though the package already exists # in the lock file we will prefer the new one # to force updates local_repo.remove_package(pkg) local_repo.add_package(package) acted_on = True if not acted_on: if not local_repo.has_package(package): local_repo.add_package(package) def _get_operations_from_lock( self, locked_repository # type: Repository ): # type: (...) -> List[Operation] installed_repo = self._installed_repository ops = [] extra_packages = [p.name for p in self._get_extra_packages(locked_repository)] for locked in locked_repository.packages: is_installed = False for installed in installed_repo.packages: if locked.name == installed.name: is_installed = True if locked.category == "dev" and not self.is_dev_mode(): ops.append(Uninstall(locked)) elif locked.optional and locked.name not in extra_packages: # Installed but optional and not requested in extras ops.append(Uninstall(locked)) elif locked.version != installed.version: ops.append(Update(installed, locked)) # If it's optional and not in required extras # we do not install if locked.optional and locked.name not in extra_packages: continue op = Install(locked) if is_installed: op.skip("Already installed") ops.append(op) return ops def _filter_operations( self, ops, repo ): # type: (List[Operation], Repository) -> None extra_packages = [p.name for p in self._get_extra_packages(repo)] for op in ops: if isinstance(op, Update): package = op.target_package else: package = op.package if op.job_type == "uninstall": continue if package.name in self._develop and 
package.source_type == "directory": package.develop = True if op.skipped: op.unskip() current_python = parse_constraint( ".".join(str(v) for v in self._env.version_info[:3]) ) if not package.python_constraint.allows( current_python ) or not self._env.is_valid_for_marker(package.marker): op.skip("Not needed for the current environment") continue if self._update: extras = {} for extra, deps in self._package.extras.items(): extras[extra] = [dep.name for dep in deps] else: extras = {} for extra, deps in self._locker.lock_data.get("extras", {}).items(): extras[extra] = [dep.lower() for dep in deps] # If a package is optional and not requested # in any extra we skip it if package.optional: if package.name not in extra_packages: op.skip("Not required") # If the package is a dev package and dev packages # are not requested, we skip it if package.category == "dev" and not self.is_dev_mode(): op.skip("Dev dependencies not requested") def _get_extra_packages(self, repo): """ Returns all packages required by extras. Maybe we just let the solver handle it? 
""" if self._update: extras = {k: [d.name for d in v] for k, v in self._package.extras.items()} else: extras = self._locker.lock_data.get("extras", {}) extra_packages = [] for extra_name, packages in extras.items(): if extra_name not in self._extras: continue extra_packages += [Dependency(p, "*") for p in packages] def _extra_packages(packages): pkgs = [] for package in packages: for pkg in repo.packages: if pkg.name == package.name: pkgs.append(package) pkgs += _extra_packages(pkg.requires) break return pkgs return _extra_packages(extra_packages) def _get_installer(self): # type: () -> BaseInstaller return PipInstaller(self._env, self._io) def _get_installed(self): # type: () -> InstalledRepository return InstalledRepository.load(self._env) PK!umm%poetry/installation/noop_installer.pyfrom .base_installer import BaseInstaller class NoopInstaller(BaseInstaller): def __init__(self): self._installs = [] self._updates = [] self._removals = [] @property def installs(self): return self._installs @property def updates(self): return self._updates @property def removals(self): return self._removals def install(self, package): self._installs.append(package) def update(self, source, target): self._updates.append((source, target)) def remove(self, package): self._removals.append(package) PK!8<<$poetry/installation/pip_installer.pyimport os import shutil import tempfile from subprocess import CalledProcessError from poetry.config import Config from poetry.utils.helpers import get_http_basic_auth from poetry.utils.helpers import safe_rmtree try: import urllib.parse as urlparse except ImportError: import urlparse from poetry.utils._compat import encode from poetry.utils.env import Env from .base_installer import BaseInstaller class PipInstaller(BaseInstaller): def __init__(self, env, io): # type: (Env, ...) 
-> None self._env = env self._io = io def install(self, package, update=False): if package.source_type == "directory": self.install_directory(package) return if package.source_type == "git": self.install_git(package) return args = ["install", "--no-deps"] if package.source_type == "legacy" and package.source_url: parsed = urlparse.urlparse(package.source_url) if parsed.scheme == "http": self._io.write_error( " Installing from unsecure host: {}".format( parsed.hostname ) ) args += ["--trusted-host", parsed.hostname] auth = get_http_basic_auth( Config.create("auth.toml"), package.source_reference ) if auth: index_url = "{scheme}://{username}:{password}@{netloc}{path}".format( scheme=parsed.scheme, username=auth[0], password=auth[1], netloc=parsed.netloc, path=parsed.path, ) else: index_url = package.source_url args += ["--index-url", index_url] if update: args.append("-U") if package.hashes and not package.source_type: # Format as a requirements.txt # We need to create a requirements.txt file # for each package in order to check hashes. # This is far from optimal but we do not have any # other choice since this is the only way for pip # to verify hashes. req = self.create_temporary_requirement(package) args += ["-r", req] try: self.run(*args) finally: os.unlink(req) else: req = self.requirement(package) if not isinstance(req, list): args.append(req) else: args += req self.run(*args) def update(self, _, target): self.install(target, update=True) def remove(self, package): # If we have a VCS package, remove its source directory if package.source_type == "git": src_dir = self._env.path / "src" / package.name if src_dir.exists(): safe_rmtree(str(src_dir)) try: self.run("uninstall", package.name, "-y") except CalledProcessError as e: if "not installed" in str(e): return raise def run(self, *args, **kwargs): # type: (...) 
-> str return self._env.run("python", "-m", "pip", *args, **kwargs) def requirement(self, package, formatted=False): if formatted and not package.source_type: req = "{}=={}".format(package.name, package.version) for h in package.hashes: req += " --hash sha256:{}".format(h) req += "\n" return req if package.source_type in ["file", "directory"]: if package.root_dir: req = os.path.join(package.root_dir, package.source_url) else: req = os.path.realpath(package.source_url) if package.develop and package.source_type == "directory": req = ["-e", req] return req if package.source_type == "git": return "git+{}@{}#egg={}".format( package.source_url, package.source_reference, package.name ) return "{}=={}".format(package.name, package.version) def create_temporary_requirement(self, package): fd, name = tempfile.mkstemp( "reqs.txt", "{}-{}".format(package.name, package.version) ) try: os.write(fd, encode(self.requirement(package, formatted=True))) finally: os.close(fd) return name def install_directory(self, package): from poetry.io import NullIO from poetry.masonry.builder import SdistBuilder from poetry.poetry import Poetry from poetry.utils._compat import decode from poetry.utils.env import NullEnv from poetry.utils.toml_file import TomlFile if package.root_dir: req = os.path.join(package.root_dir, package.source_url) else: req = os.path.realpath(package.source_url) args = ["install", "--no-deps", "-U"] pyproject = TomlFile(os.path.join(req, "pyproject.toml")) has_poetry = False has_build_system = False if pyproject.exists(): pyproject_content = pyproject.read() has_poetry = ( "tool" in pyproject_content and "poetry" in pyproject_content["tool"] ) # Even if there is a build system specified # pip as of right now does not support it fully # TODO: Check for pip version when proper PEP-517 support lands # has_build_system = ("build-system" in pyproject_content) setup = os.path.join(req, "setup.py") has_setup = os.path.exists(setup) if not has_setup and has_poetry and 
(package.develop or not has_build_system): # We actually need to rely on creating a temporary setup.py # file since pip, as of this comment, does not support # build-system for editable packages # We also need it for non-PEP-517 packages builder = SdistBuilder(Poetry.create(pyproject.parent), NullEnv(), NullIO()) with open(setup, "w") as f: f.write(decode(builder.build_setup())) if package.develop: args.append("-e") args.append(req) try: return self.run(*args) finally: if not has_setup and os.path.exists(setup): os.remove(setup) def install_git(self, package): from poetry.packages import Package from poetry.vcs import Git src_dir = self._env.path / "src" / package.name if src_dir.exists(): safe_rmtree(str(src_dir)) src_dir.parent.mkdir(exist_ok=True) git = Git() git.clone(package.source_url, src_dir) git.checkout(package.source_reference, src_dir) # Now we just need to install from the source directory pkg = Package(package.name, package.version) pkg.source_type = "directory" pkg.source_url = str(src_dir) pkg.develop = True self.install_directory(pkg) PK!Ypoetry/io/__init__.pyfrom .null_io import NullIO PK!}poetry/io/null_io.pyfrom cleo.inputs import ListInput from cleo.outputs import NullOutput from poetry.console.styles.poetry import PoetryStyle class NullIO(PoetryStyle): def __init__(self): super(NullIO, self).__init__(ListInput([]), NullOutput()) def is_quiet(self): # type: () -> bool return False def is_verbose(self): # type: () -> bool return False def is_very_verbose(self): # type: () -> bool return False def is_debug(self): # type: () -> bool return False def writeln(self, *args, **kwargs): pass def write(self, *args, **kwargs): pass def new_line(self, *args, **kwargs): pass PK!66poetry/io/raw_argv_input.pyimport sys from cleo.inputs import ArgvInput class RawArgvInput(ArgvInput): def parse(self): self._parsed = self._tokens while True: try: token = self._parsed.pop(0) except IndexError: break self.parse_argument(token) PK!?^^poetry/json/__init__.pyimport 
json import os import jsonschema from typing import List SCHEMA_DIR = os.path.join(os.path.dirname(__file__), "schemas") class ValidationError(ValueError): pass def validate_object(obj, schema_name): # type: (dict, str) -> List[str] schema = os.path.join(SCHEMA_DIR, "{}.json".format(schema_name)) if not os.path.exists(schema): raise ValueError("Schema {} does not exist.".format(schema_name)) with open(schema) as f: schema = json.loads(f.read()) validator = jsonschema.Draft7Validator(schema) validation_errors = sorted(validator.iter_errors(obj), key=lambda e: e.path) errors = [] for error in validation_errors: message = error.message if error.path: message = "[{}] {}".format(".".join(error.path), message) errors.append(message) return errors PK!hY88&poetry/json/schemas/poetry-schema.json{ "$schema": "http://json-schema.org/draft-04/schema#", "name": "Package", "type": "object", "additionalProperties": false, "required": [ "name", "version", "description" ], "properties": { "name": { "type": "string", "description": "Package name." }, "version": { "type": "string", "description": "Package version." }, "description": { "type": "string", "description": "Short package description." }, "keywords": { "type": "array", "items": { "type": "string", "description": "A tag/keyword that this package relates to." } }, "homepage": { "type": "string", "description": "Homepage URL for the project.", "format": "uri" }, "repository": { "type": "string", "description": "Repository URL for the project.", "format": "uri" }, "documentation": { "type": "string", "description": "Documentation URL for the project.", "format": "uri" }, "license": { "type": "string", "description": "License name." }, "authors": { "$ref": "#/definitions/authors" }, "readme": { "type": "string", "description": "The path to the README file" }, "classifiers": { "type": "array", "description": "A list of trove classifers." 
}, "packages": { "type": "array", "description": "A list of packages to include in the final distribution.", "items": { "type": "object", "description": "Information about where the package resides.", "additionalProperties": false, "required": [ "include" ], "properties": { "include": { "type": "string", "description": "What to include in the package." }, "from": { "type": "string", "description": "Where the source directory of the package resides." } } } }, "include": { "type": "array", "description": "A list of files and folders to include." }, "exclude": { "type": "array", "description": "A list of files and folders to exclude." }, "dependencies": { "type": "object", "description": "This is a hash of package name (keys) and version constraints (values) that are required to run this package.", "required": [ "python" ], "properties": { "python": { "type": "string", "description": "The Python versions the package is compatible with." } }, "$ref": "#/definitions/dependencies", "additionalProperties": false }, "dev-dependencies": { "type": "object", "description": "This is a hash of package name (keys) and version constraints (values) that this package requires for developing it (testing tools and such).", "$ref": "#/definitions/dependencies", "additionalProperties": false }, "extras": { "type": "object", "patternProperties": { "^[a-zA-Z-_.0-9]+$": { "type": "array", "items": { "type": "string" } } } }, "build": { "type": "string", "description": "The file used to build extensions." 
}, "source": { "type": "array", "description": "A set of additional repositories where packages can be found.", "additionalProperties": { "$ref": "#/definitions/repository" }, "items": { "$ref": "#/definitions/repository" } }, "scripts": { "type": "object", "description": "A hash of scripts to be installed.", "items": { "type": "string" } }, "plugins": { "type": "object", "description": "A hash of hashes representing plugins", "patternProperties": { "^[a-zA-Z-_.0-9]+$": { "type": "object", "patternProperties": { "^[a-zA-Z-_.0-9]+$": { "type": "string" } } } } } }, "definitions": { "authors": { "type": "array", "description": "List of authors that contributed to the package. This is typically the main maintainers, not the full list.", "items": { "type": "string" } }, "dependencies": { "type": "object", "patternProperties": { "^[a-zA-Z-_.0-9]+$": { "oneOf": [ { "$ref": "#/definitions/dependency" }, { "$ref": "#/definitions/long-dependency" }, { "$ref": "#/definitions/git-dependency" }, { "$ref": "#/definitions/file-dependency" }, { "$ref": "#/definitions/path-dependency" }, { "$ref": "#/definitions/multiple-constraints-dependency" } ] } } }, "dependency": { "type": "string", "description": "The constraint of the dependency." }, "long-dependency": { "type": "object", "required": [ "version" ], "additionalProperties": false, "properties": { "version": { "type": "string", "description": "The constraint of the dependency." }, "python": { "type": "string", "description": "The python versions for which the dependency should be installed." }, "platform": { "type": "string", "description": "The platform(s) for which the dependency should be installed." }, "allows-prereleases": { "type": "boolean", "description": "Whether the dependency allows prereleases or not." }, "optional": { "type": "boolean", "description": "Whether the dependency is optional or not." 
}, "extras": { "type": "array", "description": "The required extras for this dependency.", "items": { "type": "string" } } } }, "git-dependency": { "type": "object", "required": [ "git" ], "additionalProperties": false, "properties": { "git": { "type": "string", "description": "The url of the git repository.", "format": "uri" }, "branch": { "type": "string", "description": "The branch to checkout." }, "tag": { "type": "string", "description": "The tag to checkout." }, "rev": { "type": "string", "description": "The revision to checkout." }, "python": { "type": "string", "description": "The python versions for which the dependency should be installed." }, "platform": { "type": "string", "description": "The platform(s) for which the dependency should be installed." }, "allows-prereleases": { "type": "boolean", "description": "Whether the dependency allows prereleases or not." }, "optional": { "type": "boolean", "description": "Whether the dependency is optional or not." }, "extras": { "type": "array", "description": "The required extras for this dependency.", "items": { "type": "string" } } } }, "file-dependency": { "type": "object", "required": [ "file" ], "additionalProperties": false, "properties": { "file": { "type": "string", "description": "The path to the file." }, "python": { "type": "string", "description": "The python versions for which the dependency should be installed." }, "platform": { "type": "string", "description": "The platform(s) for which the dependency should be installed." }, "optional": { "type": "boolean", "description": "Whether the dependency is optional or not." }, "extras": { "type": "array", "description": "The required extras for this dependency.", "items": { "type": "string" } } } }, "path-dependency": { "type": "object", "required": [ "path" ], "additionalProperties": false, "properties": { "path": { "type": "string", "description": "The path to the dependency." 
}, "python": { "type": "string", "description": "The python versions for which the dependency should be installed." }, "platform": { "type": "string", "description": "The platform(s) for which the dependency should be installed." }, "optional": { "type": "boolean", "description": "Whether the dependency is optional or not." }, "extras": { "type": "array", "description": "The required extras for this dependency.", "items": { "type": "string" } }, "develop": { "type": "boolean", "description": "Whether to install the dependency in development mode." } } }, "multiple-constraints-dependency": { "type": "array", "minItems": 1, "items": { "oneOf": [ { "$ref": "#/definitions/dependency" }, { "$ref": "#/definitions/long-dependency" }, { "$ref": "#/definitions/git-dependency" }, { "$ref": "#/definitions/file-dependency" }, { "$ref": "#/definitions/path-dependency" } ] } }, "scripts": { "type": "object", "patternProperties": { "^[a-zA-Z-_.0-9]+$": { "oneOf": [ { "$ref": "#/definitions/script" }, { "$ref": "#/definitions/extra-script" } ] } } }, "script": { "type": "string", "description": "A simple script pointing to a callable object." 
}, "extra-script": { "type": "object", "description": "A script that should be installed only if extras are activated.", "properties": { "callable": { "$ref": "#/definitions/script" }, "extras": { "type": "array", "description": "The required extras for this script.", "items": { "type": "string" } } } }, "repository": { "type": "object", "properties": { "name": { "type": "string", "description": "The name of the repository" }, "url": { "type": "string", "description": "The url of the repository", "format": "uri" } } } } } PK!FFpoetry/layouts/__init__.pyfrom typing import Type from .layout import Layout from .src import SrcLayout from .standard import StandardLayout _LAYOUTS = {"src": SrcLayout, "standard": StandardLayout} def layout(name): # type: (str) -> Type[Layout] if name not in _LAYOUTS: raise ValueError("Invalid layout") return _LAYOUTS[name] PK!S0 poetry/layouts/layout.pyfrom tomlkit import dumps from tomlkit import loads from tomlkit import table from poetry.utils.helpers import module_name TESTS_DEFAULT = u"""from {package_name} import __version__ def test_version(): assert __version__ == '{version}' """ POETRY_DEFAULT = """\ [tool.poetry] name = "" version = "" description = "" authors = [] [tool.poetry.dependencies] [tool.poetry.dev-dependencies] """ POETRY_WITH_LICENSE = """\ [tool.poetry] name = "" version = "" description = "" authors = [] license = "" [tool.poetry.dependencies] [tool.poetry.dev-dependencies] """ BUILD_SYSTEM_MIN_VERSION = "0.12" BUILD_SYSTEM_MAX_VERSION = None class Layout(object): def __init__( self, project, version="0.1.0", description="", readme_format="md", author=None, license=None, python="*", dependencies=None, dev_dependencies=None, ): self._project = project self._package_name = module_name(project) self._version = version self._description = description self._readme_format = readme_format self._license = license self._python = python self._dependencies = dependencies or {} if dev_dependencies is None: dev_dependencies = 
{"pytest": "^3.5"} self._dev_dependencies = dev_dependencies if not author: author = "Your Name " self._author = author def create(self, path, with_tests=True): path.mkdir(parents=True, exist_ok=True) self._create_default(path) self._create_readme(path) if with_tests: self._create_tests(path) self._write_poetry(path) def generate_poetry_content(self): template = POETRY_DEFAULT if self._license: template = POETRY_WITH_LICENSE content = loads(template) poetry_content = content["tool"]["poetry"] poetry_content["name"] = self._project poetry_content["version"] = self._version poetry_content["description"] = self._description poetry_content["authors"].append(self._author) if self._license: poetry_content["license"] = self._license poetry_content["dependencies"]["python"] = self._python for dep_name, dep_constraint in self._dependencies.items(): poetry_content["dependencies"][dep_name] = dep_constraint for dep_name, dep_constraint in self._dev_dependencies.items(): poetry_content["dev-dependencies"][dep_name] = dep_constraint # Add build system build_system = table() build_system_version = ">=" + BUILD_SYSTEM_MIN_VERSION if BUILD_SYSTEM_MAX_VERSION is not None: build_system_version += ",<" + BUILD_SYSTEM_MAX_VERSION build_system.add("requires", ["poetry" + build_system_version]) build_system.add("build-backend", "poetry.masonry.api") content.add("build-system", build_system) return dumps(content) def _create_default(self, path, src=True): raise NotImplementedError() def _create_readme(self, path): if self._readme_format == "rst": readme_file = path / "README.rst" else: readme_file = path / "README.md" readme_file.touch() def _create_tests(self, path): self._dev_dependencies["pytest"] = "^3.0" tests = path / "tests" tests_init = tests / "__init__.py" tests_default = tests / "test_{}.py".format(self._package_name) tests.mkdir() tests_init.touch(exist_ok=False) with tests_default.open("w") as f: f.write( TESTS_DEFAULT.format( package_name=self._package_name, 
version=self._version ) ) def _write_poetry(self, path): content = self.generate_poetry_content() poetry = path / "pyproject.toml" with poetry.open("w") as f: f.write(content) PK! ѺUpoetry/layouts/src.py# -*- coding: utf-8 -*- from .layout import Layout DEFAULT = u"""__version__ = '{version}' """ class SrcLayout(Layout): def _create_default(self, path): package_path = path / "src" / self._package_name package_init = package_path / "__init__.py" package_path.mkdir(parents=True) with package_init.open("w") as f: f.write(DEFAULT.format(version=self._version)) PK!ѐpoetry/layouts/standard.py# -*- coding: utf-8 -*- from .layout import Layout DEFAULT = u"""__version__ = '{version}' """ class StandardLayout(Layout): def _create_default(self, path): package_path = path / self._package_name package_init = package_path / "__init__.py" package_path.mkdir() with package_init.open("w") as f: f.write(DEFAULT.format(version=self._version)) PK!B)&*poetry/locations.pyfrom .utils.appdirs import user_cache_dir from .utils.appdirs import user_config_dir CACHE_DIR = user_cache_dir("pypoetry") CONFIG_DIR = user_config_dir("pypoetry") PK!ΨDpoetry/masonry/__init__.py""" This module handles the packaging and publishing of python projects. A lot of the code used here has been taken from `flit `__ and adapted to work with the poetry codebase, so kudos to them for showing the way. 
""" from .builder import Builder PK!``poetry/masonry/api.py""" PEP-517 compliant buildsystem API """ import logging import sys from poetry.poetry import Poetry from poetry.io import NullIO from poetry.utils._compat import Path from poetry.utils._compat import unicode from poetry.utils.env import SystemEnv from .builders import SdistBuilder from .builders import WheelBuilder log = logging.getLogger(__name__) def get_requires_for_build_wheel(config_settings=None): """ Returns a list of requirements for building, as strings """ poetry = Poetry.create(".") main, _ = SdistBuilder.convert_dependencies(poetry.package, poetry.package.requires) return main # For now, we require all dependencies to build either a wheel or an sdist. get_requires_for_build_sdist = get_requires_for_build_wheel def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): """Builds a wheel, places it in wheel_directory""" poetry = Poetry.create(".") return unicode( WheelBuilder.make_in( poetry, SystemEnv(Path(sys.prefix)), NullIO(), Path(wheel_directory) ) ) def build_sdist(sdist_directory, config_settings=None): """Builds an sdist, places it in sdist_directory""" poetry = Poetry.create(".") path = SdistBuilder(poetry, SystemEnv(Path(sys.prefix)), NullIO()).build( Path(sdist_directory) ) return unicode(path.name) PK!v 22poetry/masonry/builder.pyfrom .builders import CompleteBuilder from .builders import SdistBuilder from .builders import WheelBuilder class Builder: _FORMATS = {"sdist": SdistBuilder, "wheel": WheelBuilder, "all": CompleteBuilder} def __init__(self, poetry, env, io): self._poetry = poetry self._env = env self._io = io def build(self, fmt): if fmt not in self._FORMATS: raise ValueError("Invalid format: {}".format(fmt)) builder = self._FORMATS[fmt](self._poetry, self._env, self._io) return builder.build() PK!i>$ff#poetry/masonry/builders/__init__.pyfrom .complete import CompleteBuilder from .sdist import SdistBuilder from .wheel import WheelBuilder 
PK!=ɯcc"poetry/masonry/builders/builder.py# -*- coding: utf-8 -*- import os import re import shutil import tempfile from collections import defaultdict from contextlib import contextmanager from typing import Set from typing import Union from poetry.utils._compat import Path from poetry.utils._compat import basestring from poetry.utils._compat import glob from poetry.utils._compat import lru_cache from poetry.vcs import get_vcs from ..metadata import Metadata from ..utils.module import Module from ..utils.package_include import PackageInclude AUTHOR_REGEX = re.compile(r"(?u)^(?P[- .,\w\d'’\"()]+) <(?P.+?)>$") class Builder(object): AVAILABLE_PYTHONS = {"2", "2.7", "3", "3.4", "3.5", "3.6", "3.7"} def __init__(self, poetry, env, io): self._poetry = poetry self._env = env self._io = io self._package = poetry.package self._path = poetry.file.parent self._module = Module( self._package.name, self._path.as_posix(), packages=self._package.packages, includes=self._package.include, ) self._meta = Metadata.from_package(self._package) def build(self): raise NotImplementedError() @lru_cache(maxsize=None) def find_excluded_files(self): # type: () -> Set[str] # Checking VCS vcs = get_vcs(self._path) if not vcs: vcs_ignored_files = set() else: vcs_ignored_files = set(vcs.get_ignored_files()) explicitely_excluded = set() for excluded_glob in self._package.exclude: for excluded in glob( os.path.join(self._path.as_posix(), str(excluded_glob)), recursive=True ): explicitely_excluded.add( Path(excluded).relative_to(self._path).as_posix() ) ignored = vcs_ignored_files | explicitely_excluded result = set() for file in ignored: result.add(file) # The list of excluded files might be big and we will do a lot # containment check (x in excluded). # Returning a set make those tests much much faster. 
return result def is_excluded(self, filepath): # type: (Union[str, Path]) -> bool if not isinstance(filepath, basestring): filepath = filepath.as_posix() return filepath in self.find_excluded_files() def find_files_to_add(self, exclude_build=True): # type: (bool) -> list """ Finds all files to add to the tarball """ to_add = [] for include in self._module.includes: for file in include.elements: if "__pycache__" in str(file): continue if file.is_dir(): continue file = file.relative_to(self._path) if self.is_excluded(file) and isinstance(include, PackageInclude): continue if file.suffix == ".pyc": continue if file in to_add: # Skip duplicates continue self._io.writeln( " - Adding: {}".format(str(file)), verbosity=self._io.VERBOSITY_VERY_VERBOSE, ) to_add.append(file) # Include project files self._io.writeln( " - Adding: pyproject.toml", verbosity=self._io.VERBOSITY_VERY_VERBOSE, ) to_add.append(Path("pyproject.toml")) # If a license file exists, add it for license_file in self._path.glob("LICENSE*"): self._io.writeln( " - Adding: {}".format( license_file.relative_to(self._path) ), verbosity=self._io.VERBOSITY_VERY_VERBOSE, ) to_add.append(license_file.relative_to(self._path)) # If a README is specificed we need to include it # to avoid errors if "readme" in self._poetry.local_config: readme = self._path / self._poetry.local_config["readme"] if readme.exists(): self._io.writeln( " - Adding: {}".format( readme.relative_to(self._path) ), verbosity=self._io.VERBOSITY_VERY_VERBOSE, ) to_add.append(readme.relative_to(self._path)) # If a build script is specified and explicitely required # we add it to the list of files if self._package.build and not exclude_build: to_add.append(Path(self._package.build)) return sorted(to_add) def convert_entry_points(self): # type: () -> dict result = defaultdict(list) # Scripts -> Entry points for name, ep in self._poetry.local_config.get("scripts", {}).items(): extras = "" if isinstance(ep, dict): extras = "[{}]".format(", 
".join(ep["extras"])) ep = ep["callable"] result["console_scripts"].append("{} = {}{}".format(name, ep, extras)) # Plugins -> entry points plugins = self._poetry.local_config.get("plugins", {}) for groupname, group in plugins.items(): for name, ep in sorted(group.items()): result[groupname].append("{} = {}".format(name, ep)) for groupname in result: result[groupname] = sorted(result[groupname]) return dict(result) @classmethod def convert_author(cls, author): # type: () -> dict m = AUTHOR_REGEX.match(author) name = m.group("name") email = m.group("email") return {"name": name, "email": email} @classmethod @contextmanager def temporary_directory(cls, *args, **kwargs): try: from tempfile import TemporaryDirectory with TemporaryDirectory(*args, **kwargs) as name: yield name except ImportError: name = tempfile.mkdtemp(*args, **kwargs) yield name shutil.rmtree(name) PK!W[RYY#poetry/masonry/builders/complete.pyimport os import tarfile import poetry.poetry from contextlib import contextmanager from .builder import Builder from .sdist import SdistBuilder from .wheel import WheelBuilder class CompleteBuilder(Builder): def build(self): # We start by building the tarball # We will use it to build the wheel sdist_builder = SdistBuilder(self._poetry, self._env, self._io) sdist_file = sdist_builder.build() self._io.writeln("") dist_dir = self._path / "dist" with self.unpacked_tarball(sdist_file) as tmpdir: WheelBuilder.make_in( poetry.poetry.Poetry.create(tmpdir), self._env, self._io, dist_dir, original=self._poetry, ) @classmethod @contextmanager def unpacked_tarball(cls, path): tf = tarfile.open(str(path)) with cls.temporary_directory() as tmpdir: tf.extractall(tmpdir) files = os.listdir(tmpdir) assert len(files) == 1, files yield os.path.join(tmpdir, files[0]) PK!]q=@1@1 poetry/masonry/builders/sdist.py# -*- coding: utf-8 -*- import os import re import tarfile from collections import defaultdict from copy import copy from gzip import GzipFile from io import BytesIO from 
posixpath import join as pjoin from pprint import pformat from poetry.utils._compat import Path from poetry.utils._compat import encode from poetry.utils._compat import to_str from ..utils.helpers import normalize_file_permissions from ..utils.package_include import PackageInclude from .builder import Builder SETUP = """\ # -*- coding: utf-8 -*- from distutils.core import setup {before} setup_kwargs = {{ 'name': {name!r}, 'version': {version!r}, 'description': {description!r}, 'long_description': {long_description!r}, 'author': {author!r}, 'author_email': {author_email!r}, 'url': {url!r}, {extra} }} {after} setup(**setup_kwargs) """ PKG_INFO = """\ Metadata-Version: 2.1 Name: {name} Version: {version} Summary: {summary} Home-page: {home_page} Author: {author} Author-email: {author_email} """ class SdistBuilder(Builder): def build(self, target_dir=None): # type: (Path) -> Path self._io.writeln(" - Building sdist") if target_dir is None: target_dir = self._path / "dist" if not target_dir.exists(): target_dir.mkdir(parents=True) target = target_dir / "{}-{}.tar.gz".format( self._package.pretty_name, self._meta.version ) gz = GzipFile(target.as_posix(), mode="wb") tar = tarfile.TarFile( target.as_posix(), mode="w", fileobj=gz, format=tarfile.PAX_FORMAT ) try: tar_dir = "{}-{}".format(self._package.pretty_name, self._meta.version) files_to_add = self.find_files_to_add(exclude_build=False) for relpath in files_to_add: path = self._path / relpath tar_info = tar.gettarinfo( str(path), arcname=pjoin(tar_dir, str(relpath)) ) tar_info = self.clean_tarinfo(tar_info) if tar_info.isreg(): with path.open("rb") as f: tar.addfile(tar_info, f) else: tar.addfile(tar_info) # Symlinks & ? 
setup = self.build_setup() tar_info = tarfile.TarInfo(pjoin(tar_dir, "setup.py")) tar_info.size = len(setup) tar.addfile(tar_info, BytesIO(setup)) pkg_info = self.build_pkg_info() tar_info = tarfile.TarInfo(pjoin(tar_dir, "PKG-INFO")) tar_info.size = len(pkg_info) tar.addfile(tar_info, BytesIO(pkg_info)) finally: tar.close() gz.close() self._io.writeln(" - Built {}".format(target.name)) return target def build_setup(self): # type: () -> bytes before, extra, after = [], [], [] package_dir = {} # If we have a build script, use it if self._package.build: after += [ "from {} import *".format(self._package.build.split(".")[0]), "build(setup_kwargs)", ] modules = [] packages = [] package_data = {} for include in self._module.includes: if isinstance(include, PackageInclude): if include.is_package(): pkg_dir, _packages, _package_data = self.find_packages(include) if pkg_dir is not None: package_dir[""] = os.path.relpath(pkg_dir, str(self._path)) packages += [p for p in _packages if p not in packages] package_data.update(_package_data) else: if include.source is not None: package_dir[""] = str(include.base.relative_to(self._path)) module = include.elements[0].relative_to(include.base).stem if module not in modules: modules.append(module) else: pass if package_dir: before.append("package_dir = \\\n{}\n".format(pformat(package_dir))) extra.append("'package_dir': package_dir,") if packages: before.append("packages = \\\n{}\n".format(pformat(sorted(packages)))) extra.append("'packages': packages,") if package_data: before.append("package_data = \\\n{}\n".format(pformat(package_data))) extra.append("'package_data': package_data,") if modules: before.append("modules = \\\n{}".format(pformat(modules))) extra.append("'py_modules': modules,".format()) dependencies, extras = self.convert_dependencies( self._package, self._package.requires ) if dependencies: before.append( "install_requires = \\\n{}\n".format(pformat(sorted(dependencies))) ) extra.append("'install_requires': 
install_requires,") if extras: before.append("extras_require = \\\n{}\n".format(pformat(extras))) extra.append("'extras_require': extras_require,") entry_points = self.convert_entry_points() if entry_points: before.append("entry_points = \\\n{}\n".format(pformat(entry_points))) extra.append("'entry_points': entry_points,") if self._package.python_versions != "*": python_requires = self._meta.requires_python extra.append("'python_requires': {!r},".format(python_requires)) return encode( SETUP.format( before="\n".join(before), name=to_str(self._meta.name), version=to_str(self._meta.version), description=to_str(self._meta.summary), long_description=to_str(self._meta.description), author=to_str(self._meta.author), author_email=to_str(self._meta.author_email), url=to_str(self._meta.home_page), extra="\n ".join(extra), after="\n".join(after), ) ) def build_pkg_info(self): pkg_info = PKG_INFO.format( name=self._meta.name, version=self._meta.version, summary=self._meta.summary, home_page=self._meta.home_page, author=to_str(self._meta.author), author_email=to_str(self._meta.author_email), ) if self._meta.keywords: pkg_info += "Keywords: {}\n".format(self._meta.keywords) if self._meta.requires_python: pkg_info += "Requires-Python: {}\n".format(self._meta.requires_python) for classifier in self._meta.classifiers: pkg_info += "Classifier: {}\n".format(classifier) for extra in sorted(self._meta.provides_extra): pkg_info += "Provides-Extra: {}\n".format(extra) for dep in sorted(self._meta.requires_dist): pkg_info += "Requires-Dist: {}\n".format(dep) for url in sorted(self._meta.project_urls, key=lambda u: u[0]): pkg_info += "Project-URL: {}\n".format(url) return encode(pkg_info) def find_packages(self, include): """ Discover subpackages and data. It also retrieves necessary files. 
""" pkgdir = None if include.source is not None: pkgdir = str(include.base) base = str(include.elements[0].parent) pkg_name = include.package pkg_data = defaultdict(list) # Undocumented distutils feature: # the empty string matches all package names pkg_data[""].append("*") packages = [pkg_name] subpkg_paths = set() def find_nearest_pkg(rel_path): parts = rel_path.split(os.sep) for i in reversed(range(1, len(parts))): ancestor = "/".join(parts[:i]) if ancestor in subpkg_paths: pkg = ".".join([pkg_name] + parts[:i]) return pkg, "/".join(parts[i:]) # Relative to the top-level package return pkg_name, Path(rel_path).as_posix() excluded_files = self.find_excluded_files() for path, dirnames, filenames in os.walk(str(base), topdown=True): if os.path.basename(path) == "__pycache__": continue from_top_level = os.path.relpath(path, base) if from_top_level == ".": continue is_subpkg = "__init__.py" in filenames if is_subpkg: subpkg_paths.add(from_top_level) parts = from_top_level.split(os.sep) packages.append(".".join([pkg_name] + parts)) else: pkg, from_nearest_pkg = find_nearest_pkg(from_top_level) data_elements = [ f.relative_to(self._path) for f in Path(path).glob("*") if not f.is_dir() ] data = [e for e in data_elements if not self.is_excluded(e)] if not data: continue if len(data) == len(data_elements): pkg_data[pkg].append(pjoin(from_nearest_pkg, "*")) else: for d in data: if d.is_dir(): continue pkg_data[pkg] += [pjoin(from_nearest_pkg, d.name) for d in data] # Sort values in pkg_data pkg_data = {k: sorted(v) for (k, v) in pkg_data.items() if v} return pkgdir, sorted(packages), pkg_data @classmethod def convert_dependencies( cls, package, dependencies # type: Package # type: List[Dependency] ): main = [] extras = defaultdict(list) req_regex = re.compile(r"^(.+) \((.+)\)$") for dependency in dependencies: if dependency.is_optional(): for extra_name, reqs in package.extras.items(): for req in reqs: if req.name == dependency.name: requirement = to_str( 
dependency.to_pep_508(with_extras=False) ) if ";" in requirement: requirement, conditions = requirement.split(";") requirement = requirement.strip() if req_regex.match(requirement): requirement = req_regex.sub( "\\1\\2", requirement.strip() ) extras[extra_name + ":" + conditions.strip()].append( requirement ) continue requirement = requirement.strip() if req_regex.match(requirement): requirement = req_regex.sub( "\\1\\2", requirement.strip() ) extras[extra_name].append(requirement) continue requirement = to_str(dependency.to_pep_508()) if ";" in requirement: requirement, conditions = requirement.split(";") requirement = requirement.strip() if req_regex.match(requirement): requirement = req_regex.sub("\\1\\2", requirement.strip()) extras[":" + conditions.strip()].append(requirement) continue requirement = requirement.strip() if req_regex.match(requirement): requirement = req_regex.sub("\\1\\2", requirement.strip()) main.append(requirement) return main, dict(extras) @classmethod def clean_tarinfo(cls, tar_info): """ Clean metadata from a TarInfo object to make it more reproducible. - Set uid & gid to 0 - Set uname and gname to "" - Normalise permissions to 644 or 755 - Set mtime if not None """ ti = copy(tar_info) ti.uid = 0 ti.gid = 0 ti.uname = "" ti.gname = "" ti.mode = normalize_file_permissions(ti.mode) return ti PK!e{.. 
poetry/masonry/builders/wheel.pyfrom __future__ import unicode_literals import contextlib import hashlib import os import re import tempfile import shutil import stat import zipfile from base64 import urlsafe_b64encode from io import StringIO from typing import Set from poetry.__version__ import __version__ from poetry.semver import parse_constraint from ..utils.helpers import normalize_file_permissions from ..utils.package_include import PackageInclude from ..utils.tags import get_abbr_impl from ..utils.tags import get_abi_tag from ..utils.tags import get_impl_ver from ..utils.tags import get_platform from .builder import Builder wheel_file_template = """\ Wheel-Version: 1.0 Generator: poetry {version} Root-Is-Purelib: {pure_lib} Tag: {tag} """ class WheelBuilder(Builder): def __init__(self, poetry, env, io, target_dir=None, original=None): super(WheelBuilder, self).__init__(poetry, env, io) self._records = [] self._original_path = self._path self._target_dir = target_dir or (self._poetry.file.parent / "dist") if original: self._original_path = original.file.parent @classmethod def make_in(cls, poetry, env, io, directory=None, original=None): wb = WheelBuilder(poetry, env, io, target_dir=directory, original=original) wb.build() return wb.wheel_filename @classmethod def make(cls, poetry, env, io): """Build a wheel in the dist/ directory, and optionally upload it.""" cls.make_in(poetry, env, io) def build(self): self._io.writeln(" - Building wheel") dist_dir = self._target_dir if not dist_dir.exists(): dist_dir.mkdir() (fd, temp_path) = tempfile.mkstemp(suffix=".whl") with zipfile.ZipFile( os.fdopen(fd, "w+b"), mode="w", compression=zipfile.ZIP_DEFLATED ) as zip_file: self._copy_module(zip_file) self._build(zip_file) self._write_metadata(zip_file) self._write_record(zip_file) wheel_path = dist_dir / self.wheel_filename if wheel_path.exists(): wheel_path.unlink() shutil.move(temp_path, str(wheel_path)) self._io.writeln(" - Built {}".format(self.wheel_filename)) def 
_build(self, wheel): if self._package.build: setup = self._path / "setup.py" # We need to place ourselves in the temporary # directory in order to build the package current_path = os.getcwd() try: os.chdir(str(self._path)) self._env.run( "python", str(setup), "build", "-b", str(self._path / "build") ) finally: os.chdir(current_path) build_dir = self._path / "build" lib = list(build_dir.glob("lib.*")) if not lib: # The result of building the extensions # does not exist, this may due to conditional # builds, so we assume that it's okay return lib = lib[0] excluded = self.find_excluded_files() for pkg in lib.glob("**/*"): if pkg.is_dir() or pkg in excluded: continue rel_path = str(pkg.relative_to(lib)) if rel_path in wheel.namelist(): continue self._io.writeln( " - Adding: {}".format(rel_path), verbosity=self._io.VERBOSITY_VERY_VERBOSE, ) self._add_file(wheel, pkg, rel_path) def _copy_module(self, wheel): excluded = self.find_excluded_files() to_add = [] for include in self._module.includes: include.refresh() for file in include.elements: if "__pycache__" in str(file): continue if file.is_dir(): continue if isinstance(include, PackageInclude) and include.source: rel_file = file.relative_to(include.base) else: rel_file = file.relative_to(self._path) if file in excluded: continue if file.suffix == ".pyc": continue if (file, rel_file) in to_add: # Skip duplicates continue self._io.writeln( " - Adding: {}".format(str(file)), verbosity=self._io.VERBOSITY_VERY_VERBOSE, ) to_add.append((file, rel_file)) # Walk the files and compress them, # sorting everything so the order is stable. 
for full_path, rel_path in sorted(to_add, key=lambda x: x[1]): self._add_file(wheel, full_path, rel_path) def _write_metadata(self, wheel): if ( "scripts" in self._poetry.local_config or "plugins" in self._poetry.local_config ): with self._write_to_zip(wheel, self.dist_info + "/entry_points.txt") as f: self._write_entry_points(f) for base in ("COPYING", "LICENSE"): for path in sorted(self._path.glob(base + "*")): self._add_file(wheel, path, "%s/%s" % (self.dist_info, path.name)) with self._write_to_zip(wheel, self.dist_info + "/WHEEL") as f: self._write_wheel_file(f) with self._write_to_zip(wheel, self.dist_info + "/METADATA") as f: self._write_metadata_file(f) def _write_record(self, wheel): # Write a record of the files in the wheel with self._write_to_zip(wheel, self.dist_info + "/RECORD") as f: for path, hash, size in self._records: f.write("{},sha256={},{}\n".format(path, hash, size)) # RECORD itself is recorded with no hash or size f.write(self.dist_info + "/RECORD,,\n") def find_excluded_files(self): # type: () -> Set # Checking VCS return set() @property def dist_info(self): # type: () -> str return self.dist_info_name(self._package.name, self._meta.version) @property def wheel_filename(self): # type: () -> str return "{}-{}-{}.whl".format( re.sub(r"[^\w\d.]+", "_", self._package.pretty_name, flags=re.UNICODE), re.sub(r"[^\w\d.]+", "_", self._meta.version, flags=re.UNICODE), self.tag, ) def supports_python2(self): return self._package.python_constraint.allows_any( parse_constraint(">=2.0.0 <3.0.0") ) def dist_info_name(self, distribution, version): # type: (...) 
-> str escaped_name = re.sub(r"[^\w\d.]+", "_", distribution, flags=re.UNICODE) escaped_version = re.sub(r"[^\w\d.]+", "_", version, flags=re.UNICODE) return "{}-{}.dist-info".format(escaped_name, escaped_version) @property def tag(self): if self._package.build: platform = get_platform().replace(".", "_").replace("-", "_") impl_name = get_abbr_impl(self._env) impl_ver = get_impl_ver(self._env) impl = impl_name + impl_ver abi_tag = str(get_abi_tag(self._env)).lower() tag = (impl, abi_tag, platform) else: platform = "any" if self.supports_python2(): impl = "py2.py3" else: impl = "py3" tag = (impl, "none", platform) return "-".join(tag) def _add_file(self, wheel, full_path, rel_path): full_path, rel_path = str(full_path), str(rel_path) if os.sep != "/": # We always want to have /-separated paths in the zip file and in # RECORD rel_path = rel_path.replace(os.sep, "/") zinfo = zipfile.ZipInfo(rel_path) # Normalize permission bits to either 755 (executable) or 644 st_mode = os.stat(full_path).st_mode new_mode = normalize_file_permissions(st_mode) zinfo.external_attr = (new_mode & 0xFFFF) << 16 # Unix attributes if stat.S_ISDIR(st_mode): zinfo.external_attr |= 0x10 # MS-DOS directory flag hashsum = hashlib.sha256() with open(full_path, "rb") as src: while True: buf = src.read(1024 * 8) if not buf: break hashsum.update(buf) src.seek(0) wheel.writestr(zinfo, src.read(), compress_type=zipfile.ZIP_DEFLATED) size = os.stat(full_path).st_size hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=") self._records.append((rel_path, hash_digest, size)) @contextlib.contextmanager def _write_to_zip(self, wheel, rel_path): sio = StringIO() yield sio # The default is a fixed timestamp rather than the current time, so # that building a wheel twice on the same computer can automatically # give you the exact same result. 
date_time = (2016, 1, 1, 0, 0, 0) zi = zipfile.ZipInfo(rel_path, date_time) b = sio.getvalue().encode("utf-8") hashsum = hashlib.sha256(b) hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=") wheel.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED) self._records.append((rel_path, hash_digest, len(b))) def _write_entry_points(self, fp): """ Write entry_points.txt. """ entry_points = self.convert_entry_points() for group_name in sorted(entry_points): fp.write("[{}]\n".format(group_name)) for ep in sorted(entry_points[group_name]): fp.write(ep.replace(" ", "") + "\n") fp.write("\n") def _write_wheel_file(self, fp): fp.write( wheel_file_template.format( version=__version__, pure_lib="true" if self._package.build is None else "false", tag=self.tag, ) ) def _write_metadata_file(self, fp): """ Write out metadata in the 2.x format (email like) """ fp.write("Metadata-Version: 2.1\n") fp.write("Name: {}\n".format(self._meta.name)) fp.write("Version: {}\n".format(self._meta.version)) fp.write("Summary: {}\n".format(self._meta.summary)) fp.write("Home-page: {}\n".format(self._meta.home_page or "UNKNOWN")) fp.write("License: {}\n".format(self._meta.license or "UNKNOWN")) # Optional fields if self._meta.keywords: fp.write("Keywords: {}\n".format(self._meta.keywords)) if self._meta.author: fp.write("Author: {}\n".format(self._meta.author)) if self._meta.author_email: fp.write("Author-email: {}\n".format(self._meta.author_email)) if self._meta.requires_python: fp.write("Requires-Python: {}\n".format(self._meta.requires_python)) for classifier in self._meta.classifiers: fp.write("Classifier: {}\n".format(classifier)) for extra in sorted(self._meta.provides_extra): fp.write("Provides-Extra: {}\n".format(extra)) for dep in sorted(self._meta.requires_dist): fp.write("Requires-Dist: {}\n".format(dep)) for url in sorted(self._meta.project_urls, key=lambda u: u[0]): fp.write("Project-URL: {}\n".format(url)) if self._meta.description_content_type: fp.write( 
"Description-Content-Type: " "{}\n".format(self._meta.description_content_type) ) if self._meta.description is not None: fp.write("\n" + self._meta.description + "\n") PK!H/ / poetry/masonry/metadata.pyfrom poetry.utils.helpers import canonicalize_name from poetry.utils.helpers import normalize_version from poetry.version.helpers import format_python_constraint class Metadata: metadata_version = "2.1" # version 1.0 name = None version = None platforms = () supported_platforms = () summary = None description = None keywords = None home_page = None download_url = None author = None author_email = None license = None # version 1.1 classifiers = () requires = () provides = () obsoletes = () # version 1.2 maintainer = None maintainer_email = None requires_python = None requires_external = () requires_dist = [] provides_dist = () obsoletes_dist = () project_urls = () # Version 2.1 description_content_type = None provides_extra = [] @classmethod def from_package(cls, package): # type: (...) -> Metadata meta = cls() meta.name = canonicalize_name(package.name) meta.version = normalize_version(package.version.text) meta.summary = package.description if package.readme: with package.readme.open() as f: meta.description = f.read() meta.keywords = ",".join(package.keywords) meta.home_page = package.homepage or package.repository_url meta.author = package.author_name meta.author_email = package.author_email if package.license: meta.license = package.license.id meta.classifiers = package.all_classifiers # Version 1.2 meta.maintainer = meta.author meta.maintainer_email = meta.author_email # Requires python if package.python_versions != "*": meta.requires_python = format_python_constraint(package.python_constraint) meta.requires_dist = [d.to_pep_508() for d in package.requires] # Version 2.1 if package.readme: if package.readme.suffix == ".rst": meta.description_content_type = "text/x-rst" elif package.readme.suffix in [".md", ".markdown"]: meta.description_content_type = 
"text/markdown" else: meta.description_content_type = "text/plain" meta.provides_extra = [e for e in package.extras] if package.urls: for name, url in package.urls.items(): if name == "Homepage" and meta.home_page == url: continue meta.project_urls += ("{}, {}".format(name, url),) return meta PK!5k!!%poetry/masonry/publishing/__init__.pyfrom .publisher import Publisher PK!]%# # &poetry/masonry/publishing/publisher.pyfrom poetry.locations import CONFIG_DIR from poetry.utils._compat import Path from poetry.utils.helpers import get_http_basic_auth from poetry.utils.toml_file import TomlFile from .uploader import Uploader class Publisher: """ Registers and publishes packages to remote repositories. """ def __init__(self, poetry, io): self._poetry = poetry self._package = poetry.package self._io = io self._uploader = Uploader(poetry, io) @property def files(self): return self._uploader.files def publish(self, repository_name, username, password): if repository_name: self._io.writeln( "Publishing {} ({}) " "to {}".format( self._package.pretty_name, self._package.pretty_version, repository_name, ) ) else: self._io.writeln( "Publishing {} ({}) " "to PyPI".format( self._package.pretty_name, self._package.pretty_version ) ) if not repository_name: url = "https://upload.pypi.org/legacy/" repository_name = "pypi" else: # Retrieving config information config_file = TomlFile(Path(CONFIG_DIR) / "config.toml") if not config_file.exists(): raise RuntimeError( "Config file does not exist. 
" "Unable to get repository information" ) config = config_file.read() if ( "repositories" not in config or repository_name not in config["repositories"] ): raise RuntimeError( "Repository {} is not defined".format(repository_name) ) url = config["repositories"][repository_name]["url"] if not (username and password): auth = get_http_basic_auth(self._poetry.auth_config, repository_name) if auth: username = auth[0] password = auth[1] # Requesting missing credentials if not username: username = self._io.ask("Username:") if password is None: password = self._io.ask_hidden("Password:") # TODO: handle certificates self._uploader.auth(username, password) return self._uploader.upload(url) PK!t!!%poetry/masonry/publishing/uploader.pyimport hashlib import io import re from typing import List import requests from requests import adapters from requests.exceptions import HTTPError from requests.packages.urllib3 import util from requests_toolbelt import user_agent from requests_toolbelt.multipart import MultipartEncoder, MultipartEncoderMonitor from poetry.__version__ import __version__ from poetry.utils.helpers import normalize_version from poetry.utils.patterns import wheel_file_re from ..metadata import Metadata _has_blake2 = hasattr(hashlib, "blake2b") class Uploader: def __init__(self, poetry, io): self._poetry = poetry self._package = poetry.package self._io = io self._username = None self._password = None @property def user_agent(self): return user_agent("poetry", __version__) @property def adapter(self): retry = util.Retry( connect=5, total=10, method_whitelist=["GET"], status_forcelist=[500, 501, 502, 503], ) return adapters.HTTPAdapter(max_retries=retry) @property def files(self): # type: () -> List[str] dist = self._poetry.file.parent / "dist" version = normalize_version(self._package.version.text) wheels = list( dist.glob( "{}-{}-*.whl".format( re.sub( r"[^\w\d.]+", "_", self._package.pretty_name, flags=re.UNICODE ), re.sub(r"[^\w\d.]+", "_", version, 
flags=re.UNICODE), ) ) ) tars = list( dist.glob("{}-{}.tar.gz".format(self._package.pretty_name, version)) ) return sorted(wheels + tars) def auth(self, username, password): self._username = username self._password = password def make_session(self): session = requests.session() if self.is_authenticated(): session.auth = (self._username, self._password) session.headers["User-Agent"] = self.user_agent for scheme in ("http://", "https://"): session.mount(scheme, self.adapter) return session def is_authenticated(self): return self._username is not None and self._password is not None def upload(self, url): session = self.make_session() try: self._upload(session, url) finally: session.close() def post_data(self, file): meta = Metadata.from_package(self._package) file_type = self._get_type(file) if _has_blake2: blake2_256_hash = hashlib.blake2b(digest_size=256 // 8) md5_hash = hashlib.md5() sha256_hash = hashlib.sha256() with file.open("rb") as fp: for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""): md5_hash.update(content) sha256_hash.update(content) if _has_blake2: blake2_256_hash.update(content) md5_digest = md5_hash.hexdigest() sha2_digest = sha256_hash.hexdigest() if _has_blake2: blake2_256_digest = blake2_256_hash.hexdigest() else: blake2_256_digest = None if file_type == "bdist_wheel": wheel_info = wheel_file_re.match(file.name) py_version = wheel_info.group("pyver") else: py_version = None data = { # identify release "name": meta.name, "version": meta.version, # file content "filetype": file_type, "pyversion": py_version, # additional meta-data "metadata_version": meta.metadata_version, "summary": meta.summary, "home_page": meta.home_page, "author": meta.author, "author_email": meta.author_email, "maintainer": meta.maintainer, "maintainer_email": meta.maintainer_email, "license": meta.license, "description": meta.description, "keywords": meta.keywords, "platform": meta.platforms, "classifiers": meta.classifiers, "download_url": meta.download_url, 
"supported_platform": meta.supported_platforms, "comment": None, "md5_digest": md5_digest, "sha256_digest": sha2_digest, "blake2_256_digest": blake2_256_digest, # PEP 314 "provides": meta.provides, "requires": meta.requires, "obsoletes": meta.obsoletes, # Metadata 1.2 "project_urls": meta.project_urls, "provides_dist": meta.provides_dist, "obsoletes_dist": meta.obsoletes_dist, "requires_dist": meta.requires_dist, "requires_external": meta.requires_external, "requires_python": meta.requires_python, } # Metadata 2.1 if meta.description_content_type: data["description_content_type"] = meta.description_content_type # TODO: Provides extra return data def _upload(self, session, url): try: self._do_upload(session, url) except HTTPError as e: if ( e.response.status_code not in (403, 400) or e.response.status_code == 400 and "was ever registered" not in e.response.text ): raise # It may be the first time we publish the package # We'll try to register it and go from there try: self._register(session, url) except HTTPError: raise def _do_upload(self, session, url): for file in self.files: # TODO: Check existence resp = self._upload_file(session, url, file) resp.raise_for_status() def _upload_file(self, session, url, file): data = self.post_data(file) data.update( { # action ":action": "file_upload", "protocol_version": "1", } ) data_to_send = self._prepare_data(data) with file.open("rb") as fp: data_to_send.append( ("content", (file.name, fp, "application/octet-stream")) ) encoder = MultipartEncoder(data_to_send) bar = self._io.create_progress_bar(encoder.len) bar.set_format( " - Uploading {0} %percent%%".format(file.name) ) monitor = MultipartEncoderMonitor( encoder, lambda monitor: bar.set_progress(monitor.bytes_read) ) bar.start() resp = session.post( url, data=monitor, allow_redirects=False, headers={"Content-Type": monitor.content_type}, ) if resp.ok: bar.finish() self._io.writeln("") else: self._io.overwrite("") return resp def _register(self, session, url): """ 
Register a package to a repository. """ dist = self._poetry.file.parent / "dist" file = dist / "{}-{}.tar.gz".format( self._package.name, normalize_version(self._package.version.text) ) if not file.exists(): raise RuntimeError('"{0}" does not exist.'.format(file.name)) data = self.post_data(file) data.update({":action": "submit", "protocol_version": "1"}) data_to_send = self._prepare_data(data) encoder = MultipartEncoder(data_to_send) resp = session.post( url, data=encoder, allow_redirects=False, headers={"Content-Type": encoder.content_type}, ) return resp def _prepare_data(self, data): data_to_send = [] for key, value in data.items(): if not isinstance(value, (list, tuple)): data_to_send.append((key, value)) else: for item in value: data_to_send.append((key, item)) return data_to_send def _get_type(self, file): exts = file.suffixes if exts[-1] == ".whl": return "bdist_wheel" elif len(exts) >= 2 and "".join(exts[-2:]) == ".tar.gz": return "sdist" raise ValueError("Unknown distribution format {}".format("".join(exts))) PK! poetry/masonry/utils/__init__.pyPK!ƨC88poetry/masonry/utils/helpers.pydef normalize_file_permissions(st_mode): """ Normalizes the permission bits in the st_mode field from stat to 644/755 Popular VCSs only track whether a file is executable or not. The exact permissions can vary on systems with different umasks. Normalising to 644 (non executable) or 755 (executable) makes builds more reproducible. """ # Set 644 permissions, leaving higher bits of st_mode unchanged new_mode = (st_mode | 0o644) & ~0o133 if st_mode & 0o100: new_mode |= 0o111 # Executable: 644 -> 755 return new_mode PK! ҭpoetry/masonry/utils/include.pyfrom typing import List from poetry.utils._compat import Path class Include(object): """ Represents an "include" entry. It can be a glob string, a single file or a directory. 
This class will then detect the type of this include: - a package - a module - a file - a directory """ def __init__(self, base, include): # type: (Path, str) -> None self._base = base self._include = str(include) self._elements = sorted(list(self._base.glob(str(self._include)))) @property def base(self): # type: () -> Path return self._base @property def elements(self): # type: () -> List[Path] return self._elements def is_empty(self): # type: () -> bool return len(self._elements) == 0 def refresh(self): # type: () -> Include self._elements = sorted(list(self._base.glob(self._include))) return self PK!Q poetry/masonry/utils/module.pyfrom poetry.utils._compat import Path from poetry.utils.helpers import module_name from .include import Include from .package_include import PackageInclude class ModuleOrPackageNotFound(ValueError): pass class Module: def __init__(self, name, directory=".", packages=None, includes=None): self._name = module_name(name) self._in_src = False self._is_package = False self._path = Path(directory) self._includes = [] packages = packages or [] includes = includes or [] if not packages: # It must exist either as a .py file or a directory, but not both pkg_dir = Path(directory, self._name) py_file = Path(directory, self._name + ".py") if pkg_dir.is_dir() and py_file.is_file(): raise ValueError("Both {} and {} exist".format(pkg_dir, py_file)) elif pkg_dir.is_dir(): packages = [{"include": str(pkg_dir.relative_to(self._path))}] elif py_file.is_file(): packages = [{"include": str(py_file.relative_to(self._path))}] else: # Searching for a src module src = Path(directory, "src") src_pkg_dir = src / self._name src_py_file = src / (self._name + ".py") if src_pkg_dir.is_dir() and src_py_file.is_file(): raise ValueError("Both {} and {} exist".format(pkg_dir, py_file)) elif src_pkg_dir.is_dir(): packages = [ { "include": str(src_pkg_dir.relative_to(src)), "from": str(src.relative_to(self._path)), } ] elif src_py_file.is_file(): packages = [ { "include": 
str(src_py_file.relative_to(src)), "from": str(src.relative_to(self._path)), } ] else: raise ModuleOrPackageNotFound( "No file/folder found for package {}".format(name) ) for package in packages: self._includes.append( PackageInclude(self._path, package["include"], package.get("from")) ) for include in includes: self._includes.append(Include(self._path, include)) @property def name(self): # type: () -> str return self._name @property def path(self): # type: () -> Path return self._path @property def file(self): # type: () -> Path if self._is_package: return self._path / "__init__.py" else: return self._path @property def includes(self): # type: () -> List return self._includes def is_package(self): # type: () -> bool return self._is_package def is_in_src(self): # type: () -> bool return self._in_src PK!ܿ??'poetry/masonry/utils/package_include.pyfrom .include import Include class PackageInclude(Include): def __init__(self, base, include, source=None): self._package = None self._is_package = False self._is_module = False self._source = source if source is not None: base = base / source super(PackageInclude, self).__init__(base, include) self.check_elements() @property def package(self): # type: () -> str return self._package @property def source(self): # type: () -> str return self._source def is_package(self): # type: () -> bool return self._is_package def is_module(self): # type: () return self._is_module def refresh(self): # type: () -> PackageInclude super(PackageInclude, self).refresh() return self.check_elements() def check_elements(self): # type: () -> PackageInclude if not self._elements: raise ValueError( "{} does not contain any element".format(self._base / self._include) ) if len(self._elements) > 1: # Probably glob self._is_package = True # The __init__.py file should be first root = self._elements[0] if root.name != "__init__.py": raise ValueError("{} is not a package.".format(root)) self._package = root.parent.name else: if self._elements[0].is_dir(): # 
If it's a directory, we include everything inside it self._package = self._elements[0].name self._elements = sorted(list(self._elements[0].glob("**/*"))) self._is_package = True else: self._package = self._elements[0].stem self._is_module = True return self PK!b]""poetry/masonry/utils/tags.py""" Generate and work with PEP 425 Compatibility Tags. Base implementation taken from https://github.com/pypa/wheel/blob/master/wheel/pep425tags.py and adapted to work with poetry's env util. """ from __future__ import unicode_literals import distutils.util import sys import warnings def get_abbr_impl(env): """Return abbreviated implementation name.""" impl = env.python_implementation if impl == "PyPy": return "pp" elif impl == "Jython": return "jy" elif impl == "IronPython": return "ip" elif impl == "CPython": return "cp" raise LookupError("Unknown Python implementation: " + impl) def get_impl_ver(env): """Return implementation version.""" impl_ver = env.config_var("py_version_nodot") if not impl_ver or get_abbr_impl(env) == "pp": impl_ver = "".join(map(str, get_impl_version_info(env))) return impl_ver def get_impl_version_info(env): """Return sys.version_info-like tuple for use in decrementing the minor version.""" if get_abbr_impl(env) == "pp": # as per https://github.com/pypa/pip/issues/2882 return env.version_info[:3] else: return env.version_info[:2] def get_flag(env, var, fallback, expected=True, warn=True): """Use a fallback method for determining SOABI flags if the needed config var is unset or unavailable.""" val = env.config_var(var) if val is None: if warn: warnings.warn( "Config variable '{0}' is unset, Python ABI tag may " "be incorrect".format(var), RuntimeWarning, 2, ) return fallback() return val == expected def get_abi_tag(env): """Return the ABI tag based on SOABI (if available) or emulate SOABI (CPython 2, PyPy).""" soabi = env.config_var("SOABI") impl = get_abbr_impl(env) if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"): d = "" m = "" u 
= "" if get_flag( env, "Py_DEBUG", lambda: hasattr(sys, "gettotalrefcount"), warn=(impl == "cp"), ): d = "d" if get_flag(env, "WITH_PYMALLOC", lambda: impl == "cp", warn=(impl == "cp")): m = "m" if get_flag( env, "Py_UNICODE_SIZE", lambda: sys.maxunicode == 0x10FFFF, expected=4, warn=(impl == "cp" and env.version_info < (3, 3)), ) and env.version_info < (3, 3): u = "u" abi = "%s%s%s%s%s" % (impl, get_impl_ver(env), d, m, u) elif soabi and soabi.startswith("cpython-"): abi = "cp" + soabi.split("-")[1] elif soabi: abi = soabi.replace(".", "_").replace("-", "_") else: abi = None return abi def get_platform(): """Return our platform name 'win32', 'linux_x86_64'""" # XXX remove distutils dependency result = distutils.util.get_platform().replace(".", "_").replace("-", "_") if result == "linux_x86_64" and sys.maxsize == 2147483647: # pip pull request #3497 result = "linux_i686" return result def get_supported(env, versions=None, supplied_platform=None): """Return a list of supported tags for each version specified in `versions`. :param versions: a list of string versions, of the form ["33", "32"], or None. The first version will be assumed to support our ABI. """ supported = [] # Versions must be given with respect to the preference if versions is None: versions = [] version_info = get_impl_version_info(env) major = version_info[:-1] # Support all previous minor Python versions. 
for minor in range(version_info[-1], -1, -1): versions.append("".join(map(str, major + (minor,)))) impl = get_abbr_impl(env) abis = [] abi = get_abi_tag(env) if abi: abis[0:0] = [abi] abi3s = set() import imp for suffix in imp.get_suffixes(): if suffix[0].startswith(".abi"): abi3s.add(suffix[0].split(".", 2)[1]) abis.extend(sorted(list(abi3s))) abis.append("none") platforms = [] if supplied_platform: platforms.append(supplied_platform) platforms.append(get_platform()) # Current version, current API (built specifically for our Python): for abi in abis: for arch in platforms: supported.append(("%s%s" % (impl, versions[0]), abi, arch)) # abi3 modules compatible with older version of Python for version in versions[1:]: # abi3 was introduced in Python 3.2 if version in ("31", "30"): break for abi in abi3s: # empty set if not Python 3 for arch in platforms: supported.append(("%s%s" % (impl, version), abi, arch)) # No abi / arch, but requires our implementation: for i, version in enumerate(versions): supported.append(("%s%s" % (impl, version), "none", "any")) if i == 0: # Tagged specifically as being cross-version compatible # (with just the major version specified) supported.append(("%s%s" % (impl, versions[0][0]), "none", "any")) # Major Python version + platform; e.g. binaries not using the Python API supported.append(("py%s" % (versions[0][0]), "none", arch)) # No abi / arch, generic Python for i, version in enumerate(versions): supported.append(("py%s" % (version,), "none", "any")) if i == 0: supported.append(("py%s" % (version[0]), "none", "any")) return supported PK!lapoetry/mixology/__init__.pyfrom .version_solver import VersionSolver def resolve_version(root, provider, locked=None, use_latest=None): solver = VersionSolver(root, provider, locked=locked, use_latest=use_latest) return solver.solve() PK! 
Jpoetry/mixology/assignment.pyfrom typing import Any from .incompatibility import Incompatibility from .term import Term class Assignment(Term): """ A term in a PartialSolution that tracks some additional metadata. """ def __init__(self, dependency, is_positive, decision_level, index, cause=None): super(Assignment, self).__init__(dependency, is_positive) self._decision_level = decision_level self._index = index self._cause = cause @property def decision_level(self): # type: () -> int return self._decision_level @property def index(self): # type: () -> int return self._index @property def cause(self): # type: () -> Incompatibility return self._cause @classmethod def decision( cls, package, decision_level, index ): # type: (Any, int, int) -> Assignment return cls(package.to_dependency(), True, decision_level, index) @classmethod def derivation( cls, dependency, is_positive, cause, decision_level, index ): # type: (Any, bool, Incompatibility, int, int) -> Assignment return cls(dependency, is_positive, decision_level, index, cause) def is_decision(self): # type: () -> bool return self._cause is None PK!uE''poetry/mixology/failure.pyfrom typing import Dict from typing import List from typing import Tuple from .incompatibility import Incompatibility from .incompatibility_cause import ConflictCause from .incompatibility_cause import PythonCause class SolveFailure(Exception): def __init__(self, incompatibility): # type: (Incompatibility) -> None self._incompatibility = incompatibility @property def message(self): return str(self) def __str__(self): return _Writer(self._incompatibility).write() class _Writer: def __init__(self, root): # type: (Incompatibility) -> None self._root = root self._derivations = {} # type: Dict[Incompatibility, int] self._lines = [] # type: List[Tuple[str, int]] self._line_numbers = {} # type: Dict[Incompatibility, int] self._count_derivations(self._root) def write(self): buffer = [] required_python_version = None for incompatibility in 
self._root.external_incompatibilities: if isinstance(incompatibility.cause, PythonCause): required_python_version = incompatibility.cause.root_python_version break if required_python_version is not None: buffer.append( "The current project must support the following Python versions: {}".format( required_python_version ) ) buffer.append("") if isinstance(self._root.cause, ConflictCause): self._visit(self._root, {}) else: self._write( self._root, "Because {}, version solving failed.".format(self._root) ) padding = ( 0 if not self._line_numbers else len("({}) ".format(list(self._line_numbers.values())[-1])) ) last_was_empty = False for line in self._lines: message = line[0] if not message: if not last_was_empty: buffer.append("") last_was_empty = True continue last_was_empty = False number = line[-1] if number is not None: message = "({})".format(number).ljust(padding) + message else: message = " " * padding + message buffer.append(message) return "\n".join(buffer) def _write( self, incompatibility, message, numbered=False ): # type: (Incompatibility, str, bool) -> None if numbered: number = len(self._line_numbers) + 1 self._line_numbers[incompatibility] = number self._lines.append((message, number)) else: self._lines.append((message, None)) def _visit( self, incompatibility, details_for_incompatibility, conclusion=False ): # type: (Incompatibility, Dict, bool) -> None numbered = conclusion or self._derivations[incompatibility] > 1 conjunction = "So," if conclusion or incompatibility == self._root else "And" incompatibility_string = str(incompatibility) cause = incompatibility.cause # type: ConflictCause details_for_cause = {} if isinstance(cause.conflict.cause, ConflictCause) and isinstance( cause.other.cause, ConflictCause ): conflict_line = self._line_numbers.get(cause.conflict) other_line = self._line_numbers.get(cause.other) if conflict_line is not None and other_line is not None: self._write( incompatibility, "Because {}, {}.".format( 
cause.conflict.and_to_string( cause.other, details_for_cause, conflict_line, other_line ), incompatibility_string, ), numbered=numbered, ) elif conflict_line is not None or other_line is not None: if conflict_line is not None: with_line = cause.conflict without_line = cause.other line = conflict_line else: with_line = cause.other without_line = cause.conflict line = other_line self._visit(without_line, details_for_cause) self._write( incompatibility, "{} because {} ({}), {}.".format( conjunction, str(with_line), line, incompatibility_string ), numbered=numbered, ) else: single_line_conflict = self._is_single_line(cause.conflict.cause) single_line_other = self._is_single_line(cause.other.cause) if single_line_other or single_line_conflict: first = cause.conflict if single_line_other else cause.other second = cause.other if single_line_other else cause.conflict self._visit(first, details_for_cause) self._visit(second, details_for_cause) self._write( incompatibility, "Thus, {}.".format(incompatibility_string), numbered=numbered, ) else: self._visit(cause.conflict, {}, conclusion=True) self._lines.append(("", None)) self._visit(cause.other, details_for_cause) self._write( incompatibility, "{} because {} ({}), {}".format( conjunction, str(cause.conflict), self._line_numbers[cause.conflict], incompatibility_string, ), numbered=numbered, ) elif isinstance(cause.conflict.cause, ConflictCause) or isinstance( cause.other.cause, ConflictCause ): derived = ( cause.conflict if isinstance(cause.conflict.cause, ConflictCause) else cause.other ) ext = ( cause.other if isinstance(cause.conflict.cause, ConflictCause) else cause.conflict ) derived_line = self._line_numbers.get(derived) if derived_line is not None: self._write( incompatibility, "Because {}, {}.".format( ext.and_to_string( derived, details_for_cause, None, derived_line ), incompatibility_string, ), numbered=numbered, ) elif self._is_collapsible(derived): derived_cause = derived.cause # type: ConflictCause if 
isinstance(derived_cause.conflict.cause, ConflictCause): collapsed_derived = derived_cause.conflict else: collapsed_derived = derived_cause.other if isinstance(derived_cause.conflict.cause, ConflictCause): collapsed_ext = derived_cause.other else: collapsed_ext = derived_cause.conflict details_for_cause = {} self._visit(collapsed_derived, details_for_cause) self._write( incompatibility, "{} because {}, {}.".format( conjunction, collapsed_ext.and_to_string(ext, details_for_cause, None, None), incompatibility_string, ), numbered=numbered, ) else: self._visit(derived, details_for_cause) self._write( incompatibility, "{} because {}, {}.".format( conjunction, str(ext), incompatibility_string ), numbered=numbered, ) else: self._write( incompatibility, "Because {}, {}.".format( cause.conflict.and_to_string( cause.other, details_for_cause, None, None ), incompatibility_string, ), numbered=numbered, ) def _is_collapsible(self, incompatibility): # type: (Incompatibility) -> bool if self._derivations[incompatibility] > 1: return False cause = incompatibility.cause # type: ConflictCause if isinstance(cause.conflict.cause, ConflictCause) and isinstance( cause.other.cause, ConflictCause ): return False if not isinstance(cause.conflict.cause, ConflictCause) and not isinstance( cause.other.cause, ConflictCause ): return False complex = ( cause.conflict if isinstance(cause.conflict.cause, ConflictCause) else cause.other ) return complex not in self._line_numbers def _is_single_line(self, cause): # type: (ConflictCause) -> bool return not isinstance(cause.conflict.cause, ConflictCause) and not isinstance( cause.other.cause, ConflictCause ) def _count_derivations(self, incompatibility): # type: (Incompatibility) -> None if incompatibility in self._derivations: self._derivations[incompatibility] += 1 else: self._derivations[incompatibility] = 1 cause = incompatibility.cause if isinstance(cause, ConflictCause): self._count_derivations(cause.conflict) 
self._count_derivations(cause.other) PK!1>1>"poetry/mixology/incompatibility.pyfrom typing import Dict from typing import List from .incompatibility_cause import ConflictCause from .incompatibility_cause import DependencyCause from .incompatibility_cause import IncompatibilityCause from .incompatibility_cause import NoVersionsCause from .incompatibility_cause import PackageNotFoundCause from .incompatibility_cause import PlatformCause from .incompatibility_cause import PythonCause from .incompatibility_cause import RootCause from .term import Term class Incompatibility: def __init__( self, terms, cause ): # type: (List[Term], IncompatibilityCause) -> None # Remove the root package from generated incompatibilities, since it will # always be satisfied. This makes error reporting clearer, and may also # make solving more efficient. if ( len(terms) != 1 and isinstance(cause, ConflictCause) and any([term.is_positive() and term.dependency.is_root for term in terms]) ): terms = [ term for term in terms if not term.is_positive() or not term.dependency.is_root ] if ( len(terms) == 1 # Short-circuit in the common case of a two-term incompatibility with # two different packages (for example, a dependency). or len(terms) == 2 and terms[0].dependency.name != terms[-1].dependency.name ): pass else: # Coalesce multiple terms about the same package if possible. by_name = {} # type: Dict[str, Dict[str, Term]] for term in terms: if term.dependency.name not in by_name: by_name[term.dependency.name] = {} by_ref = by_name[term.dependency.name] ref = term.dependency.name if ref in by_ref: by_ref[ref] = by_ref[ref].intersect(term) # If we have two terms that refer to the same package but have a null # intersection, they're mutually exclusive, making this incompatibility # irrelevant, since we already know that mutually exclusive version # ranges are incompatible. We should never derive an irrelevant # incompatibility. 
assert by_ref[ref] is not None else: by_ref[ref] = term new_terms = [] for by_ref in by_name.values(): positive_terms = [ term for term in by_ref.values() if term.is_positive() ] if positive_terms: new_terms += positive_terms continue new_terms += list(by_ref.values()) terms = new_terms self._terms = terms self._cause = cause @property def terms(self): # type: () -> List[Term] return self._terms @property def cause(self): # type: () -> IncompatibilityCause return self._cause @property def external_incompatibilities(self): # type: () -> Generator[Incompatibility] """ Returns all external incompatibilities in this incompatibility's derivation graph. """ if isinstance(self._cause, ConflictCause): cause = self._cause # type: ConflictCause for incompatibility in cause.conflict.external_incompatibilities: yield incompatibility for incompatibility in cause.other.external_incompatibilities: yield incompatibility else: yield self def is_failure(self): # type: () -> bool return len(self._terms) == 0 or ( len(self._terms) == 1 and self._terms[0].dependency.is_root ) def __str__(self): if isinstance(self._cause, DependencyCause): assert len(self._terms) == 2 depender = self._terms[0] dependee = self._terms[1] assert depender.is_positive() assert not dependee.is_positive() return "{} depends on {}".format( self._terse(depender, allow_every=True), self._terse(dependee) ) elif isinstance(self._cause, PythonCause): assert len(self._terms) == 1 assert self._terms[0].is_positive() cause = self._cause # type: PythonCause text = "{} requires ".format(self._terse(self._terms[0], allow_every=True)) text += "Python {}".format(cause.python_version) return text elif isinstance(self._cause, PlatformCause): assert len(self._terms) == 1 assert self._terms[0].is_positive() cause = self._cause # type: PlatformCause text = "{} requires ".format(self._terse(self._terms[0], allow_every=True)) text += "platform {}".format(cause.platform) return text elif isinstance(self._cause, NoVersionsCause): 
assert len(self._terms) == 1 assert self._terms[0].is_positive() return "no versions of {} match {}".format( self._terms[0].dependency.name, self._terms[0].constraint ) elif isinstance(self._cause, PackageNotFoundCause): assert len(self._terms) == 1 assert self._terms[0].is_positive() return "{} doesn't exist".format(self._terms[0].dependency.name) elif isinstance(self._cause, RootCause): assert len(self._terms) == 1 assert not self._terms[0].is_positive() assert self._terms[0].dependency.is_root return "{} is {}".format( self._terms[0].dependency.name, self._terms[0].dependency.constraint ) elif self.is_failure(): return "version solving failed" if len(self._terms) == 1: term = self._terms[0] if term.constraint.is_any(): return "{} is {}".format( term.dependency.name, "forbidden" if term.is_positive() else "required", ) else: return "{} is {}".format( term.dependency.name, "forbidden" if term.is_positive() else "required", ) if len(self._terms) == 2: term1 = self._terms[0] term2 = self._terms[1] if term1.is_positive() == term2.is_positive(): if term1.is_positive(): package1 = ( term1.dependency.name if term1.constraint.is_any() else self._terse(term1) ) package2 = ( term2.dependency.name if term2.constraint.is_any() else self._terse(term2) ) return "{} is incompatible with {}".format(package1, package2) else: return "either {} or {}".format( self._terse(term1), self._terse(term2) ) positive = [] negative = [] for term in self._terms: if term.is_positive(): positive.append(self._terse(term)) else: negative.append(self._terse(term)) if positive and negative: if len(positive) == 1: positive_term = [term for term in self._terms if term.is_positive()][0] return "{} requires {}".format( self._terse(positive_term, allow_every=True), " or ".join(negative) ) else: return "if {} then {}".format( " and ".join(positive), " or ".join(negative) ) elif positive: return "one of {} must be false".format(" or ".join(positive)) else: return "one of {} must be true".format(" or 
".join(negative)) def and_to_string( self, other, details, this_line, other_line ): # type: (Incompatibility, dict, int, int) -> str requires_both = self._try_requires_both(other, details, this_line, other_line) if requires_both is not None: return requires_both requires_through = self._try_requires_through( other, details, this_line, other_line ) if requires_through is not None: return requires_through requires_forbidden = self._try_requires_forbidden( other, details, this_line, other_line ) if requires_forbidden is not None: return requires_forbidden buffer = [str(self)] if this_line is not None: buffer.append(" " + this_line) buffer.append(" and {}".format(str(other))) if other_line is not None: buffer.append(" " + other_line) return "\n".join(buffer) def _try_requires_both( self, other, details, this_line, other_line ): # type: (Incompatibility, dict, int, int) -> str if len(self._terms) == 1 or len(other.terms) == 1: return this_positive = self._single_term_where(lambda term: term.is_positive()) if this_positive is None: return other_positive = other._single_term_where(lambda term: term.is_positive()) if other_positive is None: return if this_positive.dependency != other_positive.dependency: return this_negatives = " or ".join( [self._terse(term) for term in self._terms if not term.is_positive()] ) other_negatives = " or ".join( [self._terse(term) for term in other.terms if not term.is_positive()] ) buffer = [self._terse(this_positive, allow_every=True) + " "] is_dependency = isinstance(self.cause, DependencyCause) and isinstance( other.cause, DependencyCause ) if is_dependency: buffer.append("depends on") else: buffer.append("requires") buffer.append(" both {}".format(this_negatives)) if this_line is not None: buffer.append(" ({})".format(this_line)) buffer.append(" and {}".format(other_negatives)) if other_line is not None: buffer.append(" ({})".format(other_line)) return "".join(buffer) def _try_requires_through( self, other, details, this_line, other_line 
): # type: (Incompatibility, dict, int, int) -> str if len(self._terms) == 1 or len(other.terms) == 1: return this_negative = self._single_term_where(lambda term: not term.is_positive()) other_negative = other._single_term_where(lambda term: not term.is_positive()) if this_negative is None and other_negative is None: return this_positive = self._single_term_where(lambda term: term.is_positive()) other_positive = self._single_term_where(lambda term: term.is_positive()) if ( this_negative is not None and other_positive is not None and this_negative.dependency.name == other_positive.dependency.name and this_negative.inverse.satisfies(other_positive) ): prior = self prior_negative = this_negative prior_line = this_line latter = other latter_line = other_line elif ( other_negative is not None and this_positive is not None and other_negative.dependency.name == this_positive.dependency.name and other_negative.inverse.satisfies(this_positive) ): prior = other prior_negative = other_negative prior_line = other_line latter = self latter_line = this_line else: return prior_positives = [term for term in prior.terms if term.is_positive()] buffer = [] if len(prior_positives) > 1: prior_string = " or ".join([self._terse(term) for term in prior_positives]) buffer.append("if {} then ".format(prior_string)) else: if isinstance(prior.cause, DependencyCause): verb = "depends on" else: verb = "requires" buffer.append( "{} {} ".format(self._terse(prior_positives[0], allow_every=True), verb) ) buffer.append(self._terse(prior_negative)) if prior_line is not None: buffer.append(" ({})".format(prior_line)) buffer.append(" which ") if isinstance(latter.cause, DependencyCause): buffer.append("depends on ") else: buffer.append("requires ") buffer.append( " or ".join( [self._terse(term) for term in latter.terms if not term.is_positive()] ) ) if latter_line is not None: buffer.append(" ({})".format(latter_line)) return "".join(buffer) def _try_requires_forbidden( self, other, details, this_line, 
other_line ): # type: (Incompatibility, dict, int, int) -> str if len(self._terms) != 1 and len(other.terms) != 1: return None if len(self.terms) == 1: prior = other latter = self prior_line = other_line latter_line = this_line else: prior = self latter = other prior_line = this_line latter_line = other_line negative = prior._single_term_where(lambda term: not term.is_positive()) if negative is None: return if not negative.inverse.satisfies(latter.terms[0]): return positives = [t for t in prior.terms if t.is_positive()] buffer = [] if len(positives) > 1: prior_string = " or ".join([self._terse(term) for term in positives]) buffer.append("if {} then ".format(prior_string)) else: buffer.append(self._terse(positives[0], allow_every=True)) if isinstance(prior.cause, DependencyCause): buffer.append(" depends on ") else: buffer.append(" requires ") buffer.append(self._terse(latter.terms[0]) + " ") if prior_line is not None: buffer.append("({}) ".format(prior_line)) if isinstance(latter.cause, PythonCause): cause = latter.cause # type: PythonCause buffer.append("which requires Python {}".format(cause.python_version)) elif isinstance(latter.cause, NoVersionsCause): buffer.append("which doesn't match any versions") elif isinstance(latter.cause, PackageNotFoundCause): buffer.append("which doesn't exist") else: buffer.append("which is forbidden") if latter_line is not None: buffer.append(" ({})".format(latter_line)) return "".join(buffer) def _terse(self, term, allow_every=False): if allow_every and term.constraint.is_any(): return "every version of {}".format(term.dependency.name) return str(term.dependency) def _single_term_where(self, callable): # type: (callable) -> Term found = None for term in self._terms: if not callable(term): continue if found is not None: return found = term return found def __repr__(self): return "".format(str(self)) PK!cXbb(poetry/mixology/incompatibility_cause.pyclass IncompatibilityCause(Exception): """ The reason and Incompatibility's terms are 
incompatible. """ class RootCause(IncompatibilityCause): pass class NoVersionsCause(IncompatibilityCause): pass class DependencyCause(IncompatibilityCause): pass class ConflictCause(IncompatibilityCause): """ The incompatibility was derived from two existing incompatibilities during conflict resolution. """ def __init__(self, conflict, other): self._conflict = conflict self._other = other @property def conflict(self): return self._conflict @property def other(self): return self._other def __str__(self): return str(self._conflict) class PythonCause(IncompatibilityCause): """ The incompatibility represents a package's python constraint (Python versions) being incompatible with the current python version. """ def __init__(self, python_version, root_python_version): self._python_version = python_version self._root_python_version = root_python_version @property def python_version(self): return self._python_version @property def root_python_version(self): return self._root_python_version class PlatformCause(IncompatibilityCause): """ The incompatibility represents a package's platform constraint (OS most likely) being incompatible with the current platform. """ def __init__(self, platform): self._platform = platform @property def platform(self): return self._platform class PackageNotFoundCause(IncompatibilityCause): """ The incompatibility represents a package that couldn't be found by its source. 
""" def __init__(self, error): self._error = error @property def error(self): return self._error PK!m0#poetry/mixology/partial_solution.pyfrom collections import OrderedDict from typing import Any from typing import Dict from typing import List from poetry.packages import Dependency from poetry.packages import Package from .assignment import Assignment from .incompatibility import Incompatibility from .set_relation import SetRelation from .term import Term class PartialSolution: """ # A list of Assignments that represent the solver's current best guess about # what's true for the eventual set of package versions that will comprise the # total solution. # # See https://github.com/dart-lang/mixology/tree/master/doc/solver.md#partial-solution. """ def __init__(self): # The assignments that have been made so far, in the order they were # assigned. self._assignments = [] # type: List[Assignment] # The decisions made for each package. self._decisions = OrderedDict() # type: Dict[str, Package] # The intersection of all positive Assignments for each package, minus any # negative Assignments that refer to that package. # # This is derived from self._assignments. self._positive = OrderedDict() # type: Dict[str, Term] # The union of all negative Assignments for each package. # # If a package has any positive Assignments, it doesn't appear in this # map. # # This is derived from self._assignments. self._negative = OrderedDict() # type: Dict[str, Dict[str, Term]] # The number of distinct solutions that have been attempted so far. self._attempted_solutions = 1 # Whether the solver is currently backtracking. 
self._backtracking = False @property def decisions(self): # type: () -> List[Package] return list(self._decisions.values()) @property def decision_level(self): # type: () -> int return len(self._decisions) @property def attempted_solutions(self): # type: () -> int return self._attempted_solutions @property def unsatisfied(self): # type: () -> List[Dependency] return [ term.dependency for term in self._positive.values() if term.dependency.name not in self._decisions ] def decide(self, package): # type: (Package) -> None """ Adds an assignment of package as a decision and increments the decision level. """ # When we make a new decision after backtracking, count an additional # attempted solution. If we backtrack multiple times in a row, though, we # only want to count one, since we haven't actually started attempting a # new solution. if self._backtracking: self._attempted_solutions += 1 self._backtracking = False self._decisions[package.name] = package self._assign( Assignment.decision(package, self.decision_level, len(self._assignments)) ) def derive( self, dependency, is_positive, cause ): # type: (Dependency, bool, Incompatibility) -> None """ Adds an assignment of package as a derivation. """ self._assign( Assignment.derivation( dependency, is_positive, cause, self.decision_level, len(self._assignments), ) ) def _assign(self, assignment): # type: (Assignment) -> None """ Adds an Assignment to _assignments and _positive or _negative. """ self._assignments.append(assignment) self._register(assignment) def backtrack(self, decision_level): # type: (int) -> None """ Resets the current decision level to decision_level, and removes all assignments made after that level. 
""" self._backtracking = True packages = set() while self._assignments[-1].decision_level > decision_level: removed = self._assignments.pop(-1) packages.add(removed.dependency.name) if removed.is_decision(): del self._decisions[removed.dependency.name] # Re-compute _positive and _negative for the packages that were removed. for package in packages: if package in self._positive: del self._positive[package] if package in self._negative: del self._negative[package] for assignment in self._assignments: if assignment.dependency.name in packages: self._register(assignment) def _register(self, assignment): # type: (Assignment) -> None """ Registers an Assignment in _positive or _negative. """ name = assignment.dependency.name old_positive = self._positive.get(name) if old_positive is not None: self._positive[name] = old_positive.intersect(assignment) return ref = assignment.dependency.name negative_by_ref = self._negative.get(name) old_negative = None if negative_by_ref is None else negative_by_ref.get(ref) if old_negative is None: term = assignment else: term = assignment.intersect(old_negative) if term.is_positive(): if name in self._negative: del self._negative[name] self._positive[name] = term else: if name not in self._negative: self._negative[name] = {} self._negative[name][ref] = term def satisfier(self, term): # type: (Term) -> Assignment """ Returns the first Assignment in this solution such that the sublist of assignments up to and including that entry collectively satisfies term. 
""" assigned_term = None # type: Term for assignment in self._assignments: if assignment.dependency.name != term.dependency.name: continue if ( not assignment.dependency.is_root and not assignment.dependency.name == term.dependency.name ): if not assignment.is_positive(): continue assert not term.is_positive() return assignment if assigned_term is None: assigned_term = assignment else: assigned_term = assigned_term.intersect(assignment) # As soon as we have enough assignments to satisfy term, return them. if assigned_term.satisfies(term): return assignment raise RuntimeError("[BUG] {} is not satisfied.".format(term)) def satisfies(self, term): # type: (Term) -> bool return self.relation(term) == SetRelation.SUBSET def relation(self, term): # type: (Term) -> int positive = self._positive.get(term.dependency.name) if positive is not None: return positive.relation(term) by_ref = self._negative.get(term.dependency.name) if by_ref is None: return SetRelation.OVERLAPPING negative = by_ref[term.dependency.name] if negative is None: return SetRelation.OVERLAPPING return negative.relation(term) PK!⟶eepoetry/mixology/result.pyclass SolverResult: def __init__(self, root, packages, attempted_solutions): self._root = root self._packages = packages self._attempted_solutions = attempted_solutions @property def packages(self): return self._packages @property def attempted_solutions(self): return self._attempted_solutions PK!:m poetry/mixology/set_relation.pyclass SetRelation: """ An enum of possible relationships between two sets. """ SUBSET = "subset" DISJOINT = "disjoint" OVERLAPPING = "overlapping" PK!`poetry/mixology/term.py# -*- coding: utf-8 -*- from typing import Union from poetry.packages import Dependency from .set_relation import SetRelation class Term(object): """ A statement about a package which is true or false for a given selection of package versions. See https://github.com/dart-lang/pub/tree/master/doc/solver.md#term. 
""" def __init__(self, dependency, is_positive): # type: (Dependency, bool) -> None self._dependency = dependency self._positive = is_positive @property def inverse(self): # type: () -> Term return Term(self._dependency, not self.is_positive()) @property def dependency(self): return self._dependency @property def constraint(self): return self._dependency.constraint def is_positive(self): # type: () -> bool return self._positive def satisfies(self, other): # type: (Term) -> bool """ Returns whether this term satisfies another. """ return ( self.dependency.name == other.dependency.name and self.relation(other) == SetRelation.SUBSET ) def relation(self, other): # type: (Term) -> int """ Returns the relationship between the package versions allowed by this term and another. """ if self.dependency.name != other.dependency.name: raise ValueError( "{} should refer to {}".format(other, self.dependency.name) ) other_constraint = other.constraint if other.is_positive(): if self.is_positive(): if not self._compatible_dependency(other.dependency): return SetRelation.DISJOINT # foo ^1.5.0 is a subset of foo ^1.0.0 if other_constraint.allows_all(self.constraint): return SetRelation.SUBSET # foo ^2.0.0 is disjoint with foo ^1.0.0 if not self.constraint.allows_any(other_constraint): return SetRelation.DISJOINT return SetRelation.OVERLAPPING else: if not self._compatible_dependency(other.dependency): return SetRelation.OVERLAPPING # not foo ^1.0.0 is disjoint with foo ^1.5.0 if self.constraint.allows_all(other_constraint): return SetRelation.DISJOINT # not foo ^1.5.0 overlaps foo ^1.0.0 # not foo ^2.0.0 is a superset of foo ^1.5.0 return SetRelation.OVERLAPPING else: if self.is_positive(): if not self._compatible_dependency(other.dependency): return SetRelation.SUBSET # foo ^2.0.0 is a subset of not foo ^1.0.0 if not other_constraint.allows_any(self.constraint): return SetRelation.SUBSET # foo ^1.5.0 is disjoint with not foo ^1.0.0 if other_constraint.allows_all(self.constraint): 
return SetRelation.DISJOINT # foo ^1.0.0 overlaps not foo ^1.5.0 return SetRelation.OVERLAPPING else: if not self._compatible_dependency(other.dependency): return SetRelation.OVERLAPPING # not foo ^1.0.0 is a subset of not foo ^1.5.0 if self.constraint.allows_all(other_constraint): return SetRelation.SUBSET # not foo ^2.0.0 overlaps not foo ^1.0.0 # not foo ^1.5.0 is a superset of not foo ^1.0.0 return SetRelation.OVERLAPPING def intersect(self, other): # type: (Term) -> Union[Term, None] """ Returns a Term that represents the packages allowed by both this term and another """ if self.dependency.name != other.dependency.name: raise ValueError( "{} should refer to {}".format(other, self.dependency.name) ) if self._compatible_dependency(other.dependency): if self.is_positive() != other.is_positive(): # foo ^1.0.0 ∩ not foo ^1.5.0 → foo >=1.0.0 <1.5.0 positive = self if self.is_positive() else other negative = other if self.is_positive() else self return self._non_empty_term( positive.constraint.difference(negative.constraint), True ) elif self.is_positive(): # foo ^1.0.0 ∩ foo >=1.5.0 <3.0.0 → foo ^1.5.0 return self._non_empty_term( self.constraint.intersect(other.constraint), True ) else: # not foo ^1.0.0 ∩ not foo >=1.5.0 <3.0.0 → not foo >=1.0.0 <3.0.0 return self._non_empty_term( self.constraint.union(other.constraint), False ) elif self.is_positive() != other.is_positive(): return self if self.is_positive() else other else: return def difference(self, other): # type: (Term) -> Term """ Returns a Term that represents packages allowed by this term and not by the other """ return self.intersect(other.inverse) def _compatible_dependency(self, other): return ( self.dependency.is_root or other.is_root or other.name == self.dependency.name ) def _non_empty_term(self, constraint, is_positive): if constraint.is_empty(): return dep = Dependency(self.dependency.name, constraint) dep.python_versions = str(self.dependency.python_versions) return Term(dep, is_positive) def 
__str__(self): return "{}{}".format("not " if not self.is_positive() else "", self._dependency) def __repr__(self): return "".format(str(self)) PK!|CEE!poetry/mixology/version_solver.py# -*- coding: utf-8 -*- import time from typing import Dict from typing import List from typing import Union from poetry.packages import Dependency from poetry.packages import ProjectPackage from poetry.packages import Package from poetry.puzzle.provider import Provider from poetry.semver import Version from poetry.semver import VersionRange from .failure import SolveFailure from .incompatibility import Incompatibility from .incompatibility_cause import ConflictCause from .incompatibility_cause import NoVersionsCause from .incompatibility_cause import PackageNotFoundCause from .incompatibility_cause import RootCause from .partial_solution import PartialSolution from .result import SolverResult from .set_relation import SetRelation from .term import Term _conflict = object() class VersionSolver: """ The version solver that finds a set of package versions that satisfy the root package's dependencies. See https://github.com/dart-lang/pub/tree/master/doc/solver.md for details on how this solver works. """ def __init__( self, root, # type: ProjectPackage provider, # type: Provider locked=None, # type: Dict[str, Package] use_latest=None, # type: List[str] ): self._root = root self._provider = provider self._locked = locked or {} if use_latest is None: use_latest = [] self._use_latest = use_latest self._incompatibilities = {} # type: Dict[str, List[Incompatibility]] self._solution = PartialSolution() @property def solution(self): # type: () -> PartialSolution return self._solution def solve(self): # type: () -> SolverResult """ Finds a set of dependencies that match the root package's constraints, or raises an error if no such set is available. 
""" start = time.time() root_dependency = Dependency(self._root.name, self._root.version) root_dependency.is_root = True self._add_incompatibility( Incompatibility([Term(root_dependency, False)], RootCause()) ) try: next = self._root.name while next is not None: self._propagate(next) next = self._choose_package_version() return self._result() except Exception: raise finally: self._log( "Version solving took {:.3f} seconds.\n" "Tried {} solutions.".format( time.time() - start, self._solution.attempted_solutions ) ) def _propagate(self, package): # type: (str) -> None """ Performs unit propagation on incompatibilities transitively related to package to derive new assignments for _solution. """ changed = set() changed.add(package) while changed: package = changed.pop() # Iterate in reverse because conflict resolution tends to produce more # general incompatibilities as time goes on. If we look at those first, # we can derive stronger assignments sooner and more eagerly find # conflicts. for incompatibility in reversed(self._incompatibilities[package]): result = self._propagate_incompatibility(incompatibility) if result is _conflict: # If the incompatibility is satisfied by the solution, we use # _resolve_conflict() to determine the root cause of the conflict as a # new incompatibility. # # It also backjumps to a point in the solution # where that incompatibility will allow us to derive new assignments # that avoid the conflict. root_cause = self._resolve_conflict(incompatibility) # Back jumping erases all the assignments we did at the previous # decision level, so we clear [changed] and refill it with the # newly-propagated assignment. 
changed.clear() changed.add(str(self._propagate_incompatibility(root_cause))) break elif result is not None: changed.add(result) def _propagate_incompatibility( self, incompatibility ): # type: (Incompatibility) -> Union[str, _conflict, None] """ If incompatibility is almost satisfied by _solution, adds the negation of the unsatisfied term to _solution. If incompatibility is satisfied by _solution, returns _conflict. If incompatibility is almost satisfied by _solution, returns the unsatisfied term's package name. Otherwise, returns None. """ # The first entry in incompatibility.terms that's not yet satisfied by # _solution, if one exists. If we find more than one, _solution is # inconclusive for incompatibility and we can't deduce anything. unsatisfied = None for term in incompatibility.terms: relation = self._solution.relation(term) if relation == SetRelation.DISJOINT: # If term is already contradicted by _solution, then # incompatibility is contradicted as well and there's nothing new we # can deduce from it. return elif relation == SetRelation.OVERLAPPING: # If more than one term is inconclusive, we can't deduce anything about # incompatibility. if unsatisfied is not None: return # If exactly one term in incompatibility is inconclusive, then it's # almost satisfied and [term] is the unsatisfied term. We can add the # inverse of the term to _solution. unsatisfied = term # If *all* terms in incompatibility are satisfied by _solution, then # incompatibility is satisfied and we have a conflict. 
if unsatisfied is None: return _conflict self._log( "derived: {}{}".format( "not " if unsatisfied.is_positive() else "", unsatisfied.dependency ) ) self._solution.derive( unsatisfied.dependency, not unsatisfied.is_positive(), incompatibility ) return unsatisfied.dependency.name def _resolve_conflict( self, incompatibility ): # type: (Incompatibility) -> Incompatibility """ Given an incompatibility that's satisfied by _solution, The `conflict resolution`_ constructs a new incompatibility that encapsulates the root cause of the conflict and backtracks _solution until the new incompatibility will allow _propagate() to deduce new assignments. Adds the new incompatibility to _incompatibilities and returns it. .. _conflict resolution: https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution """ self._log("conflict: {}".format(incompatibility)) new_incompatibility = False while not incompatibility.is_failure(): # The term in incompatibility.terms that was most recently satisfied by # _solution. most_recent_term = None # The earliest assignment in _solution such that incompatibility is # satisfied by _solution up to and including this assignment. most_recent_satisfier = None # The difference between most_recent_satisfier and most_recent_term; # that is, the versions that are allowed by most_recent_satisfier and not # by most_recent_term. This is None if most_recent_satisfier totally # satisfies most_recent_term. difference = None # The decision level of the earliest assignment in _solution *before* # most_recent_satisfier such that incompatibility is satisfied by # _solution up to and including this assignment plus # most_recent_satisfier. # # Decision level 1 is the level where the root package was selected. It's # safe to go back to decision level 0, but stopping at 1 tends to produce # better error messages, because references to the root package end up # closer to the final conclusion that no solution exists. 
previous_satisfier_level = 1 for term in incompatibility.terms: satisfier = self._solution.satisfier(term) if most_recent_satisfier is None: most_recent_term = term most_recent_satisfier = satisfier elif most_recent_satisfier.index < satisfier.index: previous_satisfier_level = max( previous_satisfier_level, most_recent_satisfier.decision_level ) most_recent_term = term most_recent_satisfier = satisfier difference = None else: previous_satisfier_level = max( previous_satisfier_level, satisfier.decision_level ) if most_recent_term == term: # If most_recent_satisfier doesn't satisfy most_recent_term on its # own, then the next-most-recent satisfier may be the one that # satisfies the remainder. difference = most_recent_satisfier.difference(most_recent_term) if difference is not None: previous_satisfier_level = max( previous_satisfier_level, self._solution.satisfier(difference.inverse).decision_level, ) # If most_recent_identifier is the only satisfier left at its decision # level, or if it has no cause (indicating that it's a decision rather # than a derivation), then incompatibility is the root cause. We then # backjump to previous_satisfier_level, where incompatibility is # guaranteed to allow _propagate to produce more assignments. if ( previous_satisfier_level < most_recent_satisfier.decision_level or most_recent_satisfier.cause is None ): self._solution.backtrack(previous_satisfier_level) if new_incompatibility: self._add_incompatibility(incompatibility) return incompatibility # Create a new incompatibility by combining incompatibility with the # incompatibility that caused most_recent_satisfier to be assigned. Doing # this iteratively constructs an incompatibility that's guaranteed to be # true (that is, we know for sure no solution will satisfy the # incompatibility) while also approximating the intuitive notion of the # "root cause" of the conflict. 
new_terms = [] for term in incompatibility.terms: if term != most_recent_term: new_terms.append(term) for term in most_recent_satisfier.cause.terms: if term.dependency != most_recent_satisfier.dependency: new_terms.append(term) # The most_recent_satisfier may not satisfy most_recent_term on its own # if there are a collection of constraints on most_recent_term that # only satisfy it together. For example, if most_recent_term is # `foo ^1.0.0` and _solution contains `[foo >=1.0.0, # foo <2.0.0]`, then most_recent_satisfier will be `foo <2.0.0` even # though it doesn't totally satisfy `foo ^1.0.0`. # # In this case, we add `not (most_recent_satisfier \ most_recent_term)` to # the incompatibility as well, See the `algorithm documentation`_ for # details. # # .. _algorithm documentation: https://github.com/dart-lang/pub/tree/master/doc/solver.md#conflict-resolution if difference is not None: new_terms.append(difference.inverse) incompatibility = Incompatibility( new_terms, ConflictCause(incompatibility, most_recent_satisfier.cause) ) new_incompatibility = True partially = "" if difference is None else " partially" bang = "!" self._log( "{} {} is{} satisfied by {}".format( bang, most_recent_term, partially, most_recent_satisfier ) ) self._log( '{} which is caused by "{}"'.format(bang, most_recent_satisfier.cause) ) self._log("{} thus: {}".format(bang, incompatibility)) raise SolveFailure(incompatibility) def _choose_package_version(self): # type: () -> Union[str, None] """ Tries to select a version of a required package. Returns the name of the package whose incompatibilities should be propagated by _propagate(), or None indicating that version solving is complete and a solution has been found. """ unsatisfied = self._solution.unsatisfied if not unsatisfied: return # Prefer packages with as few remaining versions as possible, # so that if a conflict is necessary it's forced quickly. 
def _get_min(dependency): if dependency.name in self._use_latest: # If we're forced to use the latest version of a package, it effectively # only has one version to choose from. return 1 if dependency.name in self._locked: return 1 try: return len(self._provider.search_for(dependency)) except ValueError: return 0 if len(unsatisfied) == 1: dependency = unsatisfied[0] else: dependency = min(*unsatisfied, key=_get_min) locked = self._get_locked(dependency.name) if locked is None or not dependency.constraint.allows(locked.version): try: packages = self._provider.search_for(dependency) except ValueError as e: self._add_incompatibility( Incompatibility([Term(dependency, True)], PackageNotFoundCause(e)) ) return dependency.name try: version = packages[0] except IndexError: version = None else: version = locked if version is None: # If there are no versions that satisfy the constraint, # add an incompatibility that indicates that. self._add_incompatibility( Incompatibility([Term(dependency, True)], NoVersionsCause()) ) return dependency.name version = self._provider.complete_package(version) conflict = False for incompatibility in self._provider.incompatibilities_for(version): self._add_incompatibility(incompatibility) # If an incompatibility is already satisfied, then selecting version # would cause a conflict. # # We'll continue adding its dependencies, then go back to # unit propagation which will guide us to choose a better version. 
conflict = conflict or all( [ term.dependency.name == dependency.name or self._solution.satisfies(term) for term in incompatibility.terms ] ) if not conflict: self._solution.decide(version) self._log( "selecting {} ({})".format(version.name, version.full_pretty_version) ) return dependency.name def _excludes_single_version(self, constraint): # type: (Any) -> bool return isinstance(VersionRange().difference(constraint), Version) def _result(self): # type: () -> SolverResult """ Creates a #SolverResult from the decisions in _solution """ decisions = self._solution.decisions return SolverResult( self._root, [p for p in decisions if not p.is_root()], self._solution.attempted_solutions, ) def _add_incompatibility(self, incompatibility): # type: (Incompatibility) -> None self._log("fact: {}".format(incompatibility)) for term in incompatibility.terms: if term.dependency.name not in self._incompatibilities: self._incompatibilities[term.dependency.name] = [] if incompatibility in self._incompatibilities[term.dependency.name]: continue self._incompatibilities[term.dependency.name].append(incompatibility) def _get_locked(self, package_name): # type: (str) -> Union[Package, None] if package_name in self._use_latest: return locked = self._locked.get(package_name) if not locked: return for dep in self._root.all_requires: if dep.name == locked.name: locked.requires_extras = dep.extras return locked def _log(self, text): self._provider.debug(text, self._solution.attempted_solutions) PK!ͪӲpoetry/packages/__init__.pyimport os import re from poetry.version.requirements import Requirement from .dependency import Dependency from .dependency_package import DependencyPackage from .directory_dependency import DirectoryDependency from .file_dependency import FileDependency from .locker import Locker from .package import Package from .package_collection import PackageCollection from .project_package import ProjectPackage from .utils.link import Link from .utils.utils import convert_markers 
from .utils.utils import group_markers from .utils.utils import is_archive_file from .utils.utils import is_installable_dir from .utils.utils import is_url from .utils.utils import path_to_url from .utils.utils import strip_extras from .vcs_dependency import VCSDependency def dependency_from_pep_508(name): # Removing comments parts = name.split("#", 1) name = parts[0].strip() if len(parts) > 1: rest = parts[1] if ";" in rest: name += ";" + rest.split(";", 1)[1] req = Requirement(name) if req.marker: markers = convert_markers(req.marker) else: markers = {} name = req.name path = os.path.normpath(os.path.abspath(name)) link = None if is_url(name): link = Link(name) else: p, extras = strip_extras(path) if os.path.isdir(p) and (os.path.sep in name or name.startswith(".")): if not is_installable_dir(p): raise ValueError( "Directory {!r} is not installable. File 'setup.py' " "not found.".format(name) ) link = Link(path_to_url(p)) elif is_archive_file(p): link = Link(path_to_url(p)) # it's a local file, dir, or url if link: # Handle relative file URLs if link.scheme == "file" and re.search(r"\.\./", link.url): link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) # wheel file if link.is_wheel: m = re.match(r"^(?P(?P.+?)-(?P\d.*?))", link.filename) if not m: raise ValueError("Invalid wheel name: {}".format(link.filename)) name = m.group("name") version = m.group("ver") dep = Dependency(name, version) else: name = link.egg_fragment if link.scheme == "git": dep = VCSDependency(name, "git", link.url_without_fragment) else: dep = Dependency(name, "*") else: if req.pretty_constraint: constraint = req.constraint else: constraint = "*" dep = Dependency(name, constraint) if "extra" in markers: # If we have extras, the dependency is optional dep.deactivate() for or_ in markers["extra"]: for _, extra in or_: dep.in_extras.append(extra) if "python_version" in markers: ors = [] for or_ in markers["python_version"]: ands = [] for op, version in or_: # Expand python 
version if op == "==": version = "~" + version op = "" elif op == "!=": version += ".*" elif op in ("in", "not in"): versions = [] for v in re.split("[ ,]+", version): split = v.split(".") if len(split) in [1, 2]: split.append("*") op_ = "" if op == "in" else "!=" else: op_ = "==" if op == "in" else "!=" versions.append(op_ + ".".join(split)) glue = " || " if op == "in" else ", " if versions: ands.append(glue.join(versions)) continue ands.append("{}{}".format(op, version)) ors.append(" ".join(ands)) dep.python_versions = " || ".join(ors) if req.marker: dep.marker = req.marker # Extras for extra in req.extras: dep.extras.append(extra) return dep PK!"'cc'poetry/packages/constraints/__init__.pyimport re from .any_constraint import AnyConstraint from .constraint import Constraint from .union_constraint import UnionConstraint BASIC_CONSTRAINT = re.compile(r"^(!?==?)?\s*([^\s]+?)\s*$") def parse_constraint(constraints): if constraints == "*": return AnyConstraint() or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip()) or_groups = [] for constraints in or_constraints: and_constraints = re.split( r"(?< ,]) *(? 
1: for constraint in and_constraints: constraint_objects.append(parse_single_constraint(constraint)) else: constraint_objects.append(parse_single_constraint(and_constraints[0])) if len(constraint_objects) == 1: constraint = constraint_objects[0] else: constraint = constraint_objects[0] for next_constraint in constraint_objects[1:]: constraint = constraint.intersect(next_constraint) or_groups.append(constraint) if len(or_groups) == 1: return or_groups[0] else: return UnionConstraint(*or_groups) def parse_single_constraint(constraint): # type: (str) -> BaseConstraint # Basic comparator m = BASIC_CONSTRAINT.match(constraint) if m: op = m.group(1) if op is None: op = "==" version = m.group(2).strip() return Constraint(version, op) raise ValueError("Could not parse version constraint: {}".format(constraint)) PK!i-poetry/packages/constraints/any_constraint.pyfrom .base_constraint import BaseConstraint from .empty_constraint import EmptyConstraint class AnyConstraint(BaseConstraint): def allows(self, other): return True def allows_all(self, other): return True def allows_any(self, other): return True def difference(self, other): if other.is_any(): return EmptyConstraint() return other def intersect(self, other): return other def union(self, other): return AnyConstraint() def is_any(self): return True def is_empty(self): return False def __str__(self): return "*" def __eq__(self, other): return other.is_any() PK!Kynn.poetry/packages/constraints/base_constraint.pyclass BaseConstraint(object): def allows_all(self, other): raise NotImplementedError() def allows_any(self, other): raise NotImplementedError() def difference(self, other): raise NotImplementedError() def intersect(self, other): raise NotImplementedError() def union(self, other): raise NotImplementedError() def is_any(self): return False def is_empty(self): return False def __repr__(self): return "<{} {}>".format(self.__class__.__name__, str(self)) def __eq__(self, other): raise NotImplementedError() PK! 
f^ )poetry/packages/constraints/constraint.pyimport operator from .base_constraint import BaseConstraint from .empty_constraint import EmptyConstraint class Constraint(BaseConstraint): OP_EQ = operator.eq OP_NE = operator.ne _trans_op_str = {"=": OP_EQ, "==": OP_EQ, "!=": OP_NE} _trans_op_int = {OP_EQ: "==", OP_NE: "!="} def __init__(self, version, operator="=="): if operator == "=": operator = "==" self._version = version self._operator = operator self._op = self._trans_op_str[operator] @property def version(self): return self._version @property def operator(self): return self._operator def allows(self, other): is_equal_op = self._operator == "==" is_non_equal_op = self._operator == "!=" is_other_equal_op = other.operator == "==" is_other_non_equal_op = other.operator == "!=" if is_equal_op and is_other_equal_op: return self._version == other.version if ( is_equal_op and is_other_non_equal_op or is_non_equal_op and is_other_equal_op or is_non_equal_op and is_other_non_equal_op ): return self._version != other.version return False def allows_all(self, other): if not isinstance(other, Constraint): return other.is_empty() return other == self def allows_any(self, other): if isinstance(other, Constraint): is_non_equal_op = self._operator == "!=" is_other_non_equal_op = other.operator == "!=" if is_non_equal_op and is_other_non_equal_op: return self._version != other.version return other.allows(self) def difference(self, other): if other.allows(self): return EmptyConstraint() return self def intersect(self, other): from .multi_constraint import MultiConstraint if isinstance(other, Constraint): if other == self: return self if self.operator == "!=" and other.operator == "==" and self.allows(other): return other if other.operator == "!=" and self.operator == "==" and other.allows(self): return self if other.operator == "!=" and self.operator == "!=": return MultiConstraint(self, other) return EmptyConstraint() return other.intersect(self) def union(self, other): if 
isinstance(other, Constraint): from .union_constraint import UnionConstraint return UnionConstraint(self, other) return other.union(self) def is_any(self): return False def is_empty(self): return False def __eq__(self, other): if not isinstance(other, Constraint): return NotImplemented return (self.version, self.operator) == (other.version, other.operator) def __hash__(self): return hash((self._operator, self._version)) def __str__(self): return "{}{}".format( self._operator if self._operator != "==" else "", self._version ) PK!U>/poetry/packages/constraints/empty_constraint.pyfrom .base_constraint import BaseConstraint class EmptyConstraint(BaseConstraint): pretty_string = None def matches(self, _): return True def is_empty(self): return True def allows_all(self, other): return True def allows_any(self, other): return True def intersect(self, other): return other def difference(self, other): return def __eq__(self, other): return other.is_empty() def __str__(self): return "" PK!Pe /poetry/packages/constraints/multi_constraint.pyfrom .base_constraint import BaseConstraint from .constraint import Constraint class MultiConstraint(BaseConstraint): def __init__(self, *constraints): if any(c.operator == "==" for c in constraints): raise ValueError( "A multi-constraint can only be comprised of negative constraints" ) self._constraints = constraints @property def constraints(self): return self._constraints def allows(self, other): for constraint in self._constraints: if not constraint.allows(other): return False return True def allows_all(self, other): if other.is_any(): return False if other.is_empty(): return True if isinstance(other, Constraint): return self.allows(other) our_constraints = iter(self._constraints) their_constraints = iter(other.constraints) our_constraint = next(our_constraints, None) their_constraint = next(their_constraints, None) while our_constraint and their_constraint: if our_constraint.allows_all(their_constraint): their_constraint = 
next(their_constraints, None) else: our_constraint = next(our_constraints, None) return their_constraint is None def allows_any(self, other): if other.is_any(): return True if other.is_empty(): return True if isinstance(other, Constraint): return self.allows(other) if isinstance(other, MultiConstraint): for c1 in self.constraints: for c2 in other.constraints: if c1.allows(c2): return True return False def intersect(self, other): if isinstance(other, Constraint): constraints = [c for c in self._constraints if c == other] if len(constraints) == 1: return constraints[0] return MultiConstraint(*constraints) def __eq__(self, other): if not isinstance(other, MultiConstraint): return False return sorted( self._constraints, key=lambda c: (c.operator, c.version) ) == sorted(other.constraints, key=lambda c: (c.operator, c.version)) def __str__(self): constraints = [] for constraint in self._constraints: constraints.append(str(constraint)) return "{}".format(", ").join(constraints) PK!q /poetry/packages/constraints/union_constraint.pyfrom .base_constraint import BaseConstraint from .constraint import Constraint from .empty_constraint import EmptyConstraint from .multi_constraint import MultiConstraint class UnionConstraint(BaseConstraint): def __init__(self, *constraints): self._constraints = constraints @property def constraints(self): return self._constraints def allows(self, other): for constraint in self._constraints: if constraint.allows(other): return True return False def allows_any(self, other): if other.is_empty(): return False if other.is_any(): return True if isinstance(other, Constraint): constraints = [other] else: constraints = other.constraints for our_constraint in self._constraints: for their_constraint in constraints: if our_constraint.allows_any(their_constraint): return True return False def allows_all(self, other): if other.is_any(): return False if other.is_empty(): return True if isinstance(other, Constraint): constraints = [other] else: constraints = 
other.constraints our_constraints = iter(self._constraints) their_constraints = iter(constraints) our_constraint = next(our_constraints, None) their_constraint = next(their_constraints, None) while our_constraint and their_constraint: if our_constraint.allows_all(their_constraint): their_constraint = next(their_constraints, None) else: our_constraint = next(our_constraints, None) return their_constraint is None def intersect(self, other): if other.is_any(): return self if other.is_empty(): return other if isinstance(other, Constraint): if self.allows(other): return other return EmptyConstraint() new_constraints = [] for our_constraint in self._constraints: for their_constraint in other.constraints: intersection = our_constraint.intersect(their_constraint) if not intersection.is_empty() and intersection not in new_constraints: new_constraints.append(intersection) if not new_constraints: return EmptyConstraint() return UnionConstraint(*new_constraints) def union(self, other): if isinstance(other, Constraint): constraints = self._constraints if other not in self._constraints: constraints += (other,) return UnionConstraint(*constraints) def __eq__(self, other): if not isinstance(other, UnionConstraint): return False return sorted( self._constraints, key=lambda c: (c.operator, c.version) ) == sorted(other.constraints, key=lambda c: (c.operator, c.version)) def __str__(self): constraints = [] for constraint in self._constraints: constraints.append(str(constraint)) return "{}".format(" || ").join(constraints) PK!QFF2poetry/packages/constraints/wildcard_constraint.pyimport re from .constraint import Constraint class WilcardConstraint(Constraint): def __init__(self, constraint): # type: (str) -> None m = re.match( r"^(!= ?|==)?v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.[xX*])+$", constraint ) if not m: raise ValueError("Invalid value for wildcard constraint") if not m.group(1): operator = "==" else: operator = m.group(1).strip() super(WilcardConstraint, self).__init__( operator, 
".".join([g if g else "*" for g in m.groups()[1:]]) ) if m.group(4): position = 2 elif m.group(3): position = 1 else: position = 0 from ..version_parser import VersionParser parser = VersionParser() groups = m.groups()[1:] low_version = parser._manipulate_version_string(groups, position) high_version = parser._manipulate_version_string(groups, position, 1) if operator == "!=": if low_version == "0.0.0.0": self._constraint = Constraint(">=", high_version) else: self._constraint = parser.parse_constraints( "<{} || >={}".format(low_version, high_version) ) else: if low_version == "0.0.0.0": self._constraint = Constraint("<", high_version) else: self._constraint = parser.parse_constraints( ">={},<{}".format(low_version, high_version) ) @property def supported_operators(self): return ["!=", "=="] @property def constraint(self): return self._constraint def matches(self, provider): # type: (Constraint) -> bool if isinstance(provider, self.__class__): return self._constraint.matches(provider.constraint) return provider.matches(self._constraint) def __str__(self): op = "" if self.string_operator == "!=": op = "!= " return "{}{}".format(op, self._version) PK!?&&poetry/packages/dependency.pyimport poetry.packages from poetry.semver import parse_constraint from poetry.semver import Version from poetry.semver import VersionConstraint from poetry.semver import VersionRange from poetry.semver import VersionUnion from poetry.utils.helpers import canonicalize_name from poetry.version.markers import AnyMarker from poetry.version.markers import parse_marker from .constraints import parse_constraint as parse_generic_constraint from .constraints.constraint import Constraint from .constraints.multi_constraint import MultiConstraint from .constraints.union_constraint import UnionConstraint class Dependency(object): def __init__( self, name, # type: str constraint, # type: str optional=False, # type: bool category="main", # type: str allows_prereleases=False, # type: bool ): self._name = 
canonicalize_name(name) self._pretty_name = name try: if not isinstance(constraint, VersionConstraint): self._constraint = parse_constraint(constraint) else: self._constraint = constraint except ValueError: self._constraint = parse_constraint("*") self._pretty_constraint = str(constraint) self._optional = optional self._category = category if isinstance(self._constraint, VersionRange) and self._constraint.min: allows_prereleases = ( allows_prereleases or self._constraint.min.is_prerelease() ) self._allows_prereleases = allows_prereleases self._python_versions = "*" self._python_constraint = parse_constraint("*") self._transitive_python_versions = None self._transitive_python_constraint = None self._extras = [] self._in_extras = [] self._activated = not self._optional self.is_root = False self.marker = AnyMarker() @property def name(self): return self._name @property def constraint(self): return self._constraint @property def pretty_constraint(self): return self._pretty_constraint @property def pretty_name(self): return self._pretty_name @property def category(self): return self._category @property def python_versions(self): return self._python_versions @python_versions.setter def python_versions(self, value): self._python_versions = value self._python_constraint = parse_constraint(value) if not self._python_constraint.is_any(): self.marker = self.marker.intersect( parse_marker( self._create_nested_marker( "python_version", self._python_constraint ) ) ) @property def transitive_python_versions(self): if self._transitive_python_versions is None: return self._python_versions return self._transitive_python_versions @transitive_python_versions.setter def transitive_python_versions(self, value): self._transitive_python_versions = value self._transitive_python_constraint = parse_constraint(value) @property def python_constraint(self): return self._python_constraint @property def transitive_python_constraint(self): if self._transitive_python_constraint is None: return 
self._python_constraint return self._transitive_python_constraint @property def extras(self): # type: () -> list return self._extras @property def in_extras(self): # type: () -> list return self._in_extras def allows_prereleases(self): return self._allows_prereleases def is_optional(self): return self._optional def is_activated(self): return self._activated def is_vcs(self): return False def is_file(self): return False def is_directory(self): return False def accepts(self, package): # type: (poetry.packages.Package) -> bool """ Determines if the given package matches this dependency. """ return ( self._name == package.name and self._constraint.allows(package.version) and (not package.is_prerelease() or self.allows_prereleases()) ) def to_pep_508(self, with_extras=True): # type: (bool) -> str requirement = self.pretty_name if self.extras: requirement += "[{}]".format(",".join(self.extras)) if isinstance(self.constraint, VersionUnion): requirement += " ({})".format( ",".join([str(c).replace(" ", "") for c in self.constraint.ranges]) ) elif isinstance(self.constraint, Version): requirement += " (=={})".format(self.constraint.text) elif not self.constraint.is_any(): requirement += " ({})".format(str(self.constraint).replace(" ", "")) markers = [] if not self.marker.is_any(): marker = self.marker if not with_extras: marker = marker.without_extras() if not marker.is_empty(): markers.append(str(marker)) else: # Python marker if self.python_versions != "*": python_constraint = self.python_constraint markers.append( self._create_nested_marker("python_version", python_constraint) ) in_extras = " || ".join(self._in_extras) if in_extras and with_extras: markers.append( self._create_nested_marker("extra", parse_generic_constraint(in_extras)) ) if markers: if len(markers) > 1: markers = ["({})".format(m) for m in markers] requirement += "; {}".format(" and ".join(markers)) else: requirement += "; {}".format(markers[0]) return requirement def _create_nested_marker(self, name, 
constraint): if isinstance(constraint, (MultiConstraint, UnionConstraint)): parts = [] for c in constraint.constraints: multi = False if isinstance(c, (MultiConstraint, UnionConstraint)): multi = True parts.append((multi, self._create_nested_marker(name, c))) glue = " and " if isinstance(constraint, UnionConstraint): parts = [ "({})".format(part[1]) if part[0] else part[1] for part in parts ] glue = " or " else: parts = [part[1] for part in parts] marker = glue.join(parts) elif isinstance(constraint, Constraint): marker = '{} {} "{}"'.format(name, constraint.operator, constraint.version) elif isinstance(constraint, VersionUnion): parts = [] for c in constraint.ranges: parts.append(self._create_nested_marker(name, c)) glue = " or " parts = ["({})".format(part) for part in parts] marker = glue.join(parts) elif isinstance(constraint, Version): marker = '{} == "{}"'.format(name, constraint.text) else: if constraint.min is not None: op = ">=" if not constraint.include_min: op = ">" version = constraint.min.text if constraint.max is not None: text = '{} {} "{}"'.format(name, op, version) op = "<=" if not constraint.include_max: op = "<" version = constraint.max text += ' and {} {} "{}"'.format(name, op, version) return text elif constraint.max is not None: op = "<=" if not constraint.include_max: op = "<" version = constraint.max else: return "" marker = '{} {} "{}"'.format(name, op, version) return marker def activate(self): """ Set the dependency as mandatory. """ self._activated = True def deactivate(self): """ Set the dependency as optional. 
""" if not self._optional: self._optional = True self._activated = False def with_constraint(self, constraint): new = Dependency( self.pretty_name, constraint, optional=self.is_optional(), category=self.category, allows_prereleases=self.allows_prereleases(), ) new.is_root = self.is_root new.python_versions = self.python_versions for extra in self.extras: new.extras.append(extra) for in_extra in self.in_extras: new.in_extras.append(in_extra) return new def __eq__(self, other): if not isinstance(other, Dependency): return NotImplemented return self._name == other.name and self._constraint == other.constraint def __ne__(self, other): return not self == other def __hash__(self): return hash((self._name, self._pretty_constraint)) def __str__(self): if self.is_root: return self._pretty_name return "{} ({})".format(self._pretty_name, self._pretty_constraint) def __repr__(self): return "<{} {}>".format(self.__class__.__name__, str(self)) PK!%poetry/packages/dependency_package.pyclass DependencyPackage(object): def __init__(self, dependency, package): self._dependency = dependency self._package = package @property def dependency(self): return self._dependency @property def package(self): return self._package def clone(self): # type: () -> DependencyPackage return self.__class__(self._dependency, self._package.clone()) def __getattr__(self, name): return getattr(self._package, name) def __setattr__(self, key, value): if key in {"_dependency", "_package"}: return super(DependencyPackage, self).__setattr__(key, value) setattr(self._package, key, value) def __str__(self): return str(self._package) def __repr__(self): return repr(self._package) def __hash__(self): return hash(self._package) def __eq__(self, other): if isinstance(other, DependencyPackage): other = other.package return self._package == other PK!8'poetry/packages/directory_dependency.pyfrom pkginfo.distribution import HEADER_ATTRS from pkginfo.distribution import HEADER_ATTRS_2_0 from poetry.utils._compat import 
Path from poetry.utils.toml_file import TomlFile from .dependency import Dependency # Patching pkginfo to support Metadata version 2.1 (PEP 566) HEADER_ATTRS.update( {"2.1": HEADER_ATTRS_2_0 + (("Provides-Extra", "provides_extra", True),)} ) class DirectoryDependency(Dependency): def __init__( self, name, path, # type: Path category="main", # type: str optional=False, # type: bool base=None, # type: Path develop=True, # type: bool ): self._path = path self._base = base self._full_path = path self._develop = develop self._supports_poetry = False if self._base and not self._path.is_absolute(): self._full_path = self._base / self._path if not self._full_path.exists(): raise ValueError("Directory {} does not exist".format(self._path)) if self._full_path.is_file(): raise ValueError("{} is a file, expected a directory".format(self._path)) # Checking content to determine actions setup = self._full_path / "setup.py" pyproject = TomlFile(self._full_path / "pyproject.toml") if pyproject.exists(): pyproject_content = pyproject.read() self._supports_poetry = ( "tool" in pyproject_content and "poetry" in pyproject_content["tool"] ) if not setup.exists() and not self._supports_poetry: raise ValueError( "Directory {} does not seem to be a Python package".format( self._full_path ) ) super(DirectoryDependency, self).__init__( name, "*", category=category, optional=optional, allows_prereleases=True ) @property def path(self): return self._path @property def full_path(self): return self._full_path.resolve() @property def base(self): return self._base @property def develop(self): return self._develop def supports_poetry(self): return self._supports_poetry def is_directory(self): return True PK!r44"poetry/packages/file_dependency.pyimport hashlib import io from pkginfo.distribution import HEADER_ATTRS from pkginfo.distribution import HEADER_ATTRS_2_0 from poetry.utils._compat import Path from .dependency import Dependency # Patching pkginfo to support Metadata version 2.1 (PEP 566) 
HEADER_ATTRS.update( {"2.1": HEADER_ATTRS_2_0 + (("Provides-Extra", "provides_extra", True),)} ) class FileDependency(Dependency): def __init__( self, name, path, # type: Path category="main", # type: str optional=False, # type: bool base=None, # type: Path ): self._path = path self._base = base self._full_path = path if self._base and not self._path.is_absolute(): self._full_path = self._base / self._path if not self._full_path.exists(): raise ValueError("File {} does not exist".format(self._path)) if self._full_path.is_dir(): raise ValueError("{} is a directory, expected a file".format(self._path)) super(FileDependency, self).__init__( name, "*", category=category, optional=optional, allows_prereleases=True ) @property def path(self): return self._path @property def full_path(self): return self._full_path.resolve() def is_file(self): return True def hash(self): h = hashlib.sha256() with self._full_path.open("rb") as fp: for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""): h.update(content) return h.hexdigest() PK!ˉ5jjpoetry/packages/locker.pyimport json import poetry.packages import poetry.repositories from hashlib import sha256 from tomlkit import document from typing import List from poetry.utils._compat import Path from poetry.utils.toml_file import TomlFile from poetry.version.markers import parse_marker class Locker: _relevant_keys = ["dependencies", "dev-dependencies", "source", "extras"] def __init__(self, lock, local_config): # type: (Path, dict) -> None self._lock = TomlFile(lock) self._local_config = local_config self._lock_data = None self._content_hash = self._get_content_hash() @property def lock(self): # type: () -> TomlFile return self._lock @property def lock_data(self): if self._lock_data is None: self._lock_data = self._get_lock_data() return self._lock_data def is_locked(self): # type: () -> bool """ Checks whether the locker has been locked (lockfile found). 
""" if not self._lock.exists(): return False return "package" in self.lock_data def is_fresh(self): # type: () -> bool """ Checks whether the lock file is still up to date with the current hash. """ lock = self._lock.read() metadata = lock.get("metadata", {}) if "content-hash" in metadata: return self._content_hash == lock["metadata"]["content-hash"] return False def locked_repository( self, with_dev_reqs=False ): # type: (bool) -> poetry.repositories.Repository """ Searches and returns a repository of locked packages. """ if not self.is_locked(): return poetry.repositories.Repository() lock_data = self.lock_data packages = poetry.repositories.Repository() if with_dev_reqs: locked_packages = lock_data["package"] else: locked_packages = [ p for p in lock_data["package"] if p["category"] == "main" ] if not locked_packages: return packages for info in locked_packages: package = poetry.packages.Package( info["name"], info["version"], info["version"] ) package.description = info.get("description", "") package.category = info["category"] package.optional = info["optional"] package.hashes = lock_data["metadata"]["hashes"][info["name"]] package.python_versions = info["python-versions"] if "marker" in info: package.marker = parse_marker(info["marker"]) else: # Compatibility for old locks if "requirements" in info: dep = poetry.packages.Dependency("foo", "0.0.0") for name, value in info["requirements"].items(): if name == "python": dep.python_versions = value elif name == "platform": dep.platform = value split_dep = dep.to_pep_508(False).split(";") if len(split_dep) > 1: package.marker = parse_marker(split_dep[1].strip()) for dep_name, constraint in info.get("dependencies", {}).items(): if isinstance(constraint, list): for c in constraint: package.add_dependency(dep_name, c) continue package.add_dependency(dep_name, constraint) if "source" in info: package.source_type = info["source"]["type"] package.source_url = info["source"]["url"] package.source_reference = 
info["source"]["reference"] packages.add_package(package) return packages def set_lock_data(self, root, packages): # type: () -> bool hashes = {} packages = self._lock_packages(packages) # Retrieving hashes for package in packages: if package["name"] not in hashes: hashes[package["name"]] = [] hashes[package["name"]] += package["hashes"] del package["hashes"] lock = document() lock["package"] = packages if root.extras: lock["extras"] = { extra: [dep.pretty_name for dep in deps] for extra, deps in root.extras.items() } lock["metadata"] = { "python-versions": root.python_versions, "content-hash": self._content_hash, "hashes": hashes, } if not self.is_locked() or lock != self.lock_data: self._write_lock_data(lock) return True return False def _write_lock_data(self, data): self.lock.write(data) # Checking lock file data consistency if data != self.lock.read(): raise RuntimeError("Inconsistent lock file data.") self._lock_data = None def _get_content_hash(self): # type: () -> str """ Returns the sha256 hash of the sorted content of the pyproject file. """ content = self._local_config relevant_content = {} for key in self._relevant_keys: relevant_content[key] = content.get(key) content_hash = sha256( json.dumps(relevant_content, sort_keys=True).encode() ).hexdigest() return content_hash def _get_lock_data(self): # type: () -> dict if not self._lock.exists(): raise RuntimeError("No lockfile found. 
Unable to read locked packages") return self._lock.read() def _lock_packages( self, packages ): # type: (List['poetry.packages.Package']) -> list locked = [] for package in sorted(packages, key=lambda x: x.name): spec = self._dump_package(package) locked.append(spec) return locked def _dump_package(self, package): # type: (poetry.packages.Package) -> dict dependencies = {} for dependency in sorted(package.requires, key=lambda d: d.name): if dependency.is_optional() and not dependency.is_activated(): continue if dependency.pretty_name not in dependencies: dependencies[dependency.pretty_name] = [] constraint = {"version": str(dependency.pretty_constraint)} if not dependency.python_constraint.is_any(): constraint["python"] = str(dependency.python_constraint) if len(constraint) == 1: dependencies[dependency.pretty_name].append(constraint["version"]) else: dependencies[dependency.pretty_name].append(constraint) data = { "name": package.pretty_name, "version": package.pretty_version, "description": package.description or "", "category": package.category, "optional": package.optional, "python-versions": package.python_versions, "hashes": sorted(package.hashes), } if not package.marker.is_any(): data["marker"] = str(package.marker) if dependencies: for k, constraints in dependencies.items(): if len(constraints) == 1: dependencies[k] = constraints[0] data["dependencies"] = dependencies if package.source_type: data["source"] = { "type": package.source_type, "url": package.source_url, "reference": package.source_reference, } return data PK!1-t,t,poetry/packages/package.py# -*- coding: utf-8 -*- import copy import re from contextlib import contextmanager from typing import Union from poetry.semver import Version from poetry.semver import parse_constraint from poetry.spdx import license_by_id from poetry.spdx import License from poetry.utils._compat import Path from poetry.utils.helpers import canonicalize_name from poetry.version.markers import AnyMarker from 
poetry.version.markers import parse_marker from .constraints import parse_constraint as parse_generic_constraint from .dependency import Dependency from .directory_dependency import DirectoryDependency from .file_dependency import FileDependency from .vcs_dependency import VCSDependency from .utils.utils import convert_markers from .utils.utils import create_nested_marker AUTHOR_REGEX = re.compile(r"(?u)^(?P[- .,\w\d'’\"()]+)(?: <(?P.+?)>)?$") class Package(object): AVAILABLE_PYTHONS = {"2", "2.7", "3", "3.4", "3.5", "3.6", "3.7"} def __init__(self, name, version, pretty_version=None): """ Creates a new in memory package. """ self._pretty_name = name self._name = canonicalize_name(name) if not isinstance(version, Version): self._version = Version.parse(version) self._pretty_version = pretty_version or version else: self._version = version self._pretty_version = pretty_version or self._version.text self.description = "" self._authors = [] self.homepage = None self.repository_url = None self.documentation_url = None self.keywords = [] self._license = None self.readme = None self.source_type = "" self.source_reference = "" self.source_url = "" self.requires = [] self.dev_requires = [] self.extras = {} self.requires_extras = [] self.category = "main" self.hashes = [] self.optional = False self.classifiers = [] self._python_versions = "*" self._python_constraint = parse_constraint("*") self._python_marker = AnyMarker() self.platform = None self.marker = AnyMarker() self.root_dir = None self.develop = True @property def name(self): return self._name @property def pretty_name(self): return self._pretty_name @property def version(self): return self._version @property def pretty_version(self): return self._pretty_version @property def unique_name(self): if self.is_root(): return self._name return self.name + "-" + self._version.text @property def pretty_string(self): return self.pretty_name + " " + self.pretty_version @property def full_pretty_version(self): if 
self.source_type in ["file", "directory"]: return "{} {}".format(self._pretty_version, self.source_url) if self.source_type not in ["hg", "git"]: return self._pretty_version # if source reference is a sha1 hash -- truncate if len(self.source_reference) == 40: return "{} {}".format(self._pretty_version, self.source_reference[0:7]) return "{} {}".format(self._pretty_version, self.source_reference) @property def authors(self): # type: () -> list return self._authors @property def author_name(self): # type: () -> str return self._get_author()["name"] @property def author_email(self): # type: () -> str return self._get_author()["email"] @property def all_requires(self): return self.requires + self.dev_requires def _get_author(self): # type: () -> dict if not self._authors: return {"name": None, "email": None} m = AUTHOR_REGEX.match(self._authors[0]) name = m.group("name") email = m.group("email") return {"name": name, "email": email} @property def python_versions(self): return self._python_versions @python_versions.setter def python_versions(self, value): self._python_versions = value self._python_constraint = parse_constraint(value) self._python_marker = parse_marker( create_nested_marker("python_version", self._python_constraint) ) @property def python_constraint(self): return self._python_constraint @property def python_marker(self): return self._python_marker @property def license(self): return self._license @license.setter def license(self, value): if value is None: self._license = value elif isinstance(value, License): self._license = value else: self._license = license_by_id(value) @property def all_classifiers(self): classifiers = copy.copy(self.classifiers) # Automatically set python classifiers if self.python_versions == "*": python_constraint = parse_constraint("~2.7 || ^3.4") else: python_constraint = self.python_constraint for version in sorted(self.AVAILABLE_PYTHONS): if len(version) == 1: constraint = parse_constraint(version + ".*") else: constraint = 
Version.parse(version) if python_constraint.allows_any(constraint): classifiers.append( "Programming Language :: Python :: {}".format(version) ) # Automatically set license classifiers if self.license: classifiers.append(self.license.classifier) classifiers = set(classifiers) return sorted(classifiers) @property def urls(self): urls = {} if self.homepage: urls["Homepage"] = self.homepage if self.repository_url: urls["Repository"] = self.repository_url if self.documentation_url: urls["Documentation"] = self.documentation_url return urls def is_prerelease(self): return self._version.is_prerelease() def is_root(self): return False def add_dependency( self, name, # type: str constraint=None, # type: Union[str, dict, None] category="main", # type: str ): # type: (...) -> Dependency if constraint is None: constraint = "*" if isinstance(constraint, dict): optional = constraint.get("optional", False) python_versions = constraint.get("python") platform = constraint.get("platform") allows_prereleases = constraint.get("allows-prereleases", False) if "git" in constraint: # VCS dependency dependency = VCSDependency( name, "git", constraint["git"], branch=constraint.get("branch", None), tag=constraint.get("tag", None), rev=constraint.get("rev", None), optional=optional, ) elif "file" in constraint: file_path = Path(constraint["file"]) dependency = FileDependency( name, file_path, category=category, base=self.root_dir ) elif "path" in constraint: path = Path(constraint["path"]) if self.root_dir: is_file = (self.root_dir / path).is_file() else: is_file = path.is_file() if is_file: dependency = FileDependency( name, path, category=category, optional=optional, base=self.root_dir, ) else: dependency = DirectoryDependency( name, path, category=category, optional=optional, base=self.root_dir, develop=constraint.get("develop", True), ) else: version = constraint["version"] dependency = Dependency( name, version, optional=optional, category=category, 
allows_prereleases=allows_prereleases, ) marker = AnyMarker() if python_versions: dependency.python_versions = python_versions marker = marker.intersect( parse_marker( create_nested_marker( "python_version", dependency.python_constraint ) ) ) if platform: marker = marker.intersect( parse_marker( create_nested_marker( "sys_platform", parse_generic_constraint(platform) ) ) ) if not marker.is_any(): dependency.marker = marker if "extras" in constraint: for extra in constraint["extras"]: dependency.extras.append(extra) else: dependency = Dependency(name, constraint, category=category) if category == "dev": self.dev_requires.append(dependency) else: self.requires.append(dependency) return dependency def to_dependency(self): from . import dependency_from_pep_508 name = "{} (=={})".format(self._name, self._version) if not self.marker.is_any(): name += " ; {}".format(str(self.marker)) return dependency_from_pep_508(name) @contextmanager def with_python_versions(self, python_versions): original_python_versions = self.python_versions self.python_versions = python_versions yield self.python_versions = original_python_versions def clone(self): # type: () -> Package clone = self.__class__(self.pretty_name, self.version) clone.category = self.category clone.optional = self.optional clone.python_versions = self.python_versions clone.marker = self.marker clone.extras = self.extras clone.source_type = self.source_type clone.source_url = self.source_url clone.source_reference = self.source_reference for dep in self.requires: clone.requires.append(dep) for dep in self.dev_requires: clone.dev_requires.append(dep) return clone def __hash__(self): return hash((self._name, self._version)) def __eq__(self, other): if not isinstance(other, Package): return NotImplemented return self._name == other.name and self._version == other.version def __str__(self): return self.unique_name def __repr__(self): return "".format(self.unique_name) PK!}33%poetry/packages/package_collection.pyfrom 
.dependency_package import DependencyPackage class PackageCollection(list): def __init__(self, dependency, packages=None): self._dependency = dependency if packages is None: packages = [] super(PackageCollection, self).__init__() for package in packages: self.append(package) def append(self, package): if not isinstance(package, DependencyPackage): package = DependencyPackage(self._dependency, package) return super(PackageCollection, self).append(package) PK!v"poetry/packages/project_package.pyfrom poetry.semver import VersionRange from poetry.semver import parse_constraint from poetry.version.markers import parse_marker from .package import Package from .utils.utils import create_nested_marker class ProjectPackage(Package): def __init__(self, name, version, pretty_version=None): super(ProjectPackage, self).__init__(name, version, pretty_version) self.build = None self.packages = [] self.include = [] self.exclude = [] if self._python_versions == "*": self._python_constraint = parse_constraint("~2.7 || >=3.4") def is_root(self): return True def to_dependency(self): dependency = super(ProjectPackage, self).to_dependency() dependency.is_root = True return dependency @property def python_versions(self): return self._python_versions @python_versions.setter def python_versions(self, value): self._python_versions = value if value == "*" or value == VersionRange(): value = "~2.7 || >=3.4" self._python_constraint = parse_constraint(value) self._python_marker = parse_marker( create_nested_marker("python_version", self._python_constraint) ) def clone(self): # type: () -> ProjectPackage package = super(ProjectPackage, self).clone() package.build = self.build package.packages = self.packages[:] package.include = self.include[:] package.exclude = self.exclude[:] return package PK!!poetry/packages/utils/__init__.pyPK!1poetry/packages/utils/link.pyimport posixpath try: import urllib.parse as urlparse except ImportError: import urlparse import re from .utils import path_to_url from 
.utils import splitext class Link: def __init__(self, url, comes_from=None, requires_python=None): """ Object representing a parsed link from https://pypi.python.org/simple/* url: url of the resource pointed to (href of the link) comes_from: instance of HTMLPage where the link was found, or string. requires_python: String containing the `Requires-Python` metadata field, specified in PEP 345. This may be specified by a data-requires-python attribute in the HTML link tag, as described in PEP 503. """ # url can be a UNC windows share if url.startswith("\\\\"): url = path_to_url(url) self.url = url self.comes_from = comes_from self.requires_python = requires_python if requires_python else None def __str__(self): if self.requires_python: rp = " (requires-python:%s)" % self.requires_python else: rp = "" if self.comes_from: return "%s (from %s)%s" % (self.url, self.comes_from, rp) else: return str(self.url) def __repr__(self): return "" % self def __eq__(self, other): if not isinstance(other, Link): return NotImplemented return self.url == other.url def __ne__(self, other): if not isinstance(other, Link): return NotImplemented return self.url != other.url def __lt__(self, other): if not isinstance(other, Link): return NotImplemented return self.url < other.url def __le__(self, other): if not isinstance(other, Link): return NotImplemented return self.url <= other.url def __gt__(self, other): if not isinstance(other, Link): return NotImplemented return self.url > other.url def __ge__(self, other): if not isinstance(other, Link): return NotImplemented return self.url >= other.url def __hash__(self): return hash(self.url) @property def filename(self): _, netloc, path, _, _ = urlparse.urlsplit(self.url) name = posixpath.basename(path.rstrip("/")) or netloc name = urlparse.unquote(name) assert name, "URL %r produced no filename" % self.url return name @property def scheme(self): return urlparse.urlsplit(self.url)[0] @property def netloc(self): return 
urlparse.urlsplit(self.url)[1] @property def path(self): return urlparse.unquote(urlparse.urlsplit(self.url)[2]) def splitext(self): return splitext(posixpath.basename(self.path.rstrip("/"))) @property def ext(self): return self.splitext()[1] @property def url_without_fragment(self): scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url) return urlparse.urlunsplit((scheme, netloc, path, query, None)) _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") @property def egg_fragment(self): match = self._egg_fragment_re.search(self.url) if not match: return None return match.group(1) _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") @property def subdirectory_fragment(self): match = self._subdirectory_fragment_re.search(self.url) if not match: return None return match.group(1) _hash_re = re.compile(r"(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)") @property def hash(self): match = self._hash_re.search(self.url) if match: return match.group(2) return None @property def hash_name(self): match = self._hash_re.search(self.url) if match: return match.group(1) return None @property def show_url(self): return posixpath.basename(self.url.split("#", 1)[0].split("?", 1)[0]) @property def is_wheel(self): return self.ext == ".whl" @property def is_artifact(self): """ Determines if this points to an actual artifact (e.g. a tarball) or if it points to an "abstract" thing like a path or a VCS location. 
""" if self.scheme in ["ssh", "git", "hg", "bzr", "sftp", "svn"]: return False return True PK!h&poetry/packages/utils/utils.pyimport os import posixpath import re from poetry.packages.constraints.constraint import Constraint from poetry.packages.constraints.multi_constraint import MultiConstraint from poetry.packages.constraints.union_constraint import UnionConstraint from poetry.semver import Version from poetry.semver import VersionUnion from poetry.version.markers import MarkerUnion from poetry.version.markers import MultiMarker from poetry.version.markers import SingleMarker try: import urllib.parse as urlparse except ImportError: import urlparse try: import urllib.request as urllib2 except ImportError: import urllib2 BZ2_EXTENSIONS = (".tar.bz2", ".tbz") XZ_EXTENSIONS = (".tar.xz", ".txz", ".tlz", ".tar.lz", ".tar.lzma") ZIP_EXTENSIONS = (".zip", ".whl") TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar") ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS try: import bz2 # noqa SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS except ImportError: pass try: # Only for Python 3.3+ import lzma # noqa SUPPORTED_EXTENSIONS += XZ_EXTENSIONS except ImportError: pass def path_to_url(path): """ Convert a path to a file: URL. The path will be made absolute and have quoted path parts. 
""" path = os.path.normpath(os.path.abspath(path)) url = urlparse.urljoin("file:", urllib2.pathname2url(path)) return url def is_url(name): if ":" not in name: return False scheme = name.split(":", 1)[0].lower() return scheme in [ "http", "https", "file", "ftp", "ssh", "git", "hg", "bzr", "sftp", "svn" "ssh", ] def strip_extras(path): m = re.match(r"^(.+)(\[[^\]]+\])$", path) extras = None if m: path_no_extras = m.group(1) extras = m.group(2) else: path_no_extras = path return path_no_extras, extras def is_installable_dir(path): """Return True if `path` is a directory containing a setup.py file.""" if not os.path.isdir(path): return False setup_py = os.path.join(path, "setup.py") if os.path.isfile(setup_py): return True return False def is_archive_file(name): """Return True if `name` is a considered as an archive file.""" ext = splitext(name)[1].lower() if ext in ARCHIVE_EXTENSIONS: return True return False def splitext(path): """Like os.path.splitext, but take off .tar too""" base, ext = posixpath.splitext(path) if base.lower().endswith(".tar"): ext = base[-4:] + ext base = base[:-4] return base, ext def group_markers(markers, or_=False): groups = [[]] for marker in markers: if or_: groups.append([]) if isinstance(marker, (MultiMarker, MarkerUnion)): groups[-1].append( group_markers(marker.markers, isinstance(marker, MarkerUnion)) ) elif isinstance(marker, SingleMarker): lhs, op, rhs = marker.name, marker.operator, marker.value groups[-1].append((lhs, op, rhs)) return groups def convert_markers(marker): groups = group_markers([marker]) requirements = {} def _group(_groups, or_=False): ors = {} for group in _groups: if isinstance(group, list): _group(group, or_=True) else: variable, op, value = group group_name = str(variable) # python_full_version is equivalent to python_version # for Poetry so we merge them if group_name == "python_full_version": group_name = "python_version" if group_name not in requirements: requirements[group_name] = [] if group_name not in 
ors: ors[group_name] = or_ if ors[group_name] or not requirements[group_name]: requirements[group_name].append([]) requirements[group_name][-1].append((str(op), str(value))) ors[group_name] = False _group(groups, or_=True) return requirements def create_nested_marker(name, constraint): if constraint.is_any(): return "" if isinstance(constraint, (MultiConstraint, UnionConstraint)): parts = [] for c in constraint.constraints: multi = False if isinstance(c, (MultiConstraint, UnionConstraint)): multi = True parts.append((multi, create_nested_marker(name, c))) glue = " and " if isinstance(constraint, UnionConstraint): parts = ["({})".format(part[1]) if part[0] else part[1] for part in parts] glue = " or " else: parts = [part[1] for part in parts] marker = glue.join(parts) elif isinstance(constraint, Constraint): marker = '{} {} "{}"'.format(name, constraint.operator, constraint.version) elif isinstance(constraint, VersionUnion): parts = [] for c in constraint.ranges: parts.append(create_nested_marker(name, c)) glue = " or " parts = ["({})".format(part) for part in parts] marker = glue.join(parts) elif isinstance(constraint, Version): marker = '{} == "{}"'.format(name, constraint.text) else: if constraint.min is not None: op = ">=" if not constraint.include_min: op = ">" version = constraint.min.text if constraint.max is not None: text = '{} {} "{}"'.format(name, op, version) op = "<=" if not constraint.include_max: op = "<" version = constraint.max text += ' and {} {} "{}"'.format(name, op, version) return text elif constraint.max is not None: op = "<=" if not constraint.include_max: op = "<" version = constraint.max else: return "" marker = '{} {} "{}"'.format(name, op, version) return marker PK!ٺg!poetry/packages/vcs_dependency.pyfrom .dependency import Dependency class VCSDependency(Dependency): """ Represents a VCS dependency """ def __init__( self, name, vcs, source, branch=None, tag=None, rev=None, optional=False ): self._vcs = vcs self._source = source if not 
any([branch, tag, rev]): # If nothing has been specified, we assume master branch = "master" self._branch = branch self._tag = tag self._rev = rev super(VCSDependency, self).__init__( name, "*", optional=optional, allows_prereleases=True ) @property def vcs(self): return self._vcs @property def source(self): return self._source @property def branch(self): return self._branch @property def tag(self): return self._tag @property def rev(self): return self._rev @property def reference(self): # type: () -> str return self._branch or self._tag or self._rev @property def pretty_constraint(self): # type: () -> str if self._branch: what = "branch" version = self._branch elif self._tag: what = "tag" version = self._tag else: what = "rev" version = self._rev return "{} {}".format(what, version) def is_vcs(self): # type: () -> bool return True def accepts_prereleases(self): return True PK!+""poetry/poetry.pyfrom __future__ import absolute_import from __future__ import unicode_literals import shutil from typing import Dict from typing import List from .__version__ import __version__ from .config import Config from .json import validate_object from .packages import Dependency from .packages import Locker from .packages import Package from .packages import ProjectPackage from .repositories import Pool from .repositories.auth import Auth from .repositories.legacy_repository import LegacyRepository from .repositories.pypi_repository import PyPiRepository from .spdx import license_by_id from .utils._compat import Path from .utils.helpers import get_http_basic_auth from .utils.toml_file import TomlFile class Poetry: VERSION = __version__ def __init__( self, file, # type: Path local_config, # type: dict package, # type: Package locker, # type: Locker ): self._file = TomlFile(file) self._package = package self._local_config = local_config self._locker = locker self._config = Config.create("config.toml") self._auth_config = Config.create("auth.toml") # Configure sources self._pool = 
Pool() for source in self._local_config.get("source", []): self._pool.add_repository(self.create_legacy_repository(source)) # Always put PyPI last to prefer private repositories self._pool.add_repository(PyPiRepository()) @property def file(self): return self._file @property def package(self): # type: () -> Package return self._package @property def local_config(self): # type: () -> dict return self._local_config @property def locker(self): # type: () -> Locker return self._locker @property def pool(self): # type: () -> Pool return self._pool @property def config(self): # type: () -> Config return self._config @property def auth_config(self): # type: () -> Config return self._auth_config @classmethod def create(cls, cwd): # type: (Path) -> Poetry candidates = [Path(cwd)] candidates.extend(Path(cwd).parents) for path in candidates: poetry_file = path / "pyproject.toml" if poetry_file.exists(): break else: raise RuntimeError( "Poetry could not find a pyproject.toml file in {} or its parents".format( cwd ) ) local_config = TomlFile(poetry_file.as_posix()).read() if "tool" not in local_config or "poetry" not in local_config["tool"]: raise RuntimeError( "[tool.poetry] section not found in {}".format(poetry_file.name) ) local_config = local_config["tool"]["poetry"] # Checking validity cls.check(local_config) # Load package name = local_config["name"] version = local_config["version"] package = ProjectPackage(name, version, version) package.root_dir = poetry_file.parent for author in local_config["authors"]: package.authors.append(author) package.description = local_config.get("description", "") package.homepage = local_config.get("homepage") package.repository_url = local_config.get("repository") package.documentation_url = local_config.get("documentation") try: license_ = license_by_id(local_config.get("license", "")) except ValueError: license_ = None package.license = license_ package.keywords = local_config.get("keywords", []) package.classifiers = 
local_config.get("classifiers", []) if "readme" in local_config: package.readme = Path(poetry_file.parent) / local_config["readme"] if "platform" in local_config: package.platform = local_config["platform"] if "dependencies" in local_config: for name, constraint in local_config["dependencies"].items(): if name.lower() == "python": package.python_versions = constraint continue if isinstance(constraint, list): for _constraint in constraint: package.add_dependency(name, _constraint) continue package.add_dependency(name, constraint) if "dev-dependencies" in local_config: for name, constraint in local_config["dev-dependencies"].items(): if isinstance(constraint, list): for _constraint in constraint: package.add_dependency(name, _constraint) continue package.add_dependency(name, constraint, category="dev") extras = local_config.get("extras", {}) for extra_name, requirements in extras.items(): package.extras[extra_name] = [] # Checking for dependency for req in requirements: req = Dependency(req, "*") for dep in package.requires: if dep.name == req.name: dep.in_extras.append(extra_name) package.extras[extra_name].append(dep) break if "build" in local_config: package.build = local_config["build"] if "include" in local_config: package.include = local_config["include"] if "exclude" in local_config: package.exclude = local_config["exclude"] if "packages" in local_config: package.packages = local_config["packages"] # Moving lock if necessary (pyproject.lock -> poetry.lock) lock = poetry_file.parent / "poetry.lock" if not lock.exists(): # Checking for pyproject.lock old_lock = poetry_file.with_suffix(".lock") if old_lock.exists(): shutil.move(str(old_lock), str(lock)) locker = Locker(poetry_file.parent / "poetry.lock", local_config) return cls(poetry_file, local_config, package, locker) def create_legacy_repository( self, source ): # type: (Dict[str, str]) -> LegacyRepository if "url" in source: # PyPI-like repository if "name" not in source: raise RuntimeError("Missing [name] 
in source.") else: raise RuntimeError("Unsupported source specified") name = source["name"] url = source["url"] credentials = get_http_basic_auth(self._auth_config, name) if not credentials: return LegacyRepository(name, url) auth = Auth(url, credentials[0], credentials[1]) return LegacyRepository(name, url, auth=auth) @classmethod def check(cls, config, strict=False): # type: (dict, bool) -> Dict[str, List[str]] """ Checks the validity of a configuration """ result = {"errors": [], "warnings": []} # Schema validation errors validation_errors = validate_object(config, "poetry-schema") result["errors"] += validation_errors if strict: # If strict, check the file more thoroughly # Checking license license = config.get("license") if license: try: license_by_id(license) except ValueError: result["errors"].append("{} is not a valid license".format(license)) if "dependencies" in config: python_versions = config["dependencies"]["python"] if python_versions == "*": result["warnings"].append( "A wildcard Python dependency is ambiguous. " "Consider specifying a more explicit one." ) # Checking for scripts with extras if "scripts" in config: scripts = config["scripts"] for name, script in scripts.items(): if not isinstance(script, dict): continue extras = script["extras"] for extra in extras: if extra not in config["extras"]: result["errors"].append( 'Script "{}" requires extra "{}" which is not defined.'.format( name, extra ) ) return result PK!-;}poetry/puzzle/__init__.pyfrom .solver import Solver PK!#poetry/puzzle/dependencies.pyclass Dependencies: """ Proxy to package dependencies to only require them when needed. 
""" def __init__(self, package, provider): self._package = package self._provider = provider self._dependencies = None @property def dependencies(self): if self._dependencies is None: self._dependencies = self._get_dependencies() return self._dependencies def _get_dependencies(self): self._provider.debug("Getting dependencies for {}".format(self._package), 0) dependencies = self._provider._dependencies_for(self._package) if dependencies is None: dependencies = [] return dependencies def __len__(self): return len(self.dependencies) def __iter__(self): return self.dependencies.__iter__() def __add__(self, other): return self.dependencies + other __radd__ = __add__ PK!EOG)poetry/puzzle/exceptions.pyclass CompatibilityError(Exception): def __init__(self, *constraints): self._constraints = list(constraints) @property def constraints(self): return self._constraints class SolverProblemError(Exception): def __init__(self, error): self._error = error super(SolverProblemError, self).__init__(str(error)) @property def error(self): return self._error PK!,MYY$poetry/puzzle/operations/__init__.pyfrom .install import Install from .uninstall import Uninstall from .update import Update PK!h9ii#poetry/puzzle/operations/install.pyfrom .operation import Operation class Install(Operation): def __init__(self, package, reason=None): super(Install, self).__init__(reason) self._package = package @property def package(self): return self._package @property def job_type(self): return "install" def __str__(self): return "Installing {} ({})".format( self.package.pretty_name, self.format_version(self.package) ) def __repr__(self): return "".format( self.package.pretty_name, self.format_version(self.package) ) PK!Tk++%poetry/puzzle/operations/operation.py# -*- coding: utf-8 -*- from typing import Union class Operation(object): def __init__(self, reason=None): # type: (Union[str, None]) -> None self._reason = reason self._skipped = False self._skip_reason = None @property def job_type(self): # 
type: () -> str raise NotImplementedError @property def reason(self): # type: () -> str return self._reason @property def skipped(self): # type: () -> bool return self._skipped @property def skip_reason(self): # type: () -> Union[str, None] return self._skip_reason @property def package(self): raise NotImplementedError() def format_version(self, package): # type: (...) -> str return package.full_pretty_version def skip(self, reason): # type: (str) -> Operation self._skipped = True self._skip_reason = reason return self def unskip(self): # type: () -> Operation self._skipped = False self._skip_reason = None return self PK!^Ntt%poetry/puzzle/operations/uninstall.pyfrom .operation import Operation class Uninstall(Operation): def __init__(self, package, reason=None): super(Uninstall, self).__init__(reason) self._package = package @property def package(self): return self._package @property def job_type(self): return "uninstall" def __str__(self): return "Uninstalling {} ({})".format( self.package.pretty_name, self.format_version(self._package) ) def __repr__(self): return "".format( self.package.pretty_name, self.format_version(self.package) ) PK!Y%{ZZ"poetry/puzzle/operations/update.pyfrom .operation import Operation class Update(Operation): def __init__(self, initial, target, reason=None): self._initial_package = initial self._target_package = target super(Update, self).__init__(reason) @property def initial_package(self): return self._initial_package @property def target_package(self): return self._target_package @property def package(self): return self._target_package @property def job_type(self): return "update" def __str__(self): return "Updating {} ({}) to {} ({})".format( self.initial_package.pretty_name, self.format_version(self.initial_package), self.target_package.pretty_name, self.format_version(self.target_package), ) def __repr__(self): return "".format( self.initial_package.pretty_name, self.format_version(self.initial_package), 
self.target_package.pretty_name, self.format_version(self.target_package), ) PK!bbpoetry/puzzle/provider.pyimport glob import logging import os import pkginfo import re import time from cleo import ProgressIndicator from contextlib import contextmanager from tempfile import mkdtemp from typing import List from poetry.packages import Dependency from poetry.packages import DependencyPackage from poetry.packages import DirectoryDependency from poetry.packages import FileDependency from poetry.packages import Package from poetry.packages import PackageCollection from poetry.packages import VCSDependency from poetry.packages import dependency_from_pep_508 from poetry.mixology.incompatibility import Incompatibility from poetry.mixology.incompatibility_cause import DependencyCause from poetry.mixology.incompatibility_cause import PythonCause from poetry.mixology.term import Term from poetry.repositories import Pool from poetry.utils._compat import PY35 from poetry.utils._compat import Path from poetry.utils._compat import OrderedDict from poetry.utils.helpers import parse_requires from poetry.utils.helpers import safe_rmtree from poetry.utils.env import Env from poetry.utils.env import EnvCommandError from poetry.utils.setup_reader import SetupReader from poetry.version.markers import MarkerUnion from poetry.vcs.git import Git from .exceptions import CompatibilityError logger = logging.getLogger(__name__) class Indicator(ProgressIndicator): def __init__(self, output): super(Indicator, self).__init__(output) self.format = "%message% (%elapsed:2s%)" @contextmanager def auto(self): message = "Resolving dependencies..." with super(Indicator, self).auto(message, message): yield def _formatter_elapsed(self): elapsed = time.time() - self.start_time return "{:.1f}s".format(elapsed) class Provider: UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"} def __init__(self, package, pool, io): # type: (Package, Pool, ...) 
-> None self._package = package self._pool = pool self._io = io self._python_constraint = package.python_constraint self._search_for = {} self._is_debugging = self._io.is_debug() or self._io.is_very_verbose() self._in_progress = False @property def pool(self): # type: () -> Pool return self._pool @property def name_for_explicit_dependency_source(self): # type: () -> str return "pyproject.toml" @property def name_for_locking_dependency_source(self): # type: () -> str return "poetry.lock" def is_debugging(self): return self._is_debugging def name_for(self, dependency): # type: (Dependency) -> str """ Returns the name for the given dependency. """ return dependency.name def search_for(self, dependency): # type: (Dependency) -> List[Package] """ Search for the specifications that match the given dependency. The specifications in the returned list will be considered in reverse order, so the latest version ought to be last. """ if dependency.is_root: return PackageCollection(dependency, [self._package]) for constraint in self._search_for.keys(): if ( constraint.name == dependency.name and constraint.constraint.intersect(dependency.constraint) == dependency.constraint ): packages = [ p for p in self._search_for[constraint] if dependency.constraint.allows(p.version) ] packages.sort( key=lambda p: ( not p.is_prerelease() and not dependency.allows_prereleases(), p.version, ), reverse=True, ) return PackageCollection(dependency, packages) if dependency.is_vcs(): packages = self.search_for_vcs(dependency) elif dependency.is_file(): packages = self.search_for_file(dependency) elif dependency.is_directory(): packages = self.search_for_directory(dependency) else: constraint = dependency.constraint packages = self._pool.find_packages( dependency.name, constraint, extras=dependency.extras, allow_prereleases=dependency.allows_prereleases(), ) packages.sort( key=lambda p: ( not p.is_prerelease() and not dependency.allows_prereleases(), p.version, ), reverse=True, ) 
self._search_for[dependency] = packages return PackageCollection(dependency, packages) def search_for_vcs(self, dependency): # type: (VCSDependency) -> List[Package] """ Search for the specifications that match the given VCS dependency. Basically, we clone the repository in a temporary directory and get the information we need by checking out the specified reference. """ if dependency.vcs != "git": raise ValueError("Unsupported VCS dependency {}".format(dependency.vcs)) tmp_dir = Path(mkdtemp(prefix="pypoetry-git-{}".format(dependency.name))) try: git = Git() git.clone(dependency.source, tmp_dir) git.checkout(dependency.reference, tmp_dir) revision = git.rev_parse(dependency.reference, tmp_dir).strip() if dependency.tag or dependency.rev: revision = dependency.reference directory_dependency = DirectoryDependency( dependency.name, tmp_dir, category=dependency.category, optional=dependency.is_optional(), ) for extra in dependency.extras: directory_dependency.extras.append(extra) package = self.search_for_directory(directory_dependency)[0] package.source_type = "git" package.source_url = dependency.source package.source_reference = revision except Exception: raise finally: safe_rmtree(str(tmp_dir)) return [package] def search_for_file(self, dependency): # type: (FileDependency) -> List[Package] if dependency.path.suffix == ".whl": meta = pkginfo.Wheel(str(dependency.full_path)) else: # Assume sdist meta = pkginfo.SDist(str(dependency.full_path)) if dependency.name != meta.name: # For now, the dependency's name must match the actual package's name raise RuntimeError( "The dependency name for {} does not match the actual package's name: {}".format( dependency.name, meta.name ) ) package = Package(meta.name, meta.version) package.source_type = "file" package.source_url = dependency.path.as_posix() package.description = meta.summary for req in meta.requires_dist: dep = dependency_from_pep_508(req) for extra in dep.in_extras: if extra not in package.extras: 
package.extras[extra] = [] package.extras[extra].append(dep) if not dep.is_optional(): package.requires.append(dep) if meta.requires_python: package.python_versions = meta.requires_python package.hashes = [dependency.hash()] for extra in dependency.extras: if extra in package.extras: for dep in package.extras[extra]: dep.activate() package.requires += package.extras[extra] return [package] def search_for_directory( self, dependency ): # type: (DirectoryDependency) -> List[Package] if dependency.supports_poetry(): from poetry.poetry import Poetry poetry = Poetry.create(dependency.full_path) pkg = poetry.package package = Package(pkg.name, pkg.version) for dep in pkg.requires: if not dep.is_optional(): package.requires.append(dep) for extra, deps in pkg.extras.items(): if extra not in package.extras: package.extras[extra] = [] for dep in deps: package.extras[extra].append(dep) package.python_versions = pkg.python_versions else: # Execute egg_info current_dir = os.getcwd() os.chdir(str(dependency.full_path)) try: cwd = dependency.full_path venv = Env.get(cwd) venv.run("python", "setup.py", "egg_info") except EnvCommandError: result = SetupReader.read_from_directory(dependency.full_path) if not result["name"]: # The name could not be determined # We use the dependency name result["name"] = dependency.name if not result["version"]: # The version could not be determined # so we raise an error since it is mandatory raise RuntimeError( "Unable to retrieve the package version for {}".format( dependency.path ) ) package_name = result["name"] package_version = result["version"] python_requires = result["python_requires"] if python_requires is None: python_requires = "*" package_summary = "" requires = "" for dep in result["install_requires"]: requires += dep + "\n" if result["extras_require"]: requires += "\n" for extra_name, deps in result["extras_require"].items(): requires += "[{}]\n".format(extra_name) for dep in deps: requires += dep + "\n" requires += "\n" reqs = 
parse_requires(requires) else: os.chdir(current_dir) # Sometimes pathlib will fail on recursive # symbolic links, so we need to workaround it # and use the glob module instead. # Note that this does not happen with pathlib2 # so it's safe to use it for Python < 3.4. if PY35: egg_info = next( Path(p) for p in glob.glob( os.path.join(str(dependency.full_path), "**", "*.egg-info"), recursive=True, ) ) else: egg_info = next(dependency.full_path.glob("**/*.egg-info")) meta = pkginfo.UnpackedSDist(str(egg_info)) package_name = meta.name package_version = meta.version package_summary = meta.summary python_requires = meta.requires_python if meta.requires_dist: reqs = list(meta.requires_dist) else: reqs = [] requires = egg_info / "requires.txt" if requires.exists(): with requires.open() as f: reqs = parse_requires(f.read()) finally: os.chdir(current_dir) package = Package(package_name, package_version) if dependency.name != package.name: # For now, the dependency's name must match the actual package's name raise RuntimeError( "The dependency name for {} does not match the actual package's name: {}".format( dependency.name, package.name ) ) package.description = package_summary for req in reqs: dep = dependency_from_pep_508(req) if dep.in_extras: for extra in dep.in_extras: if extra not in package.extras: package.extras[extra] = [] package.extras[extra].append(dep) if not dep.is_optional(): package.requires.append(dep) if python_requires: package.python_versions = python_requires package.source_type = "directory" package.source_url = dependency.path.as_posix() for extra in dependency.extras: if extra in package.extras: for dep in package.extras[extra]: dep.activate() package.requires += package.extras[extra] return [package] def incompatibilities_for( self, package ): # type: (DependencyPackage) -> List[Incompatibility] """ Returns incompatibilities that encapsulate a given package's dependencies, or that it can't be safely selected. 
If multiple subsequent versions of this package have the same dependencies, this will return incompatibilities that reflect that. It won't return incompatibilities that have already been returned by a previous call to _incompatibilities_for(). """ if package.is_root(): dependencies = package.all_requires else: dependencies = package.requires if not package.python_constraint.allows_all( self._package.python_constraint ): intersection = package.python_constraint.intersect( package.dependency.transitive_python_constraint ) difference = package.dependency.transitive_python_constraint.difference( intersection ) if ( package.dependency.transitive_python_constraint.is_any() or self._package.python_constraint.intersect( package.dependency.python_constraint ).is_empty() or intersection.is_empty() or not difference.is_empty() ): return [ Incompatibility( [Term(package.to_dependency(), True)], PythonCause( package.python_versions, self._package.python_versions ), ) ] dependencies = [ dep for dep in dependencies if dep.name not in self.UNSAFE_PACKAGES and self._package.python_constraint.allows_any(dep.python_constraint) ] return [ Incompatibility( [Term(package.to_dependency(), True), Term(dep, False)], DependencyCause(), ) for dep in dependencies ] def complete_package( self, package ): # type: (DependencyPackage) -> DependencyPackage if package.is_root(): package = package.clone() if not package.is_root() and package.source_type not in { "directory", "file", "git", }: package = DependencyPackage( package.dependency, self._pool.package( package.name, package.version.text, extras=package.requires_extras ), ) dependencies = [ r for r in package.requires if self._package.python_constraint.allows_any(r.python_constraint) ] # Searching for duplicate dependencies # # If the duplicate dependencies have the same constraint, # the requirements will be merged. 
# # For instance: # - enum34; python_version=="2.7" # - enum34; python_version=="3.3" # # will become: # - enum34; python_version=="2.7" or python_version=="3.3" # # If the duplicate dependencies have different constraints # we have to split the dependency graph. # # An example of this is: # - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6" # - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6" duplicates = OrderedDict() for dep in dependencies: if dep.name not in duplicates: duplicates[dep.name] = [] duplicates[dep.name].append(dep) dependencies = [] for dep_name, deps in duplicates.items(): if len(deps) == 1: dependencies.append(deps[0]) continue self.debug("Duplicate dependencies for {}".format(dep_name)) # Regrouping by constraint by_constraint = OrderedDict() for dep in deps: if dep.constraint not in by_constraint: by_constraint[dep.constraint] = [] by_constraint[dep.constraint].append(dep) # We merge by constraint for constraint, _deps in by_constraint.items(): new_markers = [] for dep in _deps: marker = dep.marker.without_extras() if marker.is_empty(): # No marker or only extras continue new_markers.append(marker) if not new_markers: continue dep = _deps[0] dep.marker = dep.marker.union(MarkerUnion(*new_markers)) by_constraint[constraint] = [dep] continue if len(by_constraint) == 1: self.debug( "Merging requirements for {}".format(str(deps[0])) ) dependencies.append(list(by_constraint.values())[0][0]) continue # We leave dependencies as-is if they have the same # python/platform constraints. # That way the resolver will pickup the conflict # and display a proper error. 
_deps = [value[0] for value in by_constraint.values()] seen = set() for _dep in _deps: pep_508_dep = _dep.to_pep_508(False) if ";" not in pep_508_dep: _requirements = "" else: _requirements = pep_508_dep.split(";")[1].strip() if _requirements not in seen: seen.add(_requirements) if len(_deps) != len(seen): for _dep in _deps: dependencies.append(_dep) continue # At this point, we raise an exception that will # tell the solver to enter compatibility mode # which means it will resolve for subsets # Python constraints # # For instance, if our root package requires Python ~2.7 || ^3.6 # And we have one dependency that requires Python <3.6 # and the other Python >=3.6 than the solver will solve # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6 # and Python >=3.6,<4.0 python_constraints = [] for constraint, _deps in by_constraint.items(): python_constraints.append(_deps[0].python_versions) _deps = [str(_dep[0]) for _dep in by_constraint.values()] self.debug( "Different requirements found for {}.".format( ", ".join(_deps[:-1]) + " and " + _deps[-1] ) ) raise CompatibilityError(*python_constraints) # Modifying dependencies as needed for dep in dependencies: if not package.dependency.python_constraint.is_any(): dep.transitive_python_versions = str( dep.python_constraint.intersect( package.dependency.python_constraint ) ) if (package.dependency.is_directory() or package.dependency.is_file()) and ( dep.is_directory() or dep.is_file() ): if dep.path.as_posix().startswith(package.source_url): relative = (Path(package.source_url) / dep.path).relative_to( package.source_url ) else: relative = Path(package.source_url) / dep.path # TODO: Improve the way we set the correct relative path for dependencies dep._path = relative package.requires = dependencies return package # UI @property def output(self): return self._io def debug(self, message, depth=0): if not (self.output.is_very_verbose() or self.output.is_debug()): return if message.startswith("fact:"): if "depends on" in message: 
m = re.match(r"fact: (.+?) depends on (.+?) \((.+?)\)", message) m2 = re.match(r"(.+?) \((.+?)\)", m.group(1)) if m2: name = m2.group(1) version = " ({})".format(m2.group(2)) else: name = m.group(1) version = "" message = ( "fact: {}{} " "depends on {} ({})".format( name, version, m.group(2), m.group(3) ) ) elif " is " in message: message = re.sub( "fact: (.+) is (.+)", "fact: \\1 is \\2", message, ) else: message = re.sub( r"(?<=: )(.+?) \((.+?)\)", "\\1 (\\2)", message, ) message = "fact: {}".format(message.split("fact: ")[1]) elif message.startswith("selecting "): message = re.sub( r"selecting (.+?) \((.+?)\)", "selecting \\1 (\\2)", message, ) elif message.startswith("derived:"): m = re.match(r"derived: (.+?) \((.+?)\)$", message) if m: message = "derived: {} ({})".format( m.group(1), m.group(2) ) else: message = "derived: {}".format( message.split("derived: ")[1] ) elif message.startswith("conflict:"): m = re.match(r"conflict: (.+?) depends on (.+?) \((.+?)\)", message) if m: m2 = re.match(r"(.+?) \((.+?)\)", m.group(1)) if m2: name = m2.group(1) version = " ({})".format(m2.group(2)) else: name = m.group(1) version = "" message = ( "conflict: {}{} " "depends on {} ({})".format( name, version, m.group(2), m.group(3) ) ) else: message = "conflict: {}".format( message.split("conflict: ")[1] ) message = message.replace("! ", "! 
") if self.is_debugging(): debug_info = str(message) debug_info = ( "\n".join( [ "{}: {}".format(str(depth).rjust(4), s) for s in debug_info.split("\n") ] ) + "\n" ) self.output.write(debug_info) @contextmanager def progress(self): if not self._io.is_decorated() or self.is_debugging(): self.output.writeln("Resolving dependencies...") yield else: indicator = Indicator(self._io) with indicator.auto(): yield self._in_progress = False PK!2 - -poetry/puzzle/solver.pyimport time from typing import Any from typing import Dict from typing import List from poetry.mixology import resolve_version from poetry.mixology.failure import SolveFailure from poetry.packages import DependencyPackage from poetry.packages import Package from poetry.semver import parse_constraint from poetry.version.markers import AnyMarker from .exceptions import CompatibilityError from .exceptions import SolverProblemError from .operations import Install from .operations import Uninstall from .operations import Update from .operations.operation import Operation from .provider import Provider class Solver: def __init__(self, package, pool, installed, locked, io): self._package = package self._pool = pool self._installed = installed self._locked = locked self._io = io self._provider = Provider(self._package, self._pool, self._io) self._branches = [] def solve(self, use_latest=None): # type: (...) 
-> List[Operation] with self._provider.progress(): start = time.time() packages, depths = self._solve(use_latest=use_latest) end = time.time() if len(self._branches) > 1: self._provider.debug( "Complete version solving took {:.3f} seconds for {} branches".format( end - start, len(self._branches[1:]) ) ) self._provider.debug( "Resolved for branches: {}".format( ", ".join("({})".format(b) for b in self._branches[1:]) ) ) operations = [] for package in packages: installed = False for pkg in self._installed.packages: if package.name == pkg.name: installed = True if pkg.source_type == "git" and package.source_type == "git": # Trying to find the currently installed version for locked in self._locked.packages: if ( locked.name == pkg.name and locked.source_type == pkg.source_type and locked.source_url == pkg.source_url and locked.source_reference == pkg.source_reference ): pkg = Package(pkg.name, locked.version) pkg.source_type = "git" pkg.source_url = locked.source_url pkg.source_reference = locked.source_reference break if ( pkg.source_url != package.source_url or pkg.source_reference != package.source_reference ): operations.append(Update(pkg, package)) else: operations.append( Install(package).skip("Already installed") ) elif package.version != pkg.version: # Checking version operations.append(Update(pkg, package)) else: operations.append(Install(package).skip("Already installed")) break if not installed: operations.append(Install(package)) # Checking for removals for pkg in self._locked.packages: remove = True for package in packages: if pkg.name == package.name: remove = False break if remove: skip = True for installed in self._installed.packages: if installed.name == pkg.name: skip = False break op = Uninstall(pkg) if skip: op.skip("Not currently installed") operations.append(op) return sorted( operations, key=lambda o: ( o.job_type == "uninstall", # Packages to be uninstalled have no depth so we default to 0 # since it actually doesn't matter since removals are 
always on top. -depths[packages.index(o.package)] if o.job_type != "uninstall" else 0, o.package.name, o.package.version, ), ) def solve_in_compatibility_mode(self, constraints, use_latest=None): locked = {} for package in self._locked.packages: locked[package.name] = DependencyPackage(package.to_dependency(), package) packages = [] depths = [] for constraint in constraints: constraint = parse_constraint(constraint) intersection = constraint.intersect(self._package.python_constraint) self._provider.debug( "Retrying dependency resolution " "for Python ({}).".format(intersection) ) with self._package.with_python_versions(str(intersection)): _packages, _depths = self._solve(use_latest=use_latest) for index, package in enumerate(_packages): if package not in packages: packages.append(package) depths.append(_depths[index]) continue else: idx = packages.index(package) pkg = packages[idx] depths[idx] = max(depths[idx], _depths[index]) pkg.marker = pkg.marker.union(package.marker) for dep in package.requires: if dep not in pkg.requires: pkg.requires.append(dep) return packages, depths def _solve(self, use_latest=None): self._branches.append(self._package.python_versions) locked = {} for package in self._locked.packages: locked[package.name] = DependencyPackage(package.to_dependency(), package) try: result = resolve_version( self._package, self._provider, locked=locked, use_latest=use_latest ) packages = result.packages except CompatibilityError as e: return self.solve_in_compatibility_mode( e.constraints, use_latest=use_latest ) except SolveFailure as e: raise SolverProblemError(e) graph = self._build_graph(self._package, packages) depths = [] for package in packages: category, optional, marker, depth = self._get_tags_for_package( package, graph ) if marker is None: marker = AnyMarker() package.category = category package.optional = optional package.marker = marker depths.append(depth) return packages, depths def _build_graph( self, package, packages, previous=None, 
previous_dep=None, dep=None ): # type: (...) -> Dict[str, Any] if not previous: category = "dev" optional = True marker = package.marker else: category = dep.category optional = dep.is_optional() and not dep.is_activated() intersection = previous["marker"].intersect(previous_dep.marker) intersection = intersection.intersect(package.marker) marker = intersection graph = { "name": package.name, "category": category, "optional": optional, "marker": marker, "children": [], # type: List[Dict[str, Any]] } if previous_dep and previous_dep is not dep and previous_dep.name == dep.name: return graph for dependency in package.all_requires: is_activated = True if dependency.is_optional(): if not package.is_root() and ( not previous_dep or not previous_dep.extras ): continue is_activated = False for group, extra_deps in package.extras.items(): if dep: extras = previous_dep.extras elif package.is_root(): extras = package.extras else: extras = [] if group in extras and dependency.name in ( d.name for d in package.extras[group] ): is_activated = True break if previous and previous["name"] == dependency.name: break for pkg in packages: if pkg.name == dependency.name and dependency.constraint.allows( pkg.version ): # If there is already a child with this name # we merge the requirements existing = None for child in graph["children"]: if ( child["name"] == pkg.name and child["category"] == dependency.category ): existing = child continue child_graph = self._build_graph( pkg, packages, graph, dependency, dep or dependency ) if not is_activated: child_graph["optional"] = True if existing: existing["marker"] = existing["marker"].union( child_graph["marker"] ) continue graph["children"].append(child_graph) return graph def _get_tags_for_package(self, package, graph, depth=0): categories = ["dev"] optionals = [True] markers = [] _depths = [0] children = graph["children"] for child in children: if child["name"] == package.name: category = child["category"] optional = child["optional"] 
marker = child["marker"] _depths.append(depth) else: (category, optional, marker, _depth) = self._get_tags_for_package( package, child, depth=depth + 1 ) _depths.append(_depth) categories.append(category) optionals.append(optional) if marker is not None: markers.append(marker) if "main" in categories: category = "main" else: category = "dev" optional = all(optionals) depth = max(*(_depths + [0])) if not markers: marker = None else: marker = markers[0] for m in markers[1:]: marker = marker.union(m) return category, optional, marker, depth PK!y::poetry/repositories/__init__.pyfrom .pool import Pool from .repository import Repository PK!R%%poetry/repositories/auth.pyfrom requests import Request from requests.auth import AuthBase from requests.auth import HTTPBasicAuth from poetry.utils._compat import urlparse class Auth(AuthBase): def __init__(self, url, username, password): # type: (str, str, str) -> None self._hostname = urlparse.urlparse(url).hostname self._auth = HTTPBasicAuth(username, password) def __call__(self, r): # type: (Request) -> Request if urlparse.urlparse(r.url).hostname != self._hostname: return r self._auth(r) return r PK! 
uKK&poetry/repositories/base_repository.pyclass BaseRepository(object): SEARCH_FULLTEXT = 0 SEARCH_NAME = 1 def __init__(self): self._packages = [] @property def packages(self): return self._packages def has_package(self, package): raise NotImplementedError() def package(self, name, version, extras=None): raise NotImplementedError() def find_packages( self, name, constraint=None, extras=None, allow_prereleases=False ): raise NotImplementedError() def search(self, query, mode=SEARCH_FULLTEXT): raise NotImplementedError() PK!QtZZ!poetry/repositories/exceptions.pyclass RepositoryError(Exception): pass class PackageNotFound(Exception): pass PK!WVRR+poetry/repositories/installed_repository.pyimport re from poetry.packages import Package from poetry.utils.env import Env from .repository import Repository class InstalledRepository(Repository): @classmethod def load(cls, env): # type: (Env) -> InstalledRepository """ Load installed packages. For now, it uses the pip "freeze" command. """ repo = cls() freeze_output = env.run("pip", "freeze") for line in freeze_output.split("\n"): if "==" in line: name, version = re.split("={2,3}", line) repo.add_package(Package(name, version, version)) elif line.startswith("-e "): line = line[3:].strip() if line.startswith("git+"): url = line.lstrip("git+") if "@" in url: url, rev = url.rsplit("@", 1) else: rev = "master" name = url.split("/")[-1].rstrip(".git") if "#egg=" in rev: rev, name = rev.split("#egg=") package = Package(name, "0.0.0") package.source_type = "git" package.source_url = url package.source_reference = rev repo.add_package(package) return repo PK!] 
..(poetry/repositories/legacy_repository.pyimport cgi import re try: import urllib.parse as urlparse except ImportError: import urlparse try: from html import unescape except ImportError: try: from html.parser import HTMLParser except ImportError: from HTMLParser import HTMLParser unescape = HTMLParser().unescape from collections import defaultdict from typing import Generator from typing import Optional from typing import Union import html5lib import requests from cachecontrol import CacheControl from cachecontrol.caches.file_cache import FileCache from cachy import CacheManager import poetry.packages from poetry.locations import CACHE_DIR from poetry.packages import Package from poetry.packages import dependency_from_pep_508 from poetry.packages.utils.link import Link from poetry.semver import parse_constraint from poetry.semver import Version from poetry.semver import VersionConstraint from poetry.semver import VersionRange from poetry.utils._compat import Path from poetry.utils.helpers import canonicalize_name from poetry.utils.patterns import wheel_file_re from poetry.version.markers import InvalidMarker from .auth import Auth from .exceptions import PackageNotFound from .pypi_repository import PyPiRepository class Page: VERSION_REGEX = re.compile(r"(?i)([a-z0-9_\-.]+?)-(?=\d)([a-z0-9_.!+-]+)") SUPPORTED_FORMATS = [ ".tar.gz", ".whl", ".zip", ".tar.bz2", ".tar.xz", ".tar.Z", ".tar", ] def __init__(self, url, content, headers): if not url.endswith("/"): url += "/" self._url = url encoding = None if headers and "Content-Type" in headers: content_type, params = cgi.parse_header(headers["Content-Type"]) if "charset" in params: encoding = params["charset"] self._content = content if encoding is None: self._parsed = html5lib.parse(content, namespaceHTMLElements=False) else: self._parsed = html5lib.parse( content, transport_encoding=encoding, namespaceHTMLElements=False ) @property def versions(self): # type: () -> Generator[Version] seen = set() for link in 
self.links: version = self.link_version(link) if not version: continue if version in seen: continue seen.add(version) yield version @property def links(self): # type: () -> Generator[Link] for anchor in self._parsed.findall(".//a"): if anchor.get("href"): href = anchor.get("href") url = self.clean_link(urlparse.urljoin(self._url, href)) pyrequire = anchor.get("data-requires-python") pyrequire = unescape(pyrequire) if pyrequire else None link = Link(url, self, requires_python=pyrequire) if link.ext not in self.SUPPORTED_FORMATS: continue yield link def links_for_version(self, version): # type: (Version) -> Generator[Link] for link in self.links: if self.link_version(link) == version: yield link def link_version(self, link): # type: (Link) -> Union[Version, None] m = wheel_file_re.match(link.filename) if m: version = m.group("ver") else: info, ext = link.splitext() match = self.VERSION_REGEX.match(info) if not match: return version = match.group(2) try: version = Version.parse(version) except ValueError: return return version _clean_re = re.compile(r"[^a-z0-9$&+,/:;=?@.#%_\\|-]", re.I) def clean_link(self, url): """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in the link, it will be rewritten to %20 (while not over-quoting % or other characters).""" return self._clean_re.sub(lambda match: "%%%2x" % ord(match.group(0)), url) class LegacyRepository(PyPiRepository): def __init__( self, name, url, auth=None, disable_cache=False ): # type: (str, str, Optional[Auth], bool) -> None if name == "pypi": raise ValueError("The name [pypi] is reserved for repositories") self._packages = [] self._name = name self._url = url.rstrip("/") self._cache_dir = Path(CACHE_DIR) / "cache" / "repositories" / name self._cache = CacheManager( { "default": "releases", "serializer": "json", "stores": { "releases": {"driver": "file", "path": str(self._cache_dir)}, "packages": {"driver": "dict"}, "matches": {"driver": "dict"}, }, } ) self._session = CacheControl( requests.session(), cache=FileCache(str(self._cache_dir / "_http")) ) url_parts = urlparse.urlparse(self._url) if not url_parts.username and auth: self._session.auth = auth self._disable_cache = disable_cache @property def name(self): return self._name def find_packages( self, name, constraint=None, extras=None, allow_prereleases=False ): packages = [] if constraint is None: constraint = "*" if not isinstance(constraint, VersionConstraint): constraint = parse_constraint(constraint) if isinstance(constraint, VersionRange): if ( constraint.max is not None and constraint.max.is_prerelease() or constraint.min is not None and constraint.min.is_prerelease() ): allow_prereleases = True key = name if not constraint.is_any(): key = "{}:{}".format(key, str(constraint)) if self._cache.store("matches").has(key): versions = self._cache.store("matches").get(key) else: page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-"))) if page is None: return [] versions = [] for version in page.versions: if version.is_prerelease() and not allow_prereleases: continue if constraint.allows(version): versions.append(version) self._cache.store("matches").put(key, versions, 5) 
for version in versions: package = Package(name, version) package.source_type = "legacy" package.source_url = self._url if extras is not None: package.requires_extras = extras packages.append(package) self._log( "{} packages found for {} {}".format(len(packages), name, str(constraint)), level="debug", ) return packages def package( self, name, version, extras=None ): # type: (...) -> poetry.packages.Package """ Retrieve the release information. This is a heavy task which takes time. We have to download a package to get the dependencies. We also need to download every file matching this release to get the various hashes. Note that, this will be cached so the subsequent operations should be much faster. """ try: index = self._packages.index( poetry.packages.Package(name, version, version) ) return self._packages[index] except ValueError: if extras is None: extras = [] release_info = self.get_release_info(name, version) package = poetry.packages.Package(name, version, version) if release_info["requires_python"]: package.python_versions = release_info["requires_python"] package.source_type = "legacy" package.source_url = self._url package.source_reference = self.name requires_dist = release_info["requires_dist"] or [] for req in requires_dist: try: dependency = dependency_from_pep_508(req) except InvalidMarker: # Invalid marker # We strip the markers hoping for the best req = req.split(";")[0] dependency = dependency_from_pep_508(req) except ValueError: # Likely unable to parse constraint so we skip it self._log( "Invalid constraint ({}) found in {}-{} dependencies, " "skipping".format(req, package.name, package.version), level="debug", ) continue if dependency.in_extras: for extra in dependency.in_extras: if extra not in package.extras: package.extras[extra] = [] package.extras[extra].append(dependency) if not dependency.is_optional(): package.requires.append(dependency) # Adding description package.description = release_info.get("summary", "") # Adding hashes 
information package.hashes = release_info["digests"] # Activate extra dependencies for extra in extras: if extra in package.extras: for dep in package.extras[extra]: dep.activate() package.requires += package.extras[extra] self._packages.append(package) return package def _get_release_info(self, name, version): # type: (str, str) -> dict page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-"))) if page is None: raise PackageNotFound('No package named "{}"'.format(name)) data = { "name": name, "version": version, "summary": "", "requires_dist": [], "requires_python": None, "digests": [], } links = list(page.links_for_version(Version.parse(version))) if not links: raise PackageNotFound( 'No valid distribution links found for package: "{}" version: "{}"'.format( name, version ) ) urls = defaultdict(list) hashes = [] for link in links: if link.is_wheel: urls["bdist_wheel"].append(link.url) elif link.filename.endswith( (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar") ): urls["sdist"].append(link.url) hash = link.hash if link.hash_name == "sha256": hashes.append(hash) data["digests"] = hashes info = self._get_info_from_urls(urls) data["summary"] = info["summary"] data["requires_dist"] = info["requires_dist"] data["requires_python"] = info["requires_python"] return data def _download(self, url, dest): # type: (str, str) -> None r = self._session.get(url, stream=True) with open(dest, "wb") as f: for chunk in r.iter_content(chunk_size=1024): if chunk: f.write(chunk) def _get(self, endpoint): # type: (str) -> Union[Page, None] url = self._url + endpoint response = self._session.get(url) if response.status_code == 404: return return Page(url, response.content, response.headers) PK!^:  poetry/repositories/pool.pyfrom typing import List from typing import Union from .base_repository import BaseRepository from .exceptions import PackageNotFound from .repository import Repository class Pool(BaseRepository): def __init__(self, repositories=None): # type: 
(Union[list, None]) -> None if repositories is None: repositories = [] self._repositories = [] for repository in repositories: self.add_repository(repository) super(Pool, self).__init__() @property def repositories(self): # type: () -> List[Repository] return self._repositories def add_repository(self, repository): # type: (Repository) -> Pool """ Adds a repository to the pool. """ self._repositories.append(repository) return self def remove_repository(self, repository_name): # type: (str) -> Pool for i, repository in enumerate(self._repositories): if repository.name == repository_name: del self._repositories[i] break return self def has_package(self, package): raise NotImplementedError() def package(self, name, version, extras=None): for repository in self._repositories: try: package = repository.package(name, version, extras=extras) except PackageNotFound: continue if package: self._packages.append(package) return package raise PackageNotFound("Package {} ({}) not found.".format(name, version)) def find_packages( self, name, constraint=None, extras=None, allow_prereleases=False ): for repository in self._repositories: packages = repository.find_packages( name, constraint, extras=extras, allow_prereleases=allow_prereleases ) if packages: return packages return [] def search(self, query, mode=BaseRepository.SEARCH_FULLTEXT): from .legacy_repository import LegacyRepository results = [] for repository in self._repositories: if isinstance(repository, LegacyRepository): continue results += repository.search(query, mode=mode) return results PK!EzzsXsX&poetry/repositories/pypi_repository.pyimport logging import os import platform import tarfile import zipfile import pkginfo from bz2 import BZ2File from collections import defaultdict from gzip import GzipFile from typing import Dict from typing import List from typing import Union try: import urllib.parse as urlparse except ImportError: import urlparse try: from xmlrpc.client import ServerProxy except ImportError: from 
xmlrpclib import ServerProxy from cachecontrol import CacheControl from cachecontrol.caches.file_cache import FileCache from cachy import CacheManager from requests import get from requests import session from poetry.locations import CACHE_DIR from poetry.packages import dependency_from_pep_508 from poetry.packages import Package from poetry.packages.utils.link import Link from poetry.semver import parse_constraint from poetry.semver import VersionConstraint from poetry.semver import VersionRange from poetry.semver.exceptions import ParseVersionError from poetry.utils._compat import Path from poetry.utils._compat import to_str from poetry.utils.helpers import parse_requires from poetry.utils.helpers import temporary_directory from poetry.utils.patterns import wheel_file_re from poetry.utils.setup_reader import SetupReader from poetry.version.markers import InvalidMarker from poetry.version.markers import parse_marker from .exceptions import PackageNotFound from .repository import Repository logger = logging.getLogger(__name__) class PyPiRepository(Repository): CACHE_VERSION = parse_constraint("0.12.0") def __init__(self, url="https://pypi.org/", disable_cache=False, fallback=True): self._name = "PyPI" self._url = url self._disable_cache = disable_cache self._fallback = fallback release_cache_dir = Path(CACHE_DIR) / "cache" / "repositories" / "pypi" self._cache = CacheManager( { "default": "releases", "serializer": "json", "stores": { "releases": {"driver": "file", "path": str(release_cache_dir)}, "packages": {"driver": "dict"}, }, } ) self._session = CacheControl( session(), cache=FileCache(str(release_cache_dir / "_http")) ) super(PyPiRepository, self).__init__() def find_packages( self, name, # type: str constraint=None, # type: Union[VersionConstraint, str, None] extras=None, # type: Union[list, None] allow_prereleases=False, # type: bool ): # type: (...) -> List[Package] """ Find packages on the remote server. 
""" if constraint is None: constraint = "*" if not isinstance(constraint, VersionConstraint): constraint = parse_constraint(constraint) if isinstance(constraint, VersionRange): if ( constraint.max is not None and constraint.max.is_prerelease() or constraint.min is not None and constraint.min.is_prerelease() ): allow_prereleases = True info = self.get_package_info(name) packages = [] for version, release in info["releases"].items(): if not release: # Bad release self._log( "No release information found for {}-{}, skipping".format( name, version ), level="debug", ) continue try: package = Package(name, version) except ParseVersionError: self._log( 'Unable to parse version "{}" for the {} package, skipping'.format( version, name ), level="debug", ) continue if package.is_prerelease() and not allow_prereleases: continue if not constraint or (constraint and constraint.allows(package.version)): if extras is not None: package.requires_extras = extras packages.append(package) self._log( "{} packages found for {} {}".format(len(packages), name, str(constraint)), level="debug", ) return packages def package( self, name, # type: str version, # type: str extras=None, # type: (Union[list, None]) ): # type: (...) 
-> Union[Package, None] if extras is None: extras = [] release_info = self.get_release_info(name, version) package = Package(name, version, version) requires_dist = release_info["requires_dist"] or [] for req in requires_dist: try: dependency = dependency_from_pep_508(req) except InvalidMarker: # Invalid marker # We strip the markers hoping for the best req = req.split(";")[0] dependency = dependency_from_pep_508(req) except ValueError: # Likely unable to parse constraint so we skip it self._log( "Invalid constraint ({}) found in {}-{} dependencies, " "skipping".format(req, package.name, package.version), level="debug", ) continue if dependency.in_extras: for extra in dependency.in_extras: if extra not in package.extras: package.extras[extra] = [] package.extras[extra].append(dependency) if not dependency.is_optional(): package.requires.append(dependency) # Adding description package.description = release_info.get("summary", "") if release_info["requires_python"]: package.python_versions = release_info["requires_python"] if release_info["platform"]: package.platform = release_info["platform"] # Adding hashes information package.hashes = release_info["digests"] # Activate extra dependencies for extra in extras: if extra in package.extras: for dep in package.extras[extra]: dep.activate() package.requires += package.extras[extra] return package def search(self, query, mode=0): results = [] search = {"name": query} if mode == self.SEARCH_FULLTEXT: search["summary"] = query client = ServerProxy("https://pypi.python.org/pypi") hits = client.search(search, "or") for hit in hits: result = Package(hit["name"], hit["version"], hit["version"]) result.description = to_str(hit["summary"]) results.append(result) return results def get_package_info(self, name): # type: (str) -> dict """ Return the package information given its name. The information is returned from the cache if it exists or retrieved from the remote server. 
""" if self._disable_cache: return self._get_package_info(name) return self._cache.store("packages").remember_forever( name, lambda: self._get_package_info(name) ) def _get_package_info(self, name): # type: (str) -> dict data = self._get("pypi/{}/json".format(name)) if data is None: raise PackageNotFound("Package [{}] not found.".format(name)) return data def get_release_info(self, name, version): # type: (str, str) -> dict """ Return the release information given a package name and a version. The information is returned from the cache if it exists or retrieved from the remote server. """ if self._disable_cache: return self._get_release_info(name, version) cached = self._cache.remember_forever( "{}:{}".format(name, version), lambda: self._get_release_info(name, version) ) cache_version = cached.get("_cache_version", "0.0.0") if parse_constraint(cache_version) != self.CACHE_VERSION: # The cache must be updated self._log( "The cache for {} {} is outdated. Refreshing.".format(name, version), level="debug", ) cached = self._get_release_info(name, version) self._cache.forever("{}:{}".format(name, version), cached) return cached def _get_release_info(self, name, version): # type: (str, str) -> dict self._log("Getting info for {} ({}) from PyPI".format(name, version), "debug") json_data = self._get("pypi/{}/{}/json".format(name, version)) if json_data is None: raise PackageNotFound("Package [{}] not found.".format(name)) info = json_data["info"] data = { "name": info["name"], "version": info["version"], "summary": info["summary"], "platform": info["platform"], "requires_dist": info["requires_dist"], "requires_python": info["requires_python"], "digests": [], "_cache_version": str(self.CACHE_VERSION), } try: version_info = json_data["releases"][version] except KeyError: version_info = [] for file_info in version_info: data["digests"].append(file_info["digests"]["sha256"]) if self._fallback and data["requires_dist"] is None: self._log("No dependencies found, downloading 
archives", level="debug") # No dependencies set (along with other information) # This might be due to actually no dependencies # or badly set metadata when uploading # So, we need to make sure there is actually no # dependencies by introspecting packages urls = defaultdict(list) for url in json_data["urls"]: # Only get sdist and wheels if they exist dist_type = url["packagetype"] if dist_type not in ["sdist", "bdist_wheel"]: continue urls[dist_type].append(url["url"]) if not urls: return data info = self._get_info_from_urls(urls) data["requires_dist"] = info["requires_dist"] if not data["requires_python"]: data["requires_python"] = info["requires_python"] return data def _get(self, endpoint): # type: (str) -> Union[dict, None] json_response = self._session.get(self._url + endpoint) if json_response.status_code == 404: return None json_data = json_response.json() return json_data def _get_info_from_urls( self, urls ): # type: (Dict[str, List[str]]) -> Dict[str, Union[str, List, None]] # Checking wheels first as they are more likely to hold # the necessary information if "bdist_wheel" in urls: # Check fo a universal wheel wheels = urls["bdist_wheel"] universal_wheel = None universal_python2_wheel = None universal_python3_wheel = None platform_specific_wheels = [] for wheel in wheels: link = Link(wheel) m = wheel_file_re.match(link.filename) if not m: continue pyver = m.group("pyver") abi = m.group("abi") plat = m.group("plat") if abi == "none" and plat == "any": # Universal wheel if pyver == "py2.py3": # Any Python universal_wheel = wheel elif pyver == "py2": universal_python2_wheel = wheel else: universal_python3_wheel = wheel else: platform_specific_wheels.append(wheel) if universal_wheel is not None: return self._get_info_from_wheel(universal_wheel) info = {} if universal_python2_wheel and universal_python3_wheel: info = self._get_info_from_wheel(universal_python2_wheel) py3_info = self._get_info_from_wheel(universal_python3_wheel) if py3_info["requires_dist"]: if 
not info["requires_dist"]: info["requires_dist"] = py3_info["requires_dist"] return info py2_requires_dist = set( dependency_from_pep_508(r).to_pep_508() for r in info["requires_dist"] ) py3_requires_dist = set( dependency_from_pep_508(r).to_pep_508() for r in py3_info["requires_dist"] ) base_requires_dist = py2_requires_dist & py3_requires_dist py2_only_requires_dist = py2_requires_dist - py3_requires_dist py3_only_requires_dist = py3_requires_dist - py2_requires_dist # Normalizing requires_dist requires_dist = list(base_requires_dist) for requirement in py2_only_requires_dist: dep = dependency_from_pep_508(requirement) dep.marker = dep.marker.intersect( parse_marker("python_version == '2.7'") ) requires_dist.append(dep.to_pep_508()) for requirement in py3_only_requires_dist: dep = dependency_from_pep_508(requirement) dep.marker = dep.marker.intersect( parse_marker("python_version >= '3'") ) requires_dist.append(dep.to_pep_508()) info["requires_dist"] = sorted(list(set(requires_dist))) if info: return info if platform_specific_wheels and "sdist" not in urls: # Attempt to select the best platform-specific wheel best_wheel = self._pick_platform_specific_wheel( platform_specific_wheels ) return self._get_info_from_wheel(best_wheel) return self._get_info_from_sdist(urls["sdist"][0]) def get_sys_info(self): # type: () -> Dict[str, str] # Return system information. 
Can be overridden for testing return { "plat": platform.system().lower(), "machine": platform.machine().lower(), "pyver": platform.python_version_tuple(), } def _pick_platform_specific_wheel( self, platform_specific_wheels ): # type: (list) -> str sys_info = self.get_sys_info() # Format the expected platform name as used by package authors os_map = {"windows": "win", "darwin": "macosx"} os_name = ( os_map[sys_info["plat"]] if sys_info["plat"] in os_map else sys_info["plat"] ) machine = sys_info["machine"] if os_name == "win" and machine == "x86": machine = "32" # Fix search string for Windows 32bit systems py_label = "cp{}".format("".join(sys_info["pyver"][:2])) self._log( "Attempting to determine best match for: {}".format(sys_info), level="debug" ) platform_matches = [] for url in platform_specific_wheels: m = wheel_file_re.match(Link(url).filename) plat = m.group("plat") if os_name in plat: match_py = m.group("pyver") == py_label if match_py and (machine in plat or "x86_64" in plat): self._log("Found best wheel match: {}".format(url), level="debug") return url elif match_py: platform_matches.insert(0, url) if len(platform_matches) > 0: # Return first platform match as more specificity couldn't be determined self._log( "Selecting wheel file: {}".format(platform_matches[0]), level="debug" ) return platform_matches[0] # Could not pick the best wheel, return the first available and hope for the best self._log( "Matching failed, selecting wheel file: {}".format(platform_matches[0]), level="debug", ) return platform_specific_wheels[0] def _get_info_from_wheel( self, url ): # type: (str) -> Dict[str, Union[str, List, None]] self._log( "Downloading wheel: {}".format(urlparse.urlparse(url).path.rsplit("/")[-1]), level="debug", ) info = {"summary": "", "requires_python": None, "requires_dist": None} filename = os.path.basename(urlparse.urlparse(url).path.rsplit("/")[-1]) with temporary_directory() as temp_dir: filepath = os.path.join(temp_dir, filename) 
self._download(url, filepath) try: meta = pkginfo.Wheel(filepath) except ValueError: # Unable to determine dependencies # Assume none return info if meta.summary: info["summary"] = meta.summary or "" info["requires_python"] = meta.requires_python if meta.requires_dist: info["requires_dist"] = meta.requires_dist return info def _get_info_from_sdist( self, url ): # type: (str) -> Dict[str, Union[str, List, None]] self._log( "Downloading sdist: {}".format(urlparse.urlparse(url).path.rsplit("/")[-1]), level="debug", ) info = {"summary": "", "requires_python": None, "requires_dist": None} filename = os.path.basename(urlparse.urlparse(url).path) with temporary_directory() as temp_dir: filepath = Path(temp_dir) / filename self._download(url, str(filepath)) try: meta = pkginfo.SDist(str(filepath)) if meta.summary: info["summary"] = meta.summary if meta.requires_python: info["requires_python"] = meta.requires_python if meta.requires_dist: info["requires_dist"] = list(meta.requires_dist) return info except ValueError: # Unable to determine dependencies # We pass and go deeper pass # Still not dependencies found # So, we unpack and introspect suffix = filepath.suffix gz = None if suffix == ".zip": tar = zipfile.ZipFile(str(filepath)) else: if suffix == ".bz2": gz = BZ2File(str(filepath)) suffixes = filepath.suffixes if len(suffixes) > 1 and suffixes[-2] == ".tar": suffix = ".tar.bz2" else: gz = GzipFile(str(filepath)) suffix = ".tar.gz" tar = tarfile.TarFile(str(filepath), fileobj=gz) try: tar.extractall(os.path.join(temp_dir, "unpacked")) finally: if gz: gz.close() tar.close() unpacked = Path(temp_dir) / "unpacked" sdist_dir = unpacked / Path(filename).name.rstrip(suffix) # Checking for .egg-info at root eggs = list(sdist_dir.glob("*.egg-info")) if eggs: egg_info = eggs[0] requires = egg_info / "requires.txt" if requires.exists(): with requires.open() as f: info["requires_dist"] = parse_requires(f.read()) return info # Searching for .egg-info in sub directories eggs = 
list(sdist_dir.glob("**/*.egg-info")) if eggs: egg_info = eggs[0] requires = egg_info / "requires.txt" if requires.exists(): with requires.open() as f: info["requires_dist"] = parse_requires(f.read()) return info # Still nothing, try reading (without executing it) # the setup.py file. try: setup_info = self._inspect_sdist_with_setup(sdist_dir) for key, value in info.items(): if value: continue info[key] = setup_info[key] return info except Exception as e: self._log( "An error occurred when reading setup.py or setup.cfg: {}".format( str(e) ), "warning", ) return info def _inspect_sdist_with_setup(self, sdist_dir): info = {"requires_python": None, "requires_dist": None} result = SetupReader.read_from_directory(sdist_dir) requires = "" for dep in result["install_requires"]: requires += dep + "\n" if result["extras_require"]: requires += "\n" for extra_name, deps in result["extras_require"].items(): requires += "[{}]\n".format(extra_name) for dep in deps: requires += dep + "\n" requires += "\n" info["requires_dist"] = parse_requires(requires) info["requires_python"] = result["python_requires"] return info def _download(self, url, dest): # type: (str, str) -> None r = get(url, stream=True) r.raise_for_status() with open(dest, "wb") as f: for chunk in r.iter_content(chunk_size=1024): if chunk: f.write(chunk) def _log(self, msg, level="info"): getattr(logger, level)("{}: {}".format(self._name, msg)) PK!m( # # !poetry/repositories/repository.pyfrom poetry.semver import parse_constraint from poetry.semver import VersionConstraint from poetry.semver import VersionRange from .base_repository import BaseRepository class Repository(BaseRepository): def __init__(self, packages=None): super(Repository, self).__init__() if packages is None: packages = [] for package in packages: self.add_package(package) def package(self, name, version, extras=None): name = name.lower() if extras is None: extras = [] for package in self.packages: if name == package.name and package.version.text == 
version: # Activate extra dependencies for extra in extras: if extra in package.extras: for extra_dep in package.extras[extra]: for dep in package.requires: if dep.name == extra_dep.name: dep.activate() return package.clone() def find_packages( self, name, constraint=None, extras=None, allow_prereleases=False ): name = name.lower() packages = [] if extras is None: extras = [] if constraint is None: constraint = "*" if not isinstance(constraint, VersionConstraint): constraint = parse_constraint(constraint) if isinstance(constraint, VersionRange): if ( constraint.max is not None and constraint.max.is_prerelease() or constraint.min is not None and constraint.min.is_prerelease() ): allow_prereleases = True for package in self.packages: if name == package.name: if package.is_prerelease() and not allow_prereleases: continue if constraint.allows(package.version): for dep in package.requires: for extra in extras: if extra not in package.extras: continue reqs = package.extras[extra] for req in reqs: if req.name == dep.name: dep.activate() packages.append(package) return packages def has_package(self, package): package_id = package.unique_name for repo_package in self.packages: if package_id == repo_package.unique_name: return True return False def add_package(self, package): self._packages.append(package) def remove_package(self, package): package_id = package.unique_name index = None for i, repo_package in enumerate(self.packages): if package_id == repo_package.unique_name: index = i break if index is not None: del self._packages[index] def search(self, query, mode=0): results = [] for package in self.packages: if query in package.name: results.append(package) return results def __len__(self): return len(self._packages) PK!6poetry/semver/__init__.pyimport re from .empty_constraint import EmptyConstraint from .patterns import BASIC_CONSTRAINT from .patterns import CARET_CONSTRAINT from .patterns import TILDE_CONSTRAINT from .patterns import TILDE_PEP440_CONSTRAINT from 
.patterns import X_CONSTRAINT from .version import Version from .version_constraint import VersionConstraint from .version_range import VersionRange from .version_union import VersionUnion def parse_constraint(constraints): # type: (str) -> VersionConstraint if constraints == "*": return VersionRange() or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip()) or_groups = [] for constraints in or_constraints: and_constraints = re.split( "(?< ,]) *(? 1: for constraint in and_constraints: constraint_objects.append(parse_single_constraint(constraint)) else: constraint_objects.append(parse_single_constraint(and_constraints[0])) if len(constraint_objects) == 1: constraint = constraint_objects[0] else: constraint = constraint_objects[0] for next_constraint in constraint_objects[1:]: constraint = constraint.intersect(next_constraint) or_groups.append(constraint) if len(or_groups) == 1: return or_groups[0] else: return VersionUnion.of(*or_groups) def parse_single_constraint(constraint): # type: (str) -> VersionConstraint m = re.match(r"(?i)^v?[xX*](\.[xX*])*$", constraint) if m: return VersionRange() # Tilde range m = TILDE_CONSTRAINT.match(constraint) if m: version = Version.parse(m.group(1)) high = version.stable.next_minor if len(m.group(1).split(".")) == 1: high = version.stable.next_major return VersionRange( version, high, include_min=True, always_include_max_prerelease=True ) # PEP 440 Tilde range (~=) m = TILDE_PEP440_CONSTRAINT.match(constraint) if m: precision = 1 if m.group(3): precision += 1 if m.group(4): precision += 1 version = Version.parse(m.group(1)) if precision == 2: low = version high = version.stable.next_major else: low = Version(version.major, version.minor, 0) high = version.stable.next_minor return VersionRange( low, high, include_min=True, always_include_max_prerelease=True ) # Caret range m = CARET_CONSTRAINT.match(constraint) if m: version = Version.parse(m.group(1)) return VersionRange( version, version.next_breaking, include_min=True, 
always_include_max_prerelease=True, ) # X Range m = X_CONSTRAINT.match(constraint) if m: op = m.group(1) major = int(m.group(2)) minor = m.group(3) if minor is not None: version = Version(major, int(minor), 0) result = VersionRange( version, version.next_minor, include_min=True, always_include_max_prerelease=True, ) else: if major == 0: result = VersionRange(max=Version(1, 0, 0)) else: version = Version(major, 0, 0) result = VersionRange( version, version.next_major, include_min=True, always_include_max_prerelease=True, ) if op == "!=": result = VersionRange().difference(result) return result # Basic comparator m = BASIC_CONSTRAINT.match(constraint) if m: op = m.group(1) version = m.group(2) if version == "dev": version = "0.0-dev" try: version = Version.parse(version) except ValueError: raise ValueError( "Could not parse version constraint: {}".format(constraint) ) if op == "<": return VersionRange(max=version) elif op == "<=": return VersionRange(max=version, include_max=True) elif op == ">": return VersionRange(min=version) elif op == ">=": return VersionRange(min=version, include_min=True) elif op == "!=": return VersionUnion(VersionRange(max=version), VersionRange(min=version)) else: return version raise ValueError("Could not parse version constraint: {}".format(constraint)) PK!Du-22!poetry/semver/empty_constraint.pyfrom .version_constraint import VersionConstraint class EmptyConstraint(VersionConstraint): def is_empty(self): return True def is_any(self): return False def allows(self, version): return False def allows_all(self, other): return other.is_empty() def allows_any(self, other): return False def intersect(self, other): return self def union(self, other): return other def difference(self, other): return self def __str__(self): return "" PK!<څ..poetry/semver/exceptions.pyclass ParseVersionError(ValueError): pass PK!^[poetry/semver/patterns.pyimport re MODIFIERS = ( "[._-]?" r"((?!post)(?:beta|b|c|pre|RC|alpha|a|patch|pl|p|dev)(?:(?:[.-]?\d+)*)?)?" 
r"([+-]?([0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?" ) _COMPLETE_VERSION = r"v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.(\d+))?{}(?:\+[^\s]+)?".format( MODIFIERS ) COMPLETE_VERSION = re.compile("(?i)" + _COMPLETE_VERSION) CARET_CONSTRAINT = re.compile(r"(?i)^\^({})$".format(_COMPLETE_VERSION)) TILDE_CONSTRAINT = re.compile("(?i)^~(?!=)({})$".format(_COMPLETE_VERSION)) TILDE_PEP440_CONSTRAINT = re.compile("(?i)^~=({})$".format(_COMPLETE_VERSION)) X_CONSTRAINT = re.compile(r"^(!=|==)?\s*v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.[xX*])+$") BASIC_CONSTRAINT = re.compile( r"(?i)^(<>|!=|>=?|<=?|==?)?\s*({}|dev)".format(_COMPLETE_VERSION) ) PK!H^i//poetry/semver/version.pyimport re from typing import List from typing import Union from .empty_constraint import EmptyConstraint from .exceptions import ParseVersionError from .patterns import COMPLETE_VERSION from .version_constraint import VersionConstraint from .version_range import VersionRange from .version_union import VersionUnion class Version(VersionRange): """ A parsed semantic version number. 
""" def __init__( self, major, # type: int minor=None, # type: Union[int, None] patch=None, # type: Union[int, None] rest=None, # type: Union[int, None] pre=None, # type: Union[str, None] build=None, # type: Union[str, None] text=None, # type: Union[str, None] precision=None, # type: Union[int, None] ): # type: () -> None self._major = int(major) self._precision = None if precision is None: self._precision = 1 if minor is None: minor = 0 else: if self._precision is not None: self._precision += 1 self._minor = int(minor) if patch is None: patch = 0 else: if self._precision is not None: self._precision += 1 if rest is None: rest = 0 else: if self._precision is not None: self._precision += 1 if precision is not None: self._precision = precision self._patch = int(patch) self._rest = int(rest) if text is None: parts = [str(major)] if self._precision >= 2 or minor != 0: parts.append(str(minor)) if self._precision >= 3 or patch != 0: parts.append(str(patch)) if self._precision >= 4 or rest != 0: parts.append(str(rest)) text = ".".join(parts) if pre: text += "-{}".format(pre) if build: text += "+{}".format(build) self._text = text pre = self._normalize_prerelease(pre) self._prerelease = [] if pre is not None: self._prerelease = self._split_parts(pre) build = self._normalize_build(build) self._build = [] if build is not None: if build.startswith(("-", "+")): build = build[1:] self._build = self._split_parts(build) @property def major(self): # type: () -> int return self._major @property def minor(self): # type: () -> int return self._minor @property def patch(self): # type: () -> int return self._patch @property def rest(self): # type: () -> int return self._rest @property def prerelease(self): # type: () -> List[str] return self._prerelease @property def build(self): # type: () -> List[str] return self._build @property def text(self): return self._text @property def precision(self): # type: () -> int return self._precision @property def stable(self): if not 
self.is_prerelease(): return self return self.next_patch @property def next_major(self): # type: () -> Version if self.is_prerelease() and self.minor == 0 and self.patch == 0: return Version(self.major, self.minor, self.patch) return self._increment_major() @property def next_minor(self): # type: () -> Version if self.is_prerelease() and self.patch == 0: return Version(self.major, self.minor, self.patch) return self._increment_minor() @property def next_patch(self): # type: () -> Version if self.is_prerelease(): return Version(self.major, self.minor, self.patch) return self._increment_patch() @property def next_breaking(self): # type: () -> Version if self.major == 0: if self.minor != 0: return self._increment_minor() if self._precision == 1: return self._increment_major() elif self._precision == 2: return self._increment_minor() return self._increment_patch() return self._increment_major() @property def first_prerelease(self): # type: () -> Version return Version.parse( "{}.{}.{}-alpha.0".format(self.major, self.minor, self.patch) ) @property def min(self): return self @property def max(self): return self @property def full_max(self): return self @property def include_min(self): return True @property def include_max(self): return True @classmethod def parse(cls, text): # type: (str) -> Version match = COMPLETE_VERSION.match(text) if match is None: raise ParseVersionError('Unable to parse "{}".'.format(text)) text = text.rstrip(".") major = int(match.group(1)) minor = int(match.group(2)) if match.group(2) else None patch = int(match.group(3)) if match.group(3) else None rest = int(match.group(4)) if match.group(4) else None pre = match.group(5) build = match.group(6) if build: build = build.lstrip("+") return Version(major, minor, patch, rest, pre, build, text) def is_any(self): return False def is_empty(self): return False def is_prerelease(self): # type: () -> bool return len(self._prerelease) > 0 def allows(self, version): # type: (Version) -> bool return self 
== version def allows_all(self, other): # type: (VersionConstraint) -> bool return other.is_empty() or other == self def allows_any(self, other): # type: (VersionConstraint) -> bool return other.allows(self) def intersect(self, other): # type: (VersionConstraint) -> VersionConstraint if other.allows(self): return self return EmptyConstraint() def union(self, other): # type: (VersionConstraint) -> VersionConstraint from .version_range import VersionRange if other.allows(self): return other if isinstance(other, VersionRange): if other.min == self: return VersionRange( other.min, other.max, include_min=True, include_max=other.include_max, ) if other.max == self: return VersionRange( other.min, other.max, include_min=other.include_min, include_max=True, ) return VersionUnion.of(self, other) def difference(self, other): # type: (VersionConstraint) -> VersionConstraint if other.allows(self): return EmptyConstraint() return self def equals_without_prerelease(self, other): # type: (Version) -> bool return ( self.major == other.major and self.minor == other.minor and self.patch == other.patch ) def _increment_major(self): # type: () -> Version return Version(self.major + 1, 0, 0, precision=self._precision) def _increment_minor(self): # type: () -> Version return Version(self.major, self.minor + 1, 0, precision=self._precision) def _increment_patch(self): # type: () -> Version return Version( self.major, self.minor, self.patch + 1, precision=self._precision ) def _normalize_prerelease(self, pre): # type: (str) -> str if not pre: return m = re.match(r"(?i)^(a|alpha|b|beta|c|pre|rc|dev)[-.]?(\d+)?$", pre) if not m: return modifier = m.group(1) number = m.group(2) if number is None: number = 0 if modifier == "a": modifier = "alpha" elif modifier == "b": modifier = "beta" elif modifier in {"c", "pre"}: modifier = "rc" elif modifier == "dev": modifier = "alpha" return "{}.{}".format(modifier, number) def _normalize_build(self, build): # type: (str) -> str if not build: return if 
build.startswith("post"): build = build.lstrip("post") if not build: return return build def _split_parts(self, text): # type: (str) -> List[Union[str, int]] parts = text.split(".") for i, part in enumerate(parts): try: parts[i] = int(part) except (TypeError, ValueError): continue return parts def __lt__(self, other): return self._cmp(other) < 0 def __le__(self, other): return self._cmp(other) <= 0 def __gt__(self, other): return self._cmp(other) > 0 def __ge__(self, other): return self._cmp(other) >= 0 def _cmp(self, other): if not isinstance(other, VersionConstraint): return NotImplemented if not isinstance(other, Version): return -other._cmp(self) if self.major != other.major: return self._cmp_parts(self.major, other.major) if self.minor != other.minor: return self._cmp_parts(self.minor, other.minor) if self.patch != other.patch: return self._cmp_parts(self.patch, other.patch) if self.rest != other.rest: return self._cmp_parts(self.rest, other.rest) # Pre-releases always come before no pre-release string. if not self.is_prerelease() and other.is_prerelease(): return 1 if not other.is_prerelease() and self.is_prerelease(): return -1 comparison = self._cmp_lists(self.prerelease, other.prerelease) if comparison != 0: return comparison # Builds always come after no build string. if not self.build and other.build: return -1 if not other.build and self.build: return 1 return self._cmp_lists(self.build, other.build) def _cmp_parts(self, a, b): if a < b: return -1 elif a > b: return 1 return 0 def _cmp_lists(self, a, b): # type: (List, List) -> int for i in range(max(len(a), len(b))): a_part = None if i < len(a): a_part = a[i] b_part = None if i < len(b): b_part = b[i] if a_part == b_part: continue # Missing parts come after present ones. 
if a_part is None: return -1 if b_part is None: return 1 if isinstance(a_part, int): if isinstance(b_part, int): return self._cmp_parts(a_part, b_part) return -1 else: if isinstance(b_part, int): return 1 return self._cmp_parts(a_part, b_part) return 0 def __eq__(self, other): # type: (Version) -> bool if not isinstance(other, Version): return NotImplemented return ( self._major == other.major and self._minor == other.minor and self._patch == other.patch and self._rest == other.rest and self._prerelease == other.prerelease and self._build == other.build ) def __ne__(self, other): return not self == other def __str__(self): return self._text def __repr__(self): return "".format(str(self)) def __hash__(self): return hash( ( self.major, self.minor, self.patch, ".".join(str(p) for p in self.prerelease), ".".join(str(p) for p in self.build), ) ) PK!#OO#poetry/semver/version_constraint.pyclass VersionConstraint: def is_empty(self): # type: () -> bool raise NotImplementedError() def is_any(self): # type: () -> bool raise NotImplementedError() def allows(self, version): # type: (Version) -> bool raise NotImplementedError() def allows_all(self, other): # type: (VersionConstraint) -> bool raise NotImplementedError() def allows_any(self, other): # type: (VersionConstraint) -> bool raise NotImplementedError() def intersect(self, other): # type: (VersionConstraint) -> VersionConstraint raise NotImplementedError() def union(self, other): # type: (VersionConstraint) -> VersionConstraint raise NotImplementedError() def difference(self, other): # type: (VersionConstraint) -> VersionConstraint raise NotImplementedError() PK!Ì44poetry/semver/version_range.pyfrom .empty_constraint import EmptyConstraint from .version_constraint import VersionConstraint from .version_union import VersionUnion class VersionRange(VersionConstraint): def __init__( self, min=None, max=None, include_min=False, include_max=False, always_include_max_prerelease=False, ): full_max = max if ( 
always_include_max_prerelease and not include_max and not full_max.is_prerelease() and not full_max.build and ( min is None or not min.is_prerelease() or not min.equals_without_prerelease(full_max) ) ): full_max = full_max.first_prerelease self._min = min self._max = max self._full_max = full_max self._include_min = include_min self._include_max = include_max @property def min(self): return self._min @property def max(self): return self._max @property def full_max(self): return self._full_max @property def include_min(self): return self._include_min @property def include_max(self): return self._include_max def is_empty(self): return False def is_any(self): return self._min is None and self._max is None def allows(self, other): # type: (Version) -> bool if self._min is not None: if other < self._min: return False if not self._include_min and other == self._min: return False if self._max is not None: if other > self._max: return False if not self._include_max and other == self._max: return False return True def allows_all(self, other): # type: (VersionConstraint) -> bool from .version import Version if other.is_empty(): return True if isinstance(other, Version): return self.allows(other) if isinstance(other, VersionUnion): return all([self.allows_all(constraint) for constraint in other.ranges]) if isinstance(other, VersionRange): return not other.allows_lower(self) and not other.allows_higher(self) raise ValueError("Unknown VersionConstraint type {}.".format(other)) def allows_any(self, other): # type: (VersionConstraint) -> bool from .version import Version if other.is_empty(): return False if isinstance(other, Version): return self.allows(other) if isinstance(other, VersionUnion): return any([self.allows_any(constraint) for constraint in other.ranges]) if isinstance(other, VersionRange): return not other.is_strictly_lower(self) and not other.is_strictly_higher( self ) raise ValueError("Unknown VersionConstraint type {}.".format(other)) def intersect(self, other): # 
type: (VersionConstraint) -> VersionConstraint from .version import Version if other.is_empty(): return other if isinstance(other, VersionUnion): return other.intersect(self) # A range and a Version just yields the version if it's in the range. if isinstance(other, Version): if self.allows(other): return other return EmptyConstraint() if not isinstance(other, VersionRange): raise ValueError("Unknown VersionConstraint type {}.".format(other)) if self.allows_lower(other): if self.is_strictly_lower(other): return EmptyConstraint() intersect_min = other.min intersect_include_min = other.include_min else: if other.is_strictly_lower(self): return EmptyConstraint() intersect_min = self._min intersect_include_min = self._include_min if self.allows_higher(other): intersect_max = other.max intersect_include_max = other.include_max else: intersect_max = self._max intersect_include_max = self._include_max if intersect_min is None and intersect_max is None: return VersionRange() # If the range is just a single version. if intersect_min == intersect_max: # Because we already verified that the lower range isn't strictly # lower, there must be some overlap. assert intersect_include_min and intersect_include_max return intersect_min # If we got here, there is an actual range. return VersionRange( intersect_min, intersect_max, intersect_include_min, intersect_include_max ) def union(self, other): # type: (VersionConstraint) -> VersionConstraint from .version import Version if isinstance(other, Version): if self.allows(other): return self if other == self.min: return VersionRange( self.min, self.max, include_min=True, include_max=self.include_max ) if other == self.max: return VersionRange( self.min, self.max, include_min=self.include_min, include_max=True ) return VersionUnion.of(self, other) if isinstance(other, VersionRange): # If the two ranges don't overlap, we won't be able to create a single # VersionRange for both of them. 
edges_touch = ( self.max == other.min and (self.include_max or other.include_min) ) or (self.min == other.max and (self.include_min or other.include_max)) if not edges_touch and not self.allows_any(other): return VersionUnion.of(self, other) if self.allows_lower(other): union_min = self.min union_include_min = self.include_min else: union_min = other.min union_include_min = other.include_min if self.allows_higher(other): union_max = self.max union_include_max = self.include_max else: union_max = other.max union_include_max = other.include_max return VersionRange( union_min, union_max, include_min=union_include_min, include_max=union_include_max, ) return VersionUnion.of(self, other) def difference(self, other): # type: (VersionConstraint) -> VersionConstraint from .version import Version if other.is_empty(): return self if isinstance(other, Version): if not self.allows(other): return self if other == self.min: if not self.include_min: return self return VersionRange(self.min, self.max, False, self.include_max) if other == self.max: if not self.include_max: return self return VersionRange(self.min, self.max, self.include_min, False) return VersionUnion.of( VersionRange(self.min, other, self.include_min, False), VersionRange(other, self.max, False, self.include_max), ) elif isinstance(other, VersionRange): if not self.allows_any(other): return self if not self.allows_lower(other): before = None elif self.min == other.min: before = self.min else: before = VersionRange( self.min, other.min, self.include_min, not other.include_min ) if not self.allows_higher(other): after = None elif self.max == other.max: after = self.max else: after = VersionRange( other.max, self.max, not other.include_max, self.include_max ) if before is None and after is None: return EmptyConstraint() if before is None: return after if after is None: return before return VersionUnion.of(before, after) elif isinstance(other, VersionUnion): ranges = [] # type: List[VersionRange] current = self for 
range in other.ranges: # Skip any ranges that are strictly lower than [current]. if range.is_strictly_lower(current): continue # If we reach a range strictly higher than [current], no more ranges # will be relevant so we can bail early. if range.is_strictly_higher(current): break difference = current.difference(range) if difference.is_empty(): return EmptyConstraint() elif isinstance(difference, VersionUnion): # If [range] split [current] in half, we only need to continue # checking future ranges against the latter half. ranges.append(difference.ranges[0]) current = difference.ranges[-1] else: current = difference if not ranges: return current return VersionUnion.of(*(ranges + [current])) raise ValueError("Unknown VersionConstraint type {}.".format(other)) def allows_lower(self, other): # type: (VersionRange) -> bool if self.min is None: return other.min is not None if other.min is None: return False if self.min < other.min: return True if self.min > other.min: return False return self.include_min and not other.include_min def allows_higher(self, other): # type: (VersionRange) -> bool if self.max is None: return other.max is not None if other.max is None: return False if self.max < other.max: return False if self.max > other.max: return True return self.include_max and not other.include_max def is_strictly_lower(self, other): # type: (VersionRange) -> bool if self.max is None or other.min is None: return False if self.full_max < other.min: return True if self.full_max > other.min: return False return not self.include_max or not other.include_min def is_strictly_higher(self, other): # type: (VersionRange) -> bool return other.is_strictly_lower(self) def is_adjacent_to(self, other): # type: (VersionRange) -> bool if self.max != other.min: return False return ( self.include_max and not other.include_min or not self.include_max and other.include_min ) def __eq__(self, other): if not isinstance(other, VersionRange): return False return ( self._min == other.min and 
self._max == other.max and self._include_min == other.include_min and self._include_max == other.include_max ) def __lt__(self, other): return self._cmp(other) < 0 def __le__(self, other): return self._cmp(other) <= 0 def __gt__(self, other): return self._cmp(other) > 0 def __ge__(self, other): return self._cmp(other) >= 0 def _cmp(self, other): # type: (VersionRange) -> int if self.min is None: if other.min is None: return self._compare_max(other) return -1 elif other.min is None: return 1 result = self.min._cmp(other.min) if result != 0: return result if self.include_min != other.include_min: return -1 if self.include_min else 1 return self._compare_max(other) def _compare_max(self, other): # type: (VersionRange) -> int if self.max is None: if other.max is None: return 0 return 1 elif other.max is None: return -1 result = self.max._cmp(other.max) if result != 0: return result if self.include_max != other.include_max: return 1 if self.include_max else -1 return 0 def __str__(self): text = "" if self.min is not None: text += ">=" if self.include_min else ">" text += self.min.text if self.max is not None: if self.min is not None: text += "," text += "{}{}".format("<=" if self.include_max else "<", self.max.text) if self.min is None and self.max is None: return "*" return text def __repr__(self): return "".format(str(self)) def __hash__(self): return hash((self.min, self.max, self.include_min, self.include_max)) PK!gLpoetry/semver/version_union.pyfrom .empty_constraint import EmptyConstraint from .version_constraint import VersionConstraint class VersionUnion(VersionConstraint): """ A version constraint representing a union of multiple disjoint version ranges. An instance of this will only be created if the version can't be represented as a non-compound value. 
""" def __init__(self, *ranges): self._ranges = list(ranges) @property def ranges(self): return self._ranges @classmethod def of(cls, *ranges): from .version_range import VersionRange flattened = [] for constraint in ranges: if constraint.is_empty(): continue if isinstance(constraint, VersionUnion): flattened += constraint.ranges continue flattened.append(constraint) if not flattened: return EmptyConstraint() if any([constraint.is_any() for constraint in flattened]): return VersionRange() # Only allow Versions and VersionRanges here so we can more easily reason # about everything in flattened. _EmptyVersions and VersionUnions are # filtered out above. for constraint in flattened: if isinstance(constraint, VersionRange): continue raise ValueError("Unknown VersionConstraint type {}.".format(constraint)) flattened.sort() merged = [] for constraint in flattened: # Merge this constraint with the previous one, but only if they touch. if not merged or ( not merged[-1].allows_any(constraint) and not merged[-1].is_adjacent_to(constraint) ): merged.append(constraint) else: merged[-1] = merged[-1].union(constraint) if len(merged) == 1: return merged[0] return VersionUnion(*merged) def is_empty(self): return False def is_any(self): return False def allows(self, version): # type: (Version) -> bool return any([constraint.allows(version) for constraint in self._ranges]) def allows_all(self, other): # type: (VersionConstraint) -> bool our_ranges = iter(self._ranges) their_ranges = iter(self._ranges_for(other)) our_current_range = next(our_ranges, None) their_current_range = next(their_ranges, None) while our_current_range and their_current_range: if our_current_range.allows_all(their_current_range): their_current_range = next(their_ranges, None) else: our_current_range = next(our_ranges, None) return their_current_range is None def allows_any(self, other): # type: (VersionConstraint) -> bool our_ranges = iter(self._ranges) their_ranges = iter(self._ranges_for(other)) 
our_current_range = next(our_ranges, None) their_current_range = next(their_ranges, None) while our_current_range and their_current_range: if our_current_range.allows_any(their_current_range): return True if their_current_range.allows_higher(our_current_range): our_current_range = next(our_ranges, None) else: their_current_range = next(their_ranges, None) return False def intersect(self, other): # type: (VersionConstraint) -> VersionConstraint our_ranges = iter(self._ranges) their_ranges = iter(self._ranges_for(other)) new_ranges = [] our_current_range = next(our_ranges, None) their_current_range = next(their_ranges, None) while our_current_range and their_current_range: intersection = our_current_range.intersect(their_current_range) if not intersection.is_empty(): new_ranges.append(intersection) if their_current_range.allows_higher(our_current_range): our_current_range = next(our_ranges, None) else: their_current_range = next(their_ranges, None) return VersionUnion.of(*new_ranges) def union(self, other): # type: (VersionConstraint) -> VersionConstraint return VersionUnion.of(self, other) def difference(self, other): # type: (VersionConstraint) -> VersionConstraint our_ranges = iter(self._ranges) their_ranges = iter(self._ranges_for(other)) new_ranges = [] state = { "current": next(our_ranges, None), "their_range": next(their_ranges, None), } def their_next_range(): state["their_range"] = next(their_ranges, None) if state["their_range"]: return True new_ranges.append(state["current"]) our_current = next(our_ranges, None) while our_current: new_ranges.append(our_current) our_current = next(our_ranges, None) return False def our_next_range(include_current=True): if include_current: new_ranges.append(state["current"]) our_current = next(our_ranges, None) if not our_current: return False state["current"] = our_current return True while True: if state["their_range"] is None: break if state["their_range"].is_strictly_lower(state["current"]): if not their_next_range(): 
break continue if state["their_range"].is_strictly_higher(state["current"]): if not our_next_range(): break continue difference = state["current"].difference(state["their_range"]) if isinstance(difference, VersionUnion): assert len(difference.ranges) == 2 new_ranges.append(difference.ranges[0]) state["current"] = difference.ranges[-1] if not their_next_range(): break elif difference.is_empty(): if not our_next_range(False): break else: state["current"] = difference if state["current"].allows_higher(state["their_range"]): if not their_next_range(): break else: if not our_next_range(): break if not new_ranges: return EmptyConstraint() if len(new_ranges) == 1: return new_ranges[0] return VersionUnion.of(*new_ranges) def _ranges_for( self, constraint ): # type: (VersionConstraint) -> List[VersionRange] from .version_range import VersionRange if constraint.is_empty(): return [] if isinstance(constraint, VersionUnion): return constraint.ranges if isinstance(constraint, VersionRange): return [constraint] raise ValueError("Unknown VersionConstraint type {}".format(constraint)) def _excludes_single_version(self): # type: () -> bool from .version import Version from .version_range import VersionRange return isinstance(VersionRange().difference(self), Version) def __eq__(self, other): if not isinstance(other, VersionUnion): return False return self._ranges == other.ranges def __str__(self): from .version_range import VersionRange if self._excludes_single_version(): return "!={}".format(VersionRange().difference(self)) return " || ".join([str(r) for r in self._ranges]) def __repr__(self): return "".format(str(self)) PK!A poetry/spdx/__init__.pyimport json import os from .license import License from .updater import Updater _licenses = None def license_by_id(identifier): if _licenses is None: load_licenses() id = identifier.lower() if id not in _licenses: raise ValueError("Invalid license id: {}".format(identifier)) return _licenses[id] def load_licenses(): global _licenses 
_licenses = {} licenses_file = os.path.join(os.path.dirname(__file__), "data", "licenses.json") with open(licenses_file) as f: data = json.loads(f.read()) for name, license in data.items(): _licenses[name.lower()] = License(name, license[0], license[1], license[2]) if __name__ == "__main__": updater = Updater() updater.dump() PK!=|uupoetry/spdx/data/licenses.json{ "0BSD": [ "BSD Zero Clause License", false, false ], "AAL": [ "Attribution Assurance License", true, false ], "ADSL": [ "Amazon Digital Services License", false, false ], "AFL-1.1": [ "Academic Free License v1.1", true, false ], "AFL-1.2": [ "Academic Free License v1.2", true, false ], "AFL-2.0": [ "Academic Free License v2.0", true, false ], "AFL-2.1": [ "Academic Free License v2.1", true, false ], "AFL-3.0": [ "Academic Free License v3.0", true, false ], "AGPL-1.0": [ "Affero General Public License v1.0", false, false ], "AGPL-3.0": [ "GNU Affero General Public License v3.0", true, true ], "AGPL-3.0-only": [ "GNU Affero General Public License v3.0 only", true, false ], "AGPL-3.0-or-later": [ "GNU Affero General Public License v3.0 or later", true, false ], "AMDPLPA": [ "AMD's plpa_map.c License", false, false ], "AML": [ "Apple MIT License", false, false ], "AMPAS": [ "Academy of Motion Picture Arts and Sciences BSD", false, false ], "ANTLR-PD": [ "ANTLR Software Rights Notice", false, false ], "APAFML": [ "Adobe Postscript AFM License", false, false ], "APL-1.0": [ "Adaptive Public License 1.0", true, false ], "APSL-1.0": [ "Apple Public Source License 1.0", true, false ], "APSL-1.1": [ "Apple Public Source License 1.1", true, false ], "APSL-1.2": [ "Apple Public Source License 1.2", true, false ], "APSL-2.0": [ "Apple Public Source License 2.0", true, false ], "Abstyles": [ "Abstyles License", false, false ], "Adobe-2006": [ "Adobe Systems Incorporated Source Code License Agreement", false, false ], "Adobe-Glyph": [ "Adobe Glyph List License", false, false ], "Afmparse": [ "Afmparse License", false, 
false ], "Aladdin": [ "Aladdin Free Public License", false, false ], "Apache-1.0": [ "Apache License 1.0", false, false ], "Apache-1.1": [ "Apache License 1.1", true, false ], "Apache-2.0": [ "Apache License 2.0", true, false ], "Artistic-1.0": [ "Artistic License 1.0", true, false ], "Artistic-1.0-Perl": [ "Artistic License 1.0 (Perl)", true, false ], "Artistic-1.0-cl8": [ "Artistic License 1.0 w/clause 8", true, false ], "Artistic-2.0": [ "Artistic License 2.0", true, false ], "BSD-1-Clause": [ "BSD 1-Clause License", false, false ], "BSD-2-Clause": [ "BSD 2-Clause \"Simplified\" License", true, false ], "BSD-2-Clause-FreeBSD": [ "BSD 2-Clause FreeBSD License", false, false ], "BSD-2-Clause-NetBSD": [ "BSD 2-Clause NetBSD License", false, false ], "BSD-2-Clause-Patent": [ "BSD-2-Clause Plus Patent License", true, false ], "BSD-3-Clause": [ "BSD 3-Clause \"New\" or \"Revised\" License", true, false ], "BSD-3-Clause-Attribution": [ "BSD with attribution", false, false ], "BSD-3-Clause-Clear": [ "BSD 3-Clause Clear License", false, false ], "BSD-3-Clause-LBNL": [ "Lawrence Berkeley National Labs BSD variant license", false, false ], "BSD-3-Clause-No-Nuclear-License": [ "BSD 3-Clause No Nuclear License", false, false ], "BSD-3-Clause-No-Nuclear-License-2014": [ "BSD 3-Clause No Nuclear License 2014", false, false ], "BSD-3-Clause-No-Nuclear-Warranty": [ "BSD 3-Clause No Nuclear Warranty", false, false ], "BSD-4-Clause": [ "BSD 4-Clause \"Original\" or \"Old\" License", false, false ], "BSD-4-Clause-UC": [ "BSD-4-Clause (University of California-Specific)", false, false ], "BSD-Protection": [ "BSD Protection License", false, false ], "BSD-Source-Code": [ "BSD Source Code Attribution", false, false ], "BSL-1.0": [ "Boost Software License 1.0", true, false ], "Bahyph": [ "Bahyph License", false, false ], "Barr": [ "Barr License", false, false ], "Beerware": [ "Beerware License", false, false ], "BitTorrent-1.0": [ "BitTorrent Open Source License v1.0", false, false ], 
"BitTorrent-1.1": [ "BitTorrent Open Source License v1.1", false, false ], "Borceux": [ "Borceux license", false, false ], "CATOSL-1.1": [ "Computer Associates Trusted Open Source License 1.1", true, false ], "CC-BY-1.0": [ "Creative Commons Attribution 1.0", false, false ], "CC-BY-2.0": [ "Creative Commons Attribution 2.0", false, false ], "CC-BY-2.5": [ "Creative Commons Attribution 2.5", false, false ], "CC-BY-3.0": [ "Creative Commons Attribution 3.0", false, false ], "CC-BY-4.0": [ "Creative Commons Attribution 4.0", false, false ], "CC-BY-NC-1.0": [ "Creative Commons Attribution Non Commercial 1.0", false, false ], "CC-BY-NC-2.0": [ "Creative Commons Attribution Non Commercial 2.0", false, false ], "CC-BY-NC-2.5": [ "Creative Commons Attribution Non Commercial 2.5", false, false ], "CC-BY-NC-3.0": [ "Creative Commons Attribution Non Commercial 3.0", false, false ], "CC-BY-NC-4.0": [ "Creative Commons Attribution Non Commercial 4.0", false, false ], "CC-BY-NC-ND-1.0": [ "Creative Commons Attribution Non Commercial No Derivatives 1.0", false, false ], "CC-BY-NC-ND-2.0": [ "Creative Commons Attribution Non Commercial No Derivatives 2.0", false, false ], "CC-BY-NC-ND-2.5": [ "Creative Commons Attribution Non Commercial No Derivatives 2.5", false, false ], "CC-BY-NC-ND-3.0": [ "Creative Commons Attribution Non Commercial No Derivatives 3.0", false, false ], "CC-BY-NC-ND-4.0": [ "Creative Commons Attribution Non Commercial No Derivatives 4.0", false, false ], "CC-BY-NC-SA-1.0": [ "Creative Commons Attribution Non Commercial Share Alike 1.0", false, false ], "CC-BY-NC-SA-2.0": [ "Creative Commons Attribution Non Commercial Share Alike 2.0", false, false ], "CC-BY-NC-SA-2.5": [ "Creative Commons Attribution Non Commercial Share Alike 2.5", false, false ], "CC-BY-NC-SA-3.0": [ "Creative Commons Attribution Non Commercial Share Alike 3.0", false, false ], "CC-BY-NC-SA-4.0": [ "Creative Commons Attribution Non Commercial Share Alike 4.0", false, false ], "CC-BY-ND-1.0": 
[ "Creative Commons Attribution No Derivatives 1.0", false, false ], "CC-BY-ND-2.0": [ "Creative Commons Attribution No Derivatives 2.0", false, false ], "CC-BY-ND-2.5": [ "Creative Commons Attribution No Derivatives 2.5", false, false ], "CC-BY-ND-3.0": [ "Creative Commons Attribution No Derivatives 3.0", false, false ], "CC-BY-ND-4.0": [ "Creative Commons Attribution No Derivatives 4.0", false, false ], "CC-BY-SA-1.0": [ "Creative Commons Attribution Share Alike 1.0", false, false ], "CC-BY-SA-2.0": [ "Creative Commons Attribution Share Alike 2.0", false, false ], "CC-BY-SA-2.5": [ "Creative Commons Attribution Share Alike 2.5", false, false ], "CC-BY-SA-3.0": [ "Creative Commons Attribution Share Alike 3.0", false, false ], "CC-BY-SA-4.0": [ "Creative Commons Attribution Share Alike 4.0", false, false ], "CC0-1.0": [ "Creative Commons Zero v1.0 Universal", false, false ], "CDDL-1.0": [ "Common Development and Distribution License 1.0", true, false ], "CDDL-1.1": [ "Common Development and Distribution License 1.1", false, false ], "CDLA-Permissive-1.0": [ "Community Data License Agreement Permissive 1.0", false, false ], "CDLA-Sharing-1.0": [ "Community Data License Agreement Sharing 1.0", false, false ], "CECILL-1.0": [ "CeCILL Free Software License Agreement v1.0", false, false ], "CECILL-1.1": [ "CeCILL Free Software License Agreement v1.1", false, false ], "CECILL-2.0": [ "CeCILL Free Software License Agreement v2.0", false, false ], "CECILL-2.1": [ "CeCILL Free Software License Agreement v2.1", true, false ], "CECILL-B": [ "CeCILL-B Free Software License Agreement", false, false ], "CECILL-C": [ "CeCILL-C Free Software License Agreement", false, false ], "CNRI-Jython": [ "CNRI Jython License", false, false ], "CNRI-Python": [ "CNRI Python License", true, false ], "CNRI-Python-GPL-Compatible": [ "CNRI Python Open Source GPL Compatible License Agreement", false, false ], "CPAL-1.0": [ "Common Public Attribution License 1.0", true, false ], "CPL-1.0": [ "Common 
Public License 1.0", true, false ], "CPOL-1.02": [ "Code Project Open License 1.02", false, false ], "CUA-OPL-1.0": [ "CUA Office Public License v1.0", true, false ], "Caldera": [ "Caldera License", false, false ], "ClArtistic": [ "Clarified Artistic License", false, false ], "Condor-1.1": [ "Condor Public License v1.1", false, false ], "Crossword": [ "Crossword License", false, false ], "CrystalStacker": [ "CrystalStacker License", false, false ], "Cube": [ "Cube License", false, false ], "D-FSL-1.0": [ "Deutsche Freie Software Lizenz", false, false ], "DOC": [ "DOC License", false, false ], "DSDP": [ "DSDP License", false, false ], "Dotseqn": [ "Dotseqn License", false, false ], "ECL-1.0": [ "Educational Community License v1.0", true, false ], "ECL-2.0": [ "Educational Community License v2.0", true, false ], "EFL-1.0": [ "Eiffel Forum License v1.0", true, false ], "EFL-2.0": [ "Eiffel Forum License v2.0", true, false ], "EPL-1.0": [ "Eclipse Public License 1.0", true, false ], "EPL-2.0": [ "Eclipse Public License 2.0", true, false ], "EUDatagrid": [ "EU DataGrid Software License", true, false ], "EUPL-1.0": [ "European Union Public License 1.0", false, false ], "EUPL-1.1": [ "European Union Public License 1.1", true, false ], "EUPL-1.2": [ "European Union Public License 1.2", true, false ], "Entessa": [ "Entessa Public License v1.0", true, false ], "ErlPL-1.1": [ "Erlang Public License v1.1", false, false ], "Eurosym": [ "Eurosym License", false, false ], "FSFAP": [ "FSF All Permissive License", false, false ], "FSFUL": [ "FSF Unlimited License", false, false ], "FSFULLR": [ "FSF Unlimited License (with License Retention)", false, false ], "FTL": [ "Freetype Project License", false, false ], "Fair": [ "Fair License", true, false ], "Frameworx-1.0": [ "Frameworx Open License 1.0", true, false ], "FreeImage": [ "FreeImage Public License v1.0", false, false ], "GFDL-1.1": [ "GNU Free Documentation License v1.1", false, true ], "GFDL-1.1-only": [ "GNU Free 
Documentation License v1.1 only", false, false ], "GFDL-1.1-or-later": [ "GNU Free Documentation License v1.1 or later", false, false ], "GFDL-1.2": [ "GNU Free Documentation License v1.2", false, true ], "GFDL-1.2-only": [ "GNU Free Documentation License v1.2 only", false, false ], "GFDL-1.2-or-later": [ "GNU Free Documentation License v1.2 or later", false, false ], "GFDL-1.3": [ "GNU Free Documentation License v1.3", false, true ], "GFDL-1.3-only": [ "GNU Free Documentation License v1.3 only", false, false ], "GFDL-1.3-or-later": [ "GNU Free Documentation License v1.3 or later", false, false ], "GL2PS": [ "GL2PS License", false, false ], "GPL-1.0": [ "GNU General Public License v1.0 only", false, true ], "GPL-1.0+": [ "GNU General Public License v1.0 or later", false, true ], "GPL-1.0-only": [ "GNU General Public License v1.0 only", false, false ], "GPL-1.0-or-later": [ "GNU General Public License v1.0 or later", false, false ], "GPL-2.0": [ "GNU General Public License v2.0 only", true, true ], "GPL-2.0+": [ "GNU General Public License v2.0 or later", true, true ], "GPL-2.0-only": [ "GNU General Public License v2.0 only", true, false ], "GPL-2.0-or-later": [ "GNU General Public License v2.0 or later", true, false ], "GPL-2.0-with-GCC-exception": [ "GNU General Public License v2.0 w/GCC Runtime Library exception", false, true ], "GPL-2.0-with-autoconf-exception": [ "GNU General Public License v2.0 w/Autoconf exception", false, true ], "GPL-2.0-with-bison-exception": [ "GNU General Public License v2.0 w/Bison exception", false, true ], "GPL-2.0-with-classpath-exception": [ "GNU General Public License v2.0 w/Classpath exception", false, true ], "GPL-2.0-with-font-exception": [ "GNU General Public License v2.0 w/Font exception", false, true ], "GPL-3.0": [ "GNU General Public License v3.0 only", true, true ], "GPL-3.0+": [ "GNU General Public License v3.0 or later", true, true ], "GPL-3.0-only": [ "GNU General Public License v3.0 only", true, false ], 
"GPL-3.0-or-later": [ "GNU General Public License v3.0 or later", true, false ], "GPL-3.0-with-GCC-exception": [ "GNU General Public License v3.0 w/GCC Runtime Library exception", true, true ], "GPL-3.0-with-autoconf-exception": [ "GNU General Public License v3.0 w/Autoconf exception", false, true ], "Giftware": [ "Giftware License", false, false ], "Glide": [ "3dfx Glide License", false, false ], "Glulxe": [ "Glulxe License", false, false ], "HPND": [ "Historical Permission Notice and Disclaimer", true, false ], "HaskellReport": [ "Haskell Language Report License", false, false ], "IBM-pibs": [ "IBM PowerPC Initialization and Boot Software", false, false ], "ICU": [ "ICU License", false, false ], "IJG": [ "Independent JPEG Group License", false, false ], "IPA": [ "IPA Font License", true, false ], "IPL-1.0": [ "IBM Public License v1.0", true, false ], "ISC": [ "ISC License", true, false ], "ImageMagick": [ "ImageMagick License", false, false ], "Imlib2": [ "Imlib2 License", false, false ], "Info-ZIP": [ "Info-ZIP License", false, false ], "Intel": [ "Intel Open Source License", true, false ], "Intel-ACPI": [ "Intel ACPI Software License Agreement", false, false ], "Interbase-1.0": [ "Interbase Public License v1.0", false, false ], "JSON": [ "JSON License", false, false ], "JasPer-2.0": [ "JasPer License", false, false ], "LAL-1.2": [ "Licence Art Libre 1.2", false, false ], "LAL-1.3": [ "Licence Art Libre 1.3", false, false ], "LGPL-2.0": [ "GNU Library General Public License v2 only", true, true ], "LGPL-2.0+": [ "GNU Library General Public License v2 or later", true, true ], "LGPL-2.0-only": [ "GNU Library General Public License v2 only", true, false ], "LGPL-2.0-or-later": [ "GNU Library General Public License v2 or later", true, false ], "LGPL-2.1": [ "GNU Lesser General Public License v2.1 only", true, true ], "LGPL-2.1+": [ "GNU Library General Public License v2 or later", true, true ], "LGPL-2.1-only": [ "GNU Lesser General Public License v2.1 only", true, 
false ], "LGPL-2.1-or-later": [ "GNU Lesser General Public License v2.1 or later", true, false ], "LGPL-3.0": [ "GNU Lesser General Public License v3.0 only", true, true ], "LGPL-3.0+": [ "GNU Lesser General Public License v3.0 or later", true, true ], "LGPL-3.0-only": [ "GNU Lesser General Public License v3.0 only", true, false ], "LGPL-3.0-or-later": [ "GNU Lesser General Public License v3.0 or later", true, false ], "LGPLLR": [ "Lesser General Public License For Linguistic Resources", false, false ], "LPL-1.0": [ "Lucent Public License Version 1.0", true, false ], "LPL-1.02": [ "Lucent Public License v1.02", true, false ], "LPPL-1.0": [ "LaTeX Project Public License v1.0", false, false ], "LPPL-1.1": [ "LaTeX Project Public License v1.1", false, false ], "LPPL-1.2": [ "LaTeX Project Public License v1.2", false, false ], "LPPL-1.3a": [ "LaTeX Project Public License v1.3a", false, false ], "LPPL-1.3c": [ "LaTeX Project Public License v1.3c", true, false ], "Latex2e": [ "Latex2e License", false, false ], "Leptonica": [ "Leptonica License", false, false ], "LiLiQ-P-1.1": [ "Licence Libre du Qu\u00e9bec \u2013 Permissive version 1.1", true, false ], "LiLiQ-R-1.1": [ "Licence Libre du Qu\u00e9bec \u2013 R\u00e9ciprocit\u00e9 version 1.1", true, false ], "LiLiQ-Rplus-1.1": [ "Licence Libre du Qu\u00e9bec \u2013 R\u00e9ciprocit\u00e9 forte version 1.1", true, false ], "Libpng": [ "libpng License", false, false ], "MIT": [ "MIT License", true, false ], "MIT-CMU": [ "CMU License", false, false ], "MIT-advertising": [ "Enlightenment License (e16)", false, false ], "MIT-enna": [ "enna License", false, false ], "MIT-feh": [ "feh License", false, false ], "MITNFA": [ "MIT +no-false-attribs license", false, false ], "MPL-1.0": [ "Mozilla Public License 1.0", true, false ], "MPL-1.1": [ "Mozilla Public License 1.1", true, false ], "MPL-2.0": [ "Mozilla Public License 2.0", true, false ], "MPL-2.0-no-copyleft-exception": [ "Mozilla Public License 2.0 (no copyleft exception)", 
true, false ], "MS-PL": [ "Microsoft Public License", true, false ], "MS-RL": [ "Microsoft Reciprocal License", true, false ], "MTLL": [ "Matrix Template Library License", false, false ], "MakeIndex": [ "MakeIndex License", false, false ], "MirOS": [ "MirOS License", true, false ], "Motosoto": [ "Motosoto License", true, false ], "Multics": [ "Multics License", true, false ], "Mup": [ "Mup License", false, false ], "NASA-1.3": [ "NASA Open Source Agreement 1.3", true, false ], "NBPL-1.0": [ "Net Boolean Public License v1", false, false ], "NCSA": [ "University of Illinois/NCSA Open Source License", true, false ], "NGPL": [ "Nethack General Public License", true, false ], "NLOD-1.0": [ "Norwegian Licence for Open Government Data", false, false ], "NLPL": [ "No Limit Public License", false, false ], "NOSL": [ "Netizen Open Source License", false, false ], "NPL-1.0": [ "Netscape Public License v1.0", false, false ], "NPL-1.1": [ "Netscape Public License v1.1", false, false ], "NPOSL-3.0": [ "Non-Profit Open Software License 3.0", true, false ], "NRL": [ "NRL License", false, false ], "NTP": [ "NTP License", true, false ], "Naumen": [ "Naumen Public License", true, false ], "Net-SNMP": [ "Net-SNMP License", false, false ], "NetCDF": [ "NetCDF license", false, false ], "Newsletr": [ "Newsletr License", false, false ], "Nokia": [ "Nokia Open Source License", true, false ], "Noweb": [ "Noweb License", false, false ], "Nunit": [ "Nunit License", false, true ], "OCCT-PL": [ "Open CASCADE Technology Public License", false, false ], "OCLC-2.0": [ "OCLC Research Public License 2.0", true, false ], "ODbL-1.0": [ "ODC Open Database License v1.0", false, false ], "OFL-1.0": [ "SIL Open Font License 1.0", false, false ], "OFL-1.1": [ "SIL Open Font License 1.1", true, false ], "OGTSL": [ "Open Group Test Suite License", true, false ], "OLDAP-1.1": [ "Open LDAP Public License v1.1", false, false ], "OLDAP-1.2": [ "Open LDAP Public License v1.2", false, false ], "OLDAP-1.3": [ "Open 
LDAP Public License v1.3", false, false ], "OLDAP-1.4": [ "Open LDAP Public License v1.4", false, false ], "OLDAP-2.0": [ "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", false, false ], "OLDAP-2.0.1": [ "Open LDAP Public License v2.0.1", false, false ], "OLDAP-2.1": [ "Open LDAP Public License v2.1", false, false ], "OLDAP-2.2": [ "Open LDAP Public License v2.2", false, false ], "OLDAP-2.2.1": [ "Open LDAP Public License v2.2.1", false, false ], "OLDAP-2.2.2": [ "Open LDAP Public License 2.2.2", false, false ], "OLDAP-2.3": [ "Open LDAP Public License v2.3", false, false ], "OLDAP-2.4": [ "Open LDAP Public License v2.4", false, false ], "OLDAP-2.5": [ "Open LDAP Public License v2.5", false, false ], "OLDAP-2.6": [ "Open LDAP Public License v2.6", false, false ], "OLDAP-2.7": [ "Open LDAP Public License v2.7", false, false ], "OLDAP-2.8": [ "Open LDAP Public License v2.8", false, false ], "OML": [ "Open Market License", false, false ], "OPL-1.0": [ "Open Public License v1.0", false, false ], "OSET-PL-2.1": [ "OSET Public License version 2.1", true, false ], "OSL-1.0": [ "Open Software License 1.0", true, false ], "OSL-1.1": [ "Open Software License 1.1", false, false ], "OSL-2.0": [ "Open Software License 2.0", true, false ], "OSL-2.1": [ "Open Software License 2.1", true, false ], "OSL-3.0": [ "Open Software License 3.0", true, false ], "OpenSSL": [ "OpenSSL License", false, false ], "PDDL-1.0": [ "ODC Public Domain Dedication & License 1.0", false, false ], "PHP-3.0": [ "PHP License v3.0", true, false ], "PHP-3.01": [ "PHP License v3.01", false, false ], "Plexus": [ "Plexus Classworlds License", false, false ], "PostgreSQL": [ "PostgreSQL License", true, false ], "Python-2.0": [ "Python License 2.0", true, false ], "QPL-1.0": [ "Q Public License 1.0", true, false ], "Qhull": [ "Qhull License", false, false ], "RHeCos-1.1": [ "Red Hat eCos Public License v1.1", false, false ], "RPL-1.1": [ "Reciprocal Public License 1.1", true, false ], "RPL-1.5": [ 
"Reciprocal Public License 1.5", true, false ], "RPSL-1.0": [ "RealNetworks Public Source License v1.0", true, false ], "RSA-MD": [ "RSA Message-Digest License ", false, false ], "RSCPL": [ "Ricoh Source Code Public License", true, false ], "Rdisc": [ "Rdisc License", false, false ], "Ruby": [ "Ruby License", false, false ], "SAX-PD": [ "Sax Public Domain Notice", false, false ], "SCEA": [ "SCEA Shared Source License", false, false ], "SGI-B-1.0": [ "SGI Free Software License B v1.0", false, false ], "SGI-B-1.1": [ "SGI Free Software License B v1.1", false, false ], "SGI-B-2.0": [ "SGI Free Software License B v2.0", false, false ], "SISSL": [ "Sun Industry Standards Source License v1.1", true, false ], "SISSL-1.2": [ "Sun Industry Standards Source License v1.2", false, false ], "SMLNJ": [ "Standard ML of New Jersey License", false, false ], "SMPPL": [ "Secure Messaging Protocol Public License", false, false ], "SNIA": [ "SNIA Public License 1.1", false, false ], "SPL-1.0": [ "Sun Public License v1.0", true, false ], "SWL": [ "Scheme Widget Library (SWL) Software License Agreement", false, false ], "Saxpath": [ "Saxpath License", false, false ], "Sendmail": [ "Sendmail License", false, false ], "SimPL-2.0": [ "Simple Public License 2.0", true, false ], "Sleepycat": [ "Sleepycat License", true, false ], "Spencer-86": [ "Spencer License 86", false, false ], "Spencer-94": [ "Spencer License 94", false, false ], "Spencer-99": [ "Spencer License 99", false, false ], "StandardML-NJ": [ "Standard ML of New Jersey License", false, true ], "SugarCRM-1.1.3": [ "SugarCRM Public License v1.1.3", false, false ], "TCL": [ "TCL/TK License", false, false ], "TCP-wrappers": [ "TCP Wrappers License", false, false ], "TMate": [ "TMate Open Source License", false, false ], "TORQUE-1.1": [ "TORQUE v2.5+ Software License v1.1", false, false ], "TOSL": [ "Trusster Open Source License", false, false ], "UPL-1.0": [ "Universal Permissive License v1.0", true, false ], "Unicode-DFS-2015": [ 
"Unicode License Agreement - Data Files and Software (2015)", false, false ], "Unicode-DFS-2016": [ "Unicode License Agreement - Data Files and Software (2016)", false, false ], "Unicode-TOU": [ "Unicode Terms of Use", false, false ], "Unlicense": [ "The Unlicense", false, false ], "VOSTROM": [ "VOSTROM Public License for Open Source", false, false ], "VSL-1.0": [ "Vovida Software License v1.0", true, false ], "Vim": [ "Vim License", false, false ], "W3C": [ "W3C Software Notice and License (2002-12-31)", true, false ], "W3C-19980720": [ "W3C Software Notice and License (1998-07-20)", false, false ], "W3C-20150513": [ "W3C Software Notice and Document License (2015-05-13)", false, false ], "WTFPL": [ "Do What The F*ck You Want To Public License", false, false ], "Watcom-1.0": [ "Sybase Open Watcom Public License 1.0", true, false ], "Wsuipa": [ "Wsuipa License", false, false ], "X11": [ "X11 License", false, false ], "XFree86-1.1": [ "XFree86 License 1.1", false, false ], "XSkat": [ "XSkat License", false, false ], "Xerox": [ "Xerox License", false, false ], "Xnet": [ "X.Net License", true, false ], "YPL-1.0": [ "Yahoo! Public License v1.0", false, false ], "YPL-1.1": [ "Yahoo! 
Public License v1.1", false, false ], "ZPL-1.1": [ "Zope Public License 1.1", false, false ], "ZPL-2.0": [ "Zope Public License 2.0", true, false ], "ZPL-2.1": [ "Zope Public License 2.1", false, false ], "Zed": [ "Zed License", false, false ], "Zend-2.0": [ "Zend License v2.0", false, false ], "Zimbra-1.3": [ "Zimbra Public License v1.3", false, false ], "Zimbra-1.4": [ "Zimbra Public License v1.4", false, false ], "Zlib": [ "zlib License", true, false ], "bzip2-1.0.5": [ "bzip2 and libbzip2 License v1.0.5", false, false ], "bzip2-1.0.6": [ "bzip2 and libbzip2 License v1.0.6", false, false ], "curl": [ "curl License", false, false ], "diffmark": [ "diffmark license", false, false ], "dvipdfm": [ "dvipdfm License", false, false ], "eCos-2.0": [ "eCos license version 2.0", false, true ], "eGenix": [ "eGenix.com Public License 1.1.0", false, false ], "gSOAP-1.3b": [ "gSOAP Public License v1.3b", false, false ], "gnuplot": [ "gnuplot License", false, false ], "iMatix": [ "iMatix Standard Function Library Agreement", false, false ], "libtiff": [ "libtiff License", false, false ], "mpich2": [ "mpich2 License", false, false ], "psfrag": [ "psfrag License", false, false ], "psutils": [ "psutils License", false, false ], "wxWindows": [ "wxWindows Library License", false, true ], "xinetd": [ "xinetd License", false, false ], "xpp": [ "XPP License", false, false ], "zlib-acknowledgement": [ "zlib/libpng License with Acknowledgement", false, false ] }PK!japoetry/spdx/license.pyfrom collections import namedtuple class License(namedtuple("License", "id name is_osi_approved is_deprecated")): CLASSIFIER_SUPPORTED = { # Not OSI Approved "Aladdin", "CC0-1.0", "CECILL-B", "CECILL-C", "NPL-1.0", "NPL-1.1", # OSI Approved "AFPL", "AFL-1.1", "AFL-1.2", "AFL-2.0", "AFL-2.1", "AFL-3.0", "Apache-1.1", "Apache-2.0", "APSL-1.1", "APSL-1.2", "APSL-2.0", "Artistic-1.0", "Artistic-2.0", "AAL", "AGPL-3.0", "AGPL-3.0-only", "AGPL-3.0-or-later", "BSL-1.0", "BSD-2-Clause", "BSD-3-Clause", 
"CDDL-1.0", "CECILL-2.1", "CPL-1.0", "EFL-1.0", "EFL-2.0", "EPL-1.0", "EPL-2.0", "EUPL-1.1", "EUPL-1.2", "GPL-2.0", "GPL-2.0+", "GPL-2.0-only", "GPL-2.0-or-later", "GPL-3.0", "GPL-3.0+", "GPL-3.0-only", "GPL-3.0-or-later", "LGPL-2.0", "LGPL-2.0+", "LGPL-2.0-only", "LGPL-2.0-or-later", "LGPL-3.0", "LGPL-3.0+", "LGPL-3.0-only", "LGPL-3.0-or-later", "MIT", "MPL-1.0", "MPL-1.1", "MPL-1.2", "Nokia", "W3C", "ZPL-1.0", "ZPL-2.0", "ZPL-2.1", } CLASSIFIER_NAMES = { # Not OSI Approved "AFPL": "Aladdin Free Public License (AFPL)", "CC0-1.0": "CC0 1.0 Universal (CC0 1.0) Public Domain Dedication", "CECILL-B": "CeCILL-B Free Software License Agreement (CECILL-B)", "CECILL-C": "CeCILL-C Free Software License Agreement (CECILL-C)", "NPL-1.0": "Netscape Public License (NPL)", "NPL-1.1": "Netscape Public License (NPL)", # OSI Approved "AFL-1.1": "Academic Free License (AFL)", "AFL-1.2": "Academic Free License (AFL)", "AFL-2.0": "Academic Free License (AFL)", "AFL-2.1": "Academic Free License (AFL)", "AFL-3.0": "Academic Free License (AFL)", "Apache-1.1": "Apache Software License", "Apache-2.0": "Apache Software License", "APSL-1.1": "Apple Public Source License", "APSL-1.2": "Apple Public Source License", "APSL-2.0": "Apple Public Source License", "Artistic-1.0": "Artistic License", "Artistic-2.0": "Artistic License", "AAL": "Attribution Assurance License", "AGPL-3.0": "GNU Affero General Public License v3", "AGPL-3.0-only": "GNU Affero General Public License v3", "AGPL-3.0-or-later": "GNU Affero General Public License v3 or later (AGPLv3+)", "BSL-1.0": "Boost Software License 1.0 (BSL-1.0)", "BSD-2-Clause": "BSD License", "BSD-3-Clause": "BSD License", "CDDL-1.0": "Common Development and Distribution License 1.0 (CDDL-1.0)", "CECILL-2.1": "CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)", "CPL-1.0": "Common Public License", "EPL-1.0": "Eclipse Public License 1.0 (EPL-1.0)", "EFL-1.0": "Eiffel Forum License", "EFL-2.0": "Eiffel Forum License", "EUPL-1.1": "European 
Union Public Licence 1.1 (EUPL 1.1)", "EUPL-1.2": "European Union Public Licence 1.2 (EUPL 1.2)", "GPL-2.0": "GNU General Public License v2 (GPLv2)", "GPL-2.0-only": "GNU General Public License v2 (GPLv2)", "GPL-2.0+": "GNU General Public License v2 or later (GPLv2+)", "GPL-2.0-or-later": "GNU General Public License v2 or later (GPLv2+)", "GPL-3.0": "GNU General Public License v3 (GPLv3)", "GPL-3.0-only": "GNU General Public License v3 (GPLv3)", "GPL-3.0+": "GNU General Public License v3 or later (GPLv3+)", "GPL-3.0-or-later": "GNU General Public License v3 or later (GPLv3+)", "LGPL-2.0": "GNU Lesser General Public License v2 (LGPLv2)", "LGPL-2.0-only": "GNU Lesser General Public License v2 (LGPLv2)", "LGPL-2.0+": "GNU Lesser General Public License v2 or later (LGPLv2+)", "LGPL-2.0-or-later": "GNU Lesser General Public License v2 or later (LGPLv2+)", "LGPL-3.0": "GNU Lesser General Public License v3 (LGPLv3)", "LGPL-3.0-only": "GNU Lesser General Public License v3 (LGPLv3)", "LGPL-3.0+": "GNU Lesser General Public License v3 or later (LGPLv3+)", "LGPL-3.0-or-later": "GNU Lesser General Public License v3 or later (LGPLv3+)", "MPL-1.0": "Mozilla Public License 1.0 (MPL)", "MPL-1.1": "Mozilla Public License 1.1 (MPL 1.1)", "MPL-2.0": "Mozilla Public License 2.0 (MPL 2.0)", "W3C": "W3C License", "ZPL-1.1": "Zope Public License", "ZPL-2.0": "Zope Public License", "ZPL-2.1": "Zope Public License", } @property def classifier(self): parts = ["License"] if self.is_osi_approved: parts.append("OSI Approved") name = self.classifier_name if name is not None: parts.append(name) return " :: ".join(parts) @property def classifier_name(self): if self.id not in self.CLASSIFIER_SUPPORTED: if self.is_osi_approved: return None return "Other/Proprietary License" if self.id in self.CLASSIFIER_NAMES: return self.CLASSIFIER_NAMES[self.id] return self.name PK!XI.poetry/spdx/updater.pyimport json import os try: from urllib.request import urlopen except ImportError: from urllib2 import 
urlopen class Updater: BASE_URL = "https://raw.githubusercontent.com/spdx/license-list-data/master/json/" def __init__(self, base_url=BASE_URL): self._base_url = base_url def dump(self, file=None): if file is None: file = os.path.join(os.path.dirname(__file__), "data", "licenses.json") licenses_url = self._base_url + "licenses.json" with open(file, "w") as f: f.write( json.dumps(self.get_licenses(licenses_url), indent=2, sort_keys=True) ) def get_licenses(self, url): licenses = {} with urlopen(url) as r: data = json.loads(r.read().decode()) for info in data["licenses"]: licenses[info["licenseId"]] = [ info["name"], info["isOsiApproved"], info["isDeprecatedLicenseId"], ] return licenses PK!poetry/utils/__init__.pyPK!oԓo o poetry/utils/_compat.pyimport sys try: from functools32 import lru_cache except ImportError: from functools import lru_cache try: from glob2 import glob except ImportError: from glob import glob try: import urllib.parse as urlparse except ImportError: import urlparse try: # Python 2 long = long unicode = unicode basestring = basestring except NameError: # Python 3 long = int unicode = str basestring = str PY2 = sys.version_info[0] == 2 PY35 = sys.version_info >= (3, 5) PY36 = sys.version_info >= (3, 6) WINDOWS = sys.platform == "win32" if PY2: import pipes shell_quote = pipes.quote else: import shlex shell_quote = shlex.quote if PY35: from pathlib import Path else: from pathlib2 import Path if not PY36: from collections import OrderedDict else: OrderedDict = dict def decode(string, encodings=None): if not PY2 and not isinstance(string, bytes): return string if PY2 and isinstance(string, unicode): return string encodings = encodings or ["utf-8", "latin1", "ascii"] for encoding in encodings: try: return string.decode(encoding) except (UnicodeEncodeError, UnicodeDecodeError): pass return string.decode(encodings[0], errors="ignore") def encode(string, encodings=None): if not PY2 and isinstance(string, bytes): return string if PY2 and isinstance(string, 
str): return string encodings = encodings or ["utf-8", "latin1", "ascii"] for encoding in encodings: try: return string.encode(encoding) except (UnicodeEncodeError, UnicodeDecodeError): pass return string.encode(encodings[0], errors="ignore") def to_str(string): if isinstance(string, str) or not isinstance(string, (unicode, bytes)): return string if PY2: method = "encode" else: method = "decode" encodings = ["utf-8", "latin1", "ascii"] for encoding in encodings: try: return getattr(string, method)(encoding) except (UnicodeEncodeError, UnicodeDecodeError): pass return getattr(string, method)(encodings[0], errors="ignore") def list_to_shell_command(cmd): executable = cmd[0] if " " in executable: executable = '"{}"'.format(executable) cmd[0] = executable return " ".join(cmd) PK!O͂G"G"poetry/utils/appdirs.py""" This code was taken from https://github.com/ActiveState/appdirs and modified to suit our purposes. """ import os import sys WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") def expanduser(path): """ Expand ~ and ~user constructions. Includes a workaround for http://bugs.python.org/issue14768 """ expanded = os.path.expanduser(path) if path.startswith("~/") and expanded.startswith("//"): expanded = expanded[1:] return expanded def user_cache_dir(appname): r""" Return full path to the user-specific cache dir for this application. "appname" is the name of application. Typical user cache directories are: macOS: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) Windows: C:\Users\\AppData\Local\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir`). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. 
""" if WINDOWS: # Get the base path path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) # Add our app name and Cache directory to it path = os.path.join(path, appname, "Cache") elif sys.platform == "darwin": # Get the base path path = expanduser("~/Library/Caches") # Add our app name to it path = os.path.join(path, appname) else: # Get the base path path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) # Add our app name to it path = os.path.join(path, appname) return path def user_data_dir(appname, roaming=False): r""" Return full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: macOS: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\ ... ...Application Data\ Win XP (roaming): C:\Documents and Settings\\Local ... ...Settings\Application Data\ Win 7 (not roaming): C:\Users\\AppData\Local\ Win 7 (roaming): C:\Users\\AppData\Roaming\ For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". """ if WINDOWS: const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) elif sys.platform == "darwin": path = os.path.join(expanduser("~/Library/Application Support/"), appname) else: path = os.path.join( os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname ) return path def user_config_dir(appname, roaming=True): """Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. 
"roaming" (boolean, default True) can be set False to not use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: macOS: same as user_data_dir Unix: ~/.config/ Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by default "~/.config/". """ if WINDOWS: path = user_data_dir(appname, roaming=roaming) elif sys.platform == "darwin": path = user_data_dir(appname) else: path = os.getenv("XDG_CONFIG_HOME", expanduser("~/.config")) path = os.path.join(path, appname) return path # for the discussion regarding site_config_dirs locations # see def site_config_dirs(appname): r"""Return a list of potential user-shared config dirs for this application. "appname" is the name of application. Typical user config directories are: macOS: /Library/Application Support// Unix: /etc or $XDG_CONFIG_DIRS[i]// for each value in $XDG_CONFIG_DIRS Win XP: C:\Documents and Settings\All Users\Application ... ...Data\\ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: Hidden, but writeable on Win 7: C:\ProgramData\\ """ if WINDOWS: path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) pathlist = [os.path.join(path, appname)] elif sys.platform == "darwin": pathlist = [os.path.join("/Library/Application Support", appname)] else: # try looking in $XDG_CONFIG_DIRS xdg_config_dirs = os.getenv("XDG_CONFIG_DIRS", "/etc/xdg") if xdg_config_dirs: pathlist = [ os.path.join(expanduser(x), appname) for x in xdg_config_dirs.split(os.pathsep) ] else: pathlist = [] # always look in /etc directly as well pathlist.append("/etc") return pathlist # -- Windows support functions -- def _get_win_folder_from_registry(csidl_name): """ This is a fallback technique at best. 
I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. """ import _winreg shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", }[csidl_name] key = _winreg.OpenKey( _winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders", ) directory, _type = _winreg.QueryValueEx(key, shell_folder_name) return directory def _get_win_folder_with_ctypes(csidl_name): csidl_const = { "CSIDL_APPDATA": 26, "CSIDL_COMMON_APPDATA": 35, "CSIDL_LOCAL_APPDATA": 28, }[csidl_name] buf = ctypes.create_unicode_buffer(1024) ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) # Downgrade to short path name if have highbit chars. See # . has_high_char = False for c in buf: if ord(c) > 255: has_high_char = True break if has_high_char: buf2 = ctypes.create_unicode_buffer(1024) if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): buf = buf2 return buf.value if WINDOWS: try: import ctypes _get_win_folder = _get_win_folder_with_ctypes except ImportError: _get_win_folder = _get_win_folder_from_registry def _win_path_to_bytes(path): """Encode Windows paths to bytes. Only used on Python 2. Motivation is to be consistent with other operating systems where paths are also returned as bytes. This avoids problems mixing bytes and Unicode elsewhere in the codebase. For more details and discussion see . If encoding using ASCII and MBCS fails, return the original Unicode path. 
""" for encoding in ("ASCII", "MBCS"): try: return path.encode(encoding) except (UnicodeEncodeError, LookupError): pass return path PK!Y@Y@poetry/utils/env.pyimport json import os import platform import subprocess import sys import sysconfig import warnings from contextlib import contextmanager from subprocess import CalledProcessError from typing import Any from typing import Dict from typing import Optional from typing import Tuple from poetry.config import Config from poetry.locations import CACHE_DIR from poetry.utils._compat import Path from poetry.utils._compat import decode from poetry.utils._compat import encode from poetry.utils._compat import list_to_shell_command from poetry.version.markers import BaseMarker GET_ENVIRONMENT_INFO = """\ import json import os import platform import sys if hasattr(sys, "implementation"): info = sys.implementation.version iver = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": iver += kind[0] + str(info.serial) implementation_name = sys.implementation.name else: iver = "0" implementation_name = "" env = { "implementation_name": implementation_name, "implementation_version": iver, "os_name": os.name, "platform_machine": platform.machine(), "platform_release": platform.release(), "platform_system": platform.system(), "platform_version": platform.version(), "python_full_version": platform.python_version(), "platform_python_implementation": platform.python_implementation(), "python_version": platform.python_version()[:3], "sys_platform": sys.platform, "version_info": tuple(sys.version_info), } print(json.dumps(env)) """ GET_BASE_PREFIX = """\ import sys if hasattr(sys, "real_prefix"): print(sys.real_prefix) elif hasattr(sys, "base_prefix"): print(sys.base_prefix) else: print(sys.prefix) """ GET_CONFIG_VAR = """\ import sysconfig print(sysconfig.get_config_var("{config_var}")), """ GET_PYTHON_VERSION = """\ import sys print('.'.join([str(s) for s in sys.version_info[:3]])) """ class 
EnvError(Exception): pass class EnvCommandError(EnvError): def __init__(self, e): # type: (CalledProcessError) -> None message = "Command {} errored with the following output: \n{}".format( e.cmd, decode(e.output) ) super(EnvCommandError, self).__init__(message) class Env(object): """ An abstract Python environment. """ _env = None def __init__(self, path, base=None): # type: (Path, Optional[Path]) -> None self._is_windows = sys.platform == "win32" self._path = path bin_dir = "bin" if not self._is_windows else "Scripts" self._bin_dir = self._path / bin_dir self._base = base or path self._marker_env = None @property def path(self): # type: () -> Path return self._path @property def base(self): # type: () -> Path return self._base @property def version_info(self): # type: () -> Tuple[int] return tuple(self.marker_env["version_info"]) @property def python_implementation(self): # type: () -> str return self.marker_env["platform_python_implementation"] @property def python(self): # type: () -> str """ Path to current python executable """ return self._bin("python") @property def marker_env(self): if self._marker_env is None: self._marker_env = self.get_marker_env() return self._marker_env @property def pip(self): # type: () -> str """ Path to current pip executable """ return self._bin("pip") @classmethod def get(cls, cwd, reload=False): # type: (Path, bool) -> Env if cls._env is not None and not reload: return cls._env # Check if we are inside a virtualenv or not in_venv = os.environ.get("VIRTUAL_ENV") is not None if not in_venv: # Checking if a local virtualenv exists if (cwd / ".venv").exists(): venv = cwd / ".venv" return VirtualEnv(venv) config = Config.create("config.toml") create_venv = config.setting("settings.virtualenvs.create", True) if not create_venv: return SystemEnv(Path(sys.prefix)) venv_path = config.setting("settings.virtualenvs.path") if venv_path is None: venv_path = Path(CACHE_DIR) / "virtualenvs" else: venv_path = Path(venv_path) name = cwd.name 
name = "{}-py{}".format( name, ".".join([str(v) for v in sys.version_info[:2]]) ) venv = venv_path / name if not venv.exists(): return SystemEnv(Path(sys.prefix)) return VirtualEnv(venv) if os.environ.get("VIRTUAL_ENV") is not None: prefix = Path(os.environ["VIRTUAL_ENV"]) base_prefix = None else: prefix = Path(sys.prefix) base_prefix = cls.get_base_prefix() return VirtualEnv(prefix, base_prefix) @classmethod def create_venv(cls, cwd, io, name=None): # type: (Path, IO, bool) -> Env if cls._env is not None: return cls._env env = cls.get(cwd) if env.is_venv(): # Already inside a virtualenv. return env config = Config.create("config.toml") create_venv = config.setting("settings.virtualenvs.create") root_venv = config.setting("settings.virtualenvs.in-project") venv_path = config.setting("settings.virtualenvs.path") if root_venv: venv_path = cwd / ".venv" elif venv_path is None: venv_path = Path(CACHE_DIR) / "virtualenvs" else: venv_path = Path(venv_path) if not name: name = cwd.name name = "{}-py{}".format(name, ".".join([str(v) for v in sys.version_info[:2]])) if root_venv: venv = venv_path else: venv = venv_path / name if not venv.exists(): if create_venv is False: io.writeln( "" "Skipping virtualenv creation, " "as specified in config file." "" ) return SystemEnv(Path(sys.prefix)) io.writeln( "Creating virtualenv {} in {}".format(name, str(venv_path)) ) cls.build_venv(str(venv)) else: if io.is_very_verbose(): io.writeln("Virtualenv {} already exists.".format(name)) # venv detection: # stdlib venv may symlink sys.executable, so we can't use realpath. # but others can symlink *to* the venv Python, # so we can't just use sys.executable. 
# So we just check every item in the symlink tree (generally <= 3) p = os.path.normcase(sys.executable) paths = [p] while os.path.islink(p): p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p))) paths.append(p) p_venv = os.path.normcase(str(venv)) if any(p.startswith(p_venv) for p in paths): # Running properly in the virtualenv, don't need to do anything return SystemEnv(Path(sys.prefix), cls.get_base_prefix()) return VirtualEnv(venv) @classmethod def build_venv(cls, path): try: from venv import EnvBuilder builder = EnvBuilder(with_pip=True) build = builder.create except ImportError: # We fallback on virtualenv for Python 2.7 from virtualenv import create_environment build = create_environment build(path) @classmethod def get_base_prefix(cls): # type: () -> Path if hasattr(sys, "real_prefix"): return sys.real_prefix if hasattr(sys, "base_prefix"): return sys.base_prefix return sys.prefix def get_version_info(self): # type: () -> Tuple[int] raise NotImplementedError() def get_python_implementation(self): # type: () -> str raise NotImplementedError() def get_marker_env(self): # type: () -> Dict[str, Any] raise NotImplementedError() def config_var(self, var): # type: (str) -> Any raise NotImplementedError() def is_valid_for_marker(self, marker): # type: (BaseMarker) -> bool return marker.validate(self.marker_env) def is_sane(self): # type: () -> bool """ Checks whether the current environment is sane or not. """ return True def run(self, bin, *args, **kwargs): """ Run a command inside the Python environment. 
""" bin = self._bin(bin) cmd = [bin] + list(args) shell = kwargs.get("shell", False) call = kwargs.pop("call", False) input_ = kwargs.pop("input_", None) if shell: cmd = list_to_shell_command(cmd) try: if self._is_windows: kwargs["shell"] = True if input_: p = subprocess.Popen( cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs ) output = p.communicate(encode(input_))[0] elif call: return subprocess.call(cmd, stderr=subprocess.STDOUT, **kwargs) else: output = subprocess.check_output( cmd, stderr=subprocess.STDOUT, **kwargs ) except CalledProcessError as e: raise EnvCommandError(e) return decode(output) def execute(self, bin, *args, **kwargs): bin = self._bin(bin) return subprocess.call([bin] + list(args), **kwargs) def is_venv(self): # type: () -> bool raise NotImplementedError() def _bin(self, bin): # type: (str) -> str """ Return path to the given executable. """ bin_path = (self._bin_dir / bin).with_suffix(".exe" if self._is_windows else "") if not bin_path.exists(): return bin return str(bin_path) def __repr__(self): return '{}("{}")'.format(self.__class__.__name__, self._path) class SystemEnv(Env): """ A system (i.e. not a virtualenv) Python environment. 
""" def get_version_info(self): # type: () -> Tuple[int] return sys.version_info def get_python_implementation(self): # type: () -> str return platform.python_implementation() def get_marker_env(self): # type: () -> Dict[str, Any] if hasattr(sys, "implementation"): info = sys.implementation.version iver = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": iver += kind[0] + str(info.serial) implementation_name = sys.implementation.name else: iver = "0" implementation_name = "" return { "implementation_name": implementation_name, "implementation_version": iver, "os_name": os.name, "platform_machine": platform.machine(), "platform_release": platform.release(), "platform_system": platform.system(), "platform_version": platform.version(), "python_full_version": platform.python_version(), "platform_python_implementation": platform.python_implementation(), "python_version": platform.python_version()[:3], "sys_platform": sys.platform, "version_info": sys.version_info, } def config_var(self, var): # type: (str) -> Any try: return sysconfig.get_config_var(var) except IOError as e: warnings.warn("{0}".format(e), RuntimeWarning) return def is_venv(self): # type: () -> bool return self._path != self._base class VirtualEnv(Env): """ A virtual Python environment. """ def __init__(self, path, base=None): # type: (Path, Optional[Path]) -> None super(VirtualEnv, self).__init__(path, base) # If base is None, it probably means this is # a virtualenv created from VIRTUAL_ENV. # In this case we need to get sys.base_prefix # from inside the virtualenv. 
if base is None: self._base = Path(self.run("python", "-", input_=GET_BASE_PREFIX).strip()) def get_version_info(self): # type: () -> Tuple[int] output = self.run("python", "-", input_=GET_PYTHON_VERSION) return tuple([int(s) for s in output.strip().split(".")]) def get_python_implementation(self): # type: () -> str return self.marker_env["platform_python_implementation"] def get_marker_env(self): # type: () -> Dict[str, Any] output = self.run("python", "-", input_=GET_ENVIRONMENT_INFO) return json.loads(output) def config_var(self, var): # type: (str) -> Any try: value = self.run( "python", "-", input_=GET_CONFIG_VAR.format(config_var=var) ).strip() except EnvCommandError as e: warnings.warn("{0}".format(e), RuntimeWarning) return None if value == "None": value = None elif value == "1": value = 1 elif value == "0": value = 0 return value def is_venv(self): # type: () -> bool return True def is_sane(self): # A virtualenv is considered sane if both "python" and "pip" exist. return os.path.exists(self._bin("python")) and os.path.exists(self._bin("pip")) def run(self, bin, *args, **kwargs): with self.temp_environ(): os.environ["PATH"] = self._updated_path() os.environ["VIRTUAL_ENV"] = str(self._path) self.unset_env("PYTHONHOME") self.unset_env("__PYVENV_LAUNCHER__") return super(VirtualEnv, self).run(bin, *args, **kwargs) def execute(self, bin, *args, **kwargs): with self.temp_environ(): os.environ["PATH"] = self._updated_path() os.environ["VIRTUAL_ENV"] = str(self._path) self.unset_env("PYTHONHOME") self.unset_env("__PYVENV_LAUNCHER__") return super(VirtualEnv, self).execute(bin, *args, **kwargs) @contextmanager def temp_environ(self): environ = dict(os.environ) try: yield finally: os.environ.clear() os.environ.update(environ) def unset_env(self, key): if key in os.environ: del os.environ[key] def _updated_path(self): return os.pathsep.join([str(self._bin_dir), os.environ["PATH"]]) class NullEnv(SystemEnv): def __init__(self, path=None, base=None, execute=False): if 
path is None: path = Path(sys.prefix) super(NullEnv, self).__init__(path, base=base) self._execute = execute self.executed = [] def run(self, bin, *args): self.executed.append([bin] + list(args)) if self._execute: return super(NullEnv, self).run(bin, *args) def _bin(self, bin): return bin class MockEnv(NullEnv): def __init__( self, version_info=(3, 7, 0), python_implementation="CPython", platform="darwin", os_name="posix", is_venv=False, **kwargs ): super(MockEnv, self).__init__(**kwargs) self._version_info = version_info self._python_implementation = python_implementation self._platform = platform self._os_name = os_name self._is_venv = is_venv @property def version_info(self): # type: () -> Tuple[int] return self._version_info @property def python_implementation(self): # type: () -> str return self._python_implementation @property def platform(self): # type: () -> str return self._platform @property def os(self): # type: () -> str return self._os_name def is_venv(self): # type: () -> bool return self._is_venv PK!3U^ ^ poetry/utils/helpers.pyimport os import re import shutil import stat import tempfile from contextlib import contextmanager from typing import List from typing import Optional from typing import Union from poetry.config import Config from poetry.version import Version _canonicalize_regex = re.compile("[-_]+") def canonicalize_name(name): # type: (str) -> str return _canonicalize_regex.sub("-", name).lower() def module_name(name): # type: (str) -> str return canonicalize_name(name).replace(".", "_").replace("-", "_") def normalize_version(version): # type: (str) -> str return str(Version(version)) @contextmanager def temporary_directory(*args, **kwargs): try: from tempfile import TemporaryDirectory with TemporaryDirectory(*args, **kwargs) as name: yield name except ImportError: name = tempfile.mkdtemp(*args, **kwargs) yield name shutil.rmtree(name) def parse_requires(requires): # type: (str) -> List[str] lines = requires.split("\n") requires_dist = [] 
in_section = False current_marker = None for line in lines: line = line.strip() if not line: if in_section: in_section = False continue if line.startswith("["): # extras or conditional dependencies marker = line.lstrip("[").rstrip("]") if ":" not in marker: extra, marker = marker, None else: extra, marker = marker.split(":") if extra: if marker: marker = '{} and extra == "{}"'.format(marker, extra) else: marker = 'extra == "{}"'.format(extra) if marker: current_marker = marker continue if current_marker: line = "{}; {}".format(line, current_marker) requires_dist.append(line) return requires_dist def get_http_basic_auth( config, repository_name ): # type: (Config, str) -> Optional[tuple] repo_auth = config.setting("http-basic.{}".format(repository_name)) if repo_auth: return repo_auth["username"], repo_auth.get("password") return None def _on_rm_error(func, path, exc_info): os.chmod(path, stat.S_IWRITE) func(path) def safe_rmtree(path): shutil.rmtree(path, onerror=_on_rm_error) PK!|poetry/utils/patterns.pyimport re wheel_file_re = re.compile( r"""^(?P(?P.+?)(-(?P\d.+?))?) ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?) \.whl|\.dist-info)$""", re.VERBOSE, ) PK!phy/y/poetry/utils/setup_reader.pyimport ast try: from configparser import ConfigParser except ImportError: from ConfigParser import ConfigParser from typing import Any from typing import Dict from typing import Iterable from typing import List from typing import Optional from typing import Tuple from typing import Union from ._compat import PY35 from ._compat import basestring from ._compat import Path class SetupReader(object): """ Class that reads a setup.py file without executing it. 
""" DEFAULT = { "name": None, "version": None, "install_requires": [], "extras_require": {}, "python_requires": None, } FILES = ["setup.py", "setup.cfg"] @classmethod def read_from_directory( cls, directory ): # type: (Union[basestring, Path]) -> Dict[str, Union[List, Dict]] if isinstance(directory, basestring): directory = Path(directory) result = cls.DEFAULT.copy() for filename in cls.FILES: filepath = directory / filename if not filepath.exists(): continue new_result = getattr(cls(), "read_{}".format(filename.replace(".", "_")))( filepath ) for key in result.keys(): if new_result[key]: result[key] = new_result[key] return result @classmethod def _is_empty_result(cls, result): # type: (Dict[str, Any]) -> bool return ( not result["install_requires"] and not result["extras_require"] and not result["python_requires"] ) def read_setup_py( self, filepath ): # type: (Union[basestring, Path]) -> Dict[str, Union[List, Dict]] if not PY35: return self.DEFAULT if isinstance(filepath, basestring): filepath = Path(filepath) with filepath.open(encoding="utf-8") as f: content = f.read() result = {} body = ast.parse(content).body setup_call, body = self._find_setup_call(body) if not setup_call: return self.DEFAULT # Inspecting keyword arguments result["name"] = self._find_single_string(setup_call, body, "name") result["version"] = self._find_single_string(setup_call, body, "version") result["install_requires"] = self._find_install_requires(setup_call, body) result["extras_require"] = self._find_extras_require(setup_call, body) result["python_requires"] = self._find_single_string( setup_call, body, "python_requires" ) return result def read_setup_cfg( self, filepath ): # type: (Union[basestring, Path]) -> Dict[str, Union[List, Dict]] parser = ConfigParser() parser.read(str(filepath)) name = None version = None if parser.has_option("metadata", "name"): name = parser.get("metadata", "name") if parser.has_option("metadata", "version"): version = parser.get("metadata", "version") 
install_requires = [] extras_require = {} python_requires = None if parser.has_section("options"): if parser.has_option("options", "install_requires"): for dep in parser.get("options", "install_requires").split("\n"): dep = dep.strip() if not dep: continue install_requires.append(dep) if parser.has_option("options", "python_requires"): python_requires = parser.get("options", "python_requires") if parser.has_section("options.extras_require"): for group in parser.options("options.extras_require"): extras_require[group] = [] deps = parser.get("options.extras_require", group) for dep in deps.split("\n"): dep = dep.strip() if not dep: continue extras_require[group].append(dep) return { "name": name, "version": version, "install_requires": install_requires, "extras_require": extras_require, "python_requires": python_requires, } def _find_setup_call( self, elements ): # type: (List[Any]) -> Tuple[Optional[ast.Call], Optional[List[Any]]] funcdefs = [] for i, element in enumerate(elements): if isinstance(element, ast.If) and i == len(elements) - 1: # Checking if the last element is an if statement # and if it is 'if __name__ == "__main__"' which # could contain the call to setup() test = element.test if not isinstance(test, ast.Compare): continue left = test.left if not isinstance(left, ast.Name): continue if left.id != "__name__": continue setup_call, body = self._find_sub_setup_call([element]) if not setup_call: continue return setup_call, body + elements if not isinstance(element, ast.Expr): if isinstance(element, ast.FunctionDef): funcdefs.append(element) continue value = element.value if not isinstance(value, ast.Call): continue func = value.func if not isinstance(func, ast.Name): continue if func.id != "setup": continue return value, elements # Nothing, we inspect the function definitions return self._find_sub_setup_call(funcdefs) def _find_sub_setup_call( self, elements ): # type: (List[Any]) -> Tuple[Optional[ast.Call], Optional[List[Any]]] for element in elements: 
if not isinstance(element, (ast.FunctionDef, ast.If)): continue setup_call = self._find_setup_call(element.body) if setup_call != (None, None): setup_call, body = setup_call body = elements + body return setup_call, body return None, None def _find_install_requires( self, call, body ): # type: (ast.Call, Iterable[Any]) -> List[str] install_requires = [] value = self._find_in_call(call, "install_requires") if value is None: # Trying to find in kwargs kwargs = self._find_call_kwargs(call) if kwargs is None or not isinstance(kwargs, ast.Name): return install_requires variable = self._find_variable_in_body(body, kwargs.id) if not isinstance(variable, (ast.Dict, ast.Call)): return install_requires if isinstance(variable, ast.Call): if not isinstance(variable.func, ast.Name): return install_requires if variable.func.id != "dict": return install_requires value = self._find_in_call(variable, "install_requires") else: value = self._find_in_dict(variable, "install_requires") if value is None: return install_requires if isinstance(value, ast.List): for el in value.elts: install_requires.append(el.s) elif isinstance(value, ast.Name): variable = self._find_variable_in_body(body, value.id) if variable is not None and isinstance(variable, ast.List): for el in variable.elts: install_requires.append(el.s) return install_requires def _find_extras_require( self, call, body ): # type: (ast.Call, Iterable[Any]) -> Dict[str, List] extras_require = {} value = self._find_in_call(call, "extras_require") if value is None: # Trying to find in kwargs kwargs = self._find_call_kwargs(call) if kwargs is None or not isinstance(kwargs, ast.Name): return extras_require variable = self._find_variable_in_body(body, kwargs.id) if not isinstance(variable, (ast.Dict, ast.Call)): return extras_require if isinstance(variable, ast.Call): if not isinstance(variable.func, ast.Name): return extras_require if variable.func.id != "dict": return extras_require value = self._find_in_call(variable, 
"extras_require") else: value = self._find_in_dict(variable, "extras_require") if value is None: return extras_require if isinstance(value, ast.Dict): for key, val in zip(value.keys, value.values): if isinstance(val, ast.Name): val = self._find_variable_in_body(body, val.id) if isinstance(val, ast.List): extras_require[key.s] = [e.s for e in val.elts] elif isinstance(value, ast.Name): variable = self._find_variable_in_body(body, value.id) if variable is None or not isinstance(variable, ast.Dict): return extras_require for key, val in zip(variable.keys, variable.values): if isinstance(val, ast.Name): val = self._find_variable_in_body(body, val.id) if isinstance(val, ast.List): extras_require[key.s] = [e.s for e in val.elts] return extras_require def _find_single_string( self, call, body, name ): # type: (ast.Call, List[Any], str) -> Optional[str] value = self._find_in_call(call, name) if value is None: # Trying to find in kwargs kwargs = self._find_call_kwargs(call) if kwargs is None or not isinstance(kwargs, ast.Name): return variable = self._find_variable_in_body(body, kwargs.id) if not isinstance(variable, (ast.Dict, ast.Call)): return if isinstance(variable, ast.Call): if not isinstance(variable.func, ast.Name): return if variable.func.id != "dict": return value = self._find_in_call(variable, name) else: value = self._find_in_dict(variable, name) if value is None: return if isinstance(value, ast.Str): return value.s elif isinstance(value, ast.Name): variable = self._find_variable_in_body(body, value.id) if variable is not None and isinstance(variable, ast.Str): return variable.s def _find_in_call(self, call, name): # type: (ast.Call, str) -> Optional[Any] for keyword in call.keywords: if keyword.arg == name: return keyword.value def _find_call_kwargs(self, call): # type: (ast.Call) -> Optional[Any] kwargs = None for keyword in call.keywords: if keyword.arg is None: kwargs = keyword.value return kwargs def _find_variable_in_body( self, body, name ): # type: 
(Iterable[Any], str) -> Optional[Any] found = None for elem in body: if found: break if not isinstance(elem, ast.Assign): continue for target in elem.targets: if not isinstance(target, ast.Name): continue if target.id == name: return elem.value def _find_in_dict(self, dict_, name): # type: (ast.Call, str) -> Optional[Any] for key, val in zip(dict_.keys, dict_.values): if isinstance(key, ast.Str) and key.s == name: return val PK! m~poetry/utils/shell.pyimport os from shellingham import detect_shell from shellingham import ShellDetectionFailure class Shell: """ Represents the current shell. """ _shell = None def __init__(self, name, path): # type: (str, str) -> None self._name = name self._path = path @property def name(self): # type: () -> str return self._name @property def path(self): # type: () -> str return self._path @classmethod def get(cls): # type: () -> Shell """ Retrieve the current shell. """ if cls._shell is not None: return cls._shell try: name, path = detect_shell(os.getpid()) except (RuntimeError, ShellDetectionFailure): raise RuntimeError("Unable to detect the current shell.") cls._shell = cls(name, path) return cls._shell def __repr__(self): # type: () -> str return '{}("{}", "{}")'.format(self.__class__.__name__, self._name, self._path) PK!LA1poetry/utils/toml_file.py# -*- coding: utf-8 -*- from tomlkit.toml_file import TOMLFile as BaseTOMLFile from typing import Union from ._compat import Path class TomlFile(BaseTOMLFile): def __init__(self, path): # type: (Union[str, Path]) -> None super(TomlFile, self).__init__(str(path)) self._path_ = Path(path) @property def path(self): # type: () -> Path return self._path_ def __getattr__(self, item): return getattr(self._path_, item) def __str__(self): return str(self._path) PK!qSSpoetry/vcs/__init__.pyimport subprocess import warnings from poetry.utils._compat import Path from .git import Git def get_vcs(directory): # type: (Path) -> Git directory = directory.resolve() for p in [directory] + 
list(directory.parents): if (p / ".git").is_dir(): try: return Git(p) except (subprocess.CalledProcessError, OSError): # Either git could not be found or does not exist warnings.warn( "git executable could not be found", category=RuntimeWarning ) return PK!>ճ. poetry/vcs/git.py# -*- coding: utf-8 -*- import re import subprocess from poetry.utils._compat import decode class GitConfig: def __init__(self, requires_git_presence=False): self._config = {} try: config_list = decode( subprocess.check_output( ["git", "config", "-l"], stderr=subprocess.STDOUT ) ) m = re.findall("(?ms)^([^=]+)=(.*?)$", config_list) if m: for group in m: self._config[group[0]] = group[1] except (subprocess.CalledProcessError, OSError): if requires_git_presence: raise def get(self, key, default=None): return self._config.get(key, default) def __getitem__(self, item): return self._config[item] class Git: def __init__(self, work_dir=None): self._config = GitConfig(requires_git_presence=True) self._work_dir = work_dir @property def config(self): # type: () -> GitConfig return self._config def clone(self, repository, dest): # type: (...) -> str return self.run("clone", repository, str(dest)) def checkout(self, rev, folder=None): # type: (...) -> str args = [] if folder is None and self._work_dir: folder = self._work_dir if folder: args += [ "--git-dir", (folder / ".git").as_posix(), "--work-tree", folder.as_posix(), ] args += ["checkout", rev] return self.run(*args) def rev_parse(self, rev, folder=None): # type: (...) -> str args = [] if folder is None and self._work_dir: folder = self._work_dir if folder: args += [ "--git-dir", (folder / ".git").as_posix(), "--work-tree", folder.as_posix(), ] args += ["rev-parse", rev] return self.run(*args) def get_ignored_files(self, folder=None): # type: (...) 
-> list args = [] if folder is None and self._work_dir: folder = self._work_dir if folder: args += [ "--git-dir", (folder / ".git").as_posix(), "--work-tree", folder.as_posix(), ] args += ["ls-files", "--others", "-i", "--exclude-standard"] output = self.run(*args) return output.split("\n") def run(self, *args): # type: (...) -> str return decode( subprocess.check_output(["git"] + list(args), stderr=subprocess.STDOUT) ) PK!Nt)upoetry/version/__init__.pyimport operator from typing import Union from .exceptions import InvalidVersion from .legacy_version import LegacyVersion from .version import Version OP_EQ = operator.eq OP_LT = operator.lt OP_LE = operator.le OP_GT = operator.gt OP_GE = operator.ge OP_NE = operator.ne _trans_op = { "=": OP_EQ, "==": OP_EQ, "<": OP_LT, "<=": OP_LE, ">": OP_GT, ">=": OP_GE, "!=": OP_NE, } def parse( version, strict=False # type: str # type: bool ): # type:(...) -> Union[Version, LegacyVersion] """ Parse the given version string and return either a :class:`Version` object or a LegacyVersion object depending on if the given version is a valid PEP 440 version or a legacy version. If strict=True only PEP 440 versions will be accepted. 
""" try: return Version(version) except InvalidVersion: if strict: raise return LegacyVersion(version) PK!bVpoetry/version/base.pyclass BaseVersion: def __hash__(self): return hash(self._key) def __lt__(self, other): return self._compare(other, lambda s, o: s < o) def __le__(self, other): return self._compare(other, lambda s, o: s <= o) def __eq__(self, other): return self._compare(other, lambda s, o: s == o) def __ge__(self, other): return self._compare(other, lambda s, o: s >= o) def __gt__(self, other): return self._compare(other, lambda s, o: s > o) def __ne__(self, other): return self._compare(other, lambda s, o: s != o) def _compare(self, other, method): if not isinstance(other, BaseVersion): return NotImplemented return method(self._key, other._key) PK!XE%,,poetry/version/exceptions.pyclass InvalidVersion(ValueError): pass PK!iipoetry/version/helpers.pyfrom poetry.semver import parse_constraint from poetry.semver import Version from poetry.semver import VersionUnion PYTHON_VERSION = [ "2.7.*", "3.0.*", "3.1.*", "3.2.*", "3.3.*", "3.4.*", "3.5.*", "3.6.*", "3.7.*", "3.8.*", ] def format_python_constraint(constraint): """ This helper will help in transforming disjunctive constraint into proper constraint. 
""" if isinstance(constraint, Version): if constraint.precision >= 3: return "=={}".format(str(constraint)) # Transform 3.6 or 3 if constraint.precision == 2: # 3.6 constraint = parse_constraint( "~{}.{}".format(constraint.major, constraint.minor) ) else: constraint = parse_constraint("^{}.0".format(constraint.major)) if not isinstance(constraint, VersionUnion): return str(constraint) formatted = [] accepted = [] for version in PYTHON_VERSION: version_constraint = parse_constraint(version) matches = constraint.allows_any(version_constraint) if not matches: formatted.append("!=" + version) else: accepted.append(version) # Checking lower bound low = accepted[0] formatted.insert(0, ">=" + ".".join(low.split(".")[:2])) return ", ".join(formatted) PK!Ѷ poetry/version/legacy_version.pyimport re from .base import BaseVersion class LegacyVersion(BaseVersion): def __init__(self, version): self._version = str(version) self._key = _legacy_cmpkey(self._version) def __str__(self): return self._version def __repr__(self): return "".format(repr(str(self))) @property def public(self): return self._version @property def base_version(self): return self._version @property def local(self): return None @property def is_prerelease(self): return False @property def is_postrelease(self): return False _legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) _legacy_version_replacement_map = { "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", } def _parse_version_parts(s): for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) if not part or part == ".": continue if part[:1] in "0123456789": # pad for numeric comparison yield part.zfill(8) else: yield "*" + part # ensure that alpha/beta/candidate are before final yield "*final" def _legacy_cmpkey(version): # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. 
This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, # as before all PEP 440 versions. epoch = -1 # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. parts = [] for part in _parse_version_parts(version.lower()): if part.startswith("*"): # remove "-" before a prerelease tag if part < "*final": while parts and parts[-1] == "*final-": parts.pop() # remove trailing zeros from each series of numeric parts while parts and parts[-1] == "00000000": parts.pop() parts.append(part) parts = tuple(parts) return epoch, parts PK!TyCCpoetry/version/markers.pyimport re from pyparsing import ParseException, ParseResults, stringStart, stringEnd from pyparsing import ZeroOrMore, Group, Forward, QuotedString from pyparsing import Literal as L # noqa from typing import Any from typing import Dict from typing import Iterator from typing import List class InvalidMarker(ValueError): """ An invalid marker was found, users should refer to PEP 508. """ class UndefinedComparison(ValueError): """ An invalid operation was attempted on a value that doesn't support it. """ class UndefinedEnvironmentName(ValueError): """ A name was attempted to be used that does not exist inside of the environment. 
""" class Node(object): def __init__(self, value): self.value = value def __str__(self): return str(self.value) def __repr__(self): return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) def serialize(self): raise NotImplementedError class Variable(Node): def serialize(self): return str(self) class Value(Node): def serialize(self): return '"{0}"'.format(self) class Op(Node): def serialize(self): return str(self) VARIABLE = ( L("implementation_version") | L("platform_python_implementation") | L("implementation_name") | L("python_full_version") | L("platform_release") | L("platform_version") | L("platform_machine") | L("platform_system") | L("python_version") | L("sys_platform") | L("os_name") | L("os.name") | L("sys.platform") # PEP-345 | L("platform.version") # PEP-345 | L("platform.machine") # PEP-345 | L("platform.python_implementation") # PEP-345 | L("python_implementation") # PEP-345 | L("extra") # undocumented setuptools legacy ) ALIASES = { "os.name": "os_name", "sys.platform": "sys_platform", "platform.version": "platform_version", "platform.machine": "platform_machine", "platform.python_implementation": "platform_python_implementation", "python_implementation": "platform_python_implementation", } VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) VERSION_CMP = ( L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") ) MARKER_OP = VERSION_CMP | L("not in") | L("in") MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) MARKER_VALUE = QuotedString("'") | QuotedString('"') MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) BOOLOP = L("and") | L("or") MARKER_VAR = VARIABLE | MARKER_VALUE MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) LPAREN = L("(").suppress() RPAREN = L(")").suppress() MARKER_EXPR = Forward() MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) 
MARKER = stringStart + MARKER_EXPR + stringEnd _undefined = object() def _coerce_parse_result(results): if isinstance(results, ParseResults): return [_coerce_parse_result(i) for i in results] else: return results def _format_marker(marker, first=True): assert isinstance(marker, (list, tuple, str)) # Sometimes we have a structure like [[...]] which is a single item list # where the single item is itself it's own list. In that case we want skip # the rest of this function so that we don't get extraneous () on the # outside. if ( isinstance(marker, list) and len(marker) == 1 and isinstance(marker[0], (list, tuple)) ): return _format_marker(marker[0]) if isinstance(marker, list): inner = (_format_marker(m, first=False) for m in marker) if first: return " ".join(inner) else: return "(" + " ".join(inner) + ")" elif isinstance(marker, tuple): return " ".join([m.serialize() for m in marker]) else: return marker class BaseMarker(object): def intersect(self, other): # type: (BaseMarker) -> BaseMarker raise NotImplementedError() def union(self, other): # type: (BaseMarker) -> BaseMarker raise NotImplementedError() def is_any(self): # type: () -> bool return False def is_empty(self): # type: () -> bool return False def validate(self, environment): # type: (Dict[str, Any]) -> bool raise NotImplementedError() def without_extras(self): # type: () -> BaseMarker raise NotImplementedError() def __repr__(self): return "<{} {}>".format(self.__class__.__name__, str(self)) class AnyMarker(BaseMarker): def intersect(self, other): return other def union(self, other): return self def is_any(self): return True def is_empty(self): # type: () -> bool return False def validate(self, environment): return True def without_extras(self): return self def __str__(self): return "" def __repr__(self): return "" class EmptyMarker(BaseMarker): def intersect(self, other): return self def union(self, other): return other def is_any(self): return False def is_empty(self): # type: () -> bool return True def 
validate(self, environment): return False def without_extras(self): return self def __str__(self): return "" def __repr__(self): return "" class SingleMarker(BaseMarker): _CONSTRAINT_RE = re.compile(r"(?i)^(~=|!=|>=?|<=?|==?|in|not in)?\s*(.+)$") def __init__(self, name, constraint): from poetry.packages.constraints import ( parse_constraint as parse_generic_constraint, ) from poetry.semver import parse_constraint self._name = name self._constraint_string = str(constraint) # Extract operator and value m = self._CONSTRAINT_RE.match(self._constraint_string) self._operator = m.group(1) if self._operator is None: self._operator = "==" self._value = m.group(2) self._parser = parse_generic_constraint if self._name == "python_version": self._parser = parse_constraint if name == "python_version": if self._operator in {"in", "not in"}: versions = [] for v in re.split("[ ,]+", self._value): split = v.split(".") if len(split) in [1, 2]: split.append("*") op = "" if self._operator == "in" else "!=" else: op = "==" if self._operator == "in" else "!=" versions.append(op + ".".join(split)) glue = ", " if self._operator == "in": glue = " || " self._constraint = self._parser(glue.join(versions)) else: self._constraint = self._parser(self._constraint_string) else: self._constraint = self._parser(self._constraint_string) @property def name(self): return self._name @property def constraint_string(self): if self._operator in {"in", "not in"}: return "{} {}".format(self._operator, self._value) return self._constraint_string @property def constraint(self): return self._constraint @property def operator(self): return self._operator @property def value(self): return self._value def intersect(self, other): if isinstance(other, SingleMarker): if other.name != self.name: return MultiMarker(self, other) if self == other: return self if self._operator in {"in", "not in"} or other.operator in {"in", "not in"}: return MultiMarker.of(self, other) new_constraint = 
self._constraint.intersect(other.constraint) if new_constraint.is_empty(): return EmptyMarker() if new_constraint == self._constraint or new_constraint == other.constraint: return SingleMarker(self._name, new_constraint) return MultiMarker.of(self, other) return other.intersect(self) def union(self, other): if isinstance(other, SingleMarker): if self == other: return self return MarkerUnion(self, other) return other.union(self) def validate(self, environment): if environment is None: return True if self._name not in environment: return True return self._constraint.allows(self._parser(environment[self._name])) def without_extras(self): if self.name == "extra": return EmptyMarker() return self def __eq__(self, other): if not isinstance(other, SingleMarker): return False return self._name == other.name and self._constraint == other.constraint def __hash__(self): return hash((self._name, self._constraint_string)) def __str__(self): return _format_marker( (Variable(self._name), Op(self._operator), Value(self._value)) ) def _flatten_markers( markers, flatten_class ): # type: (Iterator[BaseMarker], Any) -> List[BaseMarker] flattened = [] for marker in markers: if isinstance(marker, flatten_class): flattened += _flatten_markers(marker.markers, flatten_class) else: flattened.append(marker) return flattened class MultiMarker(BaseMarker): def __init__(self, *markers): self._markers = [] markers = _flatten_markers(markers, MultiMarker) for m in markers: self._markers.append(m) @classmethod def of(cls, *markers): new_markers = [] markers = _flatten_markers(markers, MultiMarker) for marker in markers: if marker in new_markers or marker.is_empty(): continue if isinstance(marker, SingleMarker): intersected = False for i, mark in enumerate(new_markers): if ( not isinstance(mark, SingleMarker) or isinstance(mark, SingleMarker) and mark.name != marker.name ): continue intersection = mark.constraint.intersect(marker.constraint) if intersection == mark.constraint: intersected = True 
break elif intersection == marker.constraint: new_markers[i] = marker intersected = True break elif intersection.is_empty(): return EmptyMarker() if intersected: continue new_markers.append(marker) if not new_markers: return EmptyMarker() return MultiMarker(*new_markers) @property def markers(self): return self._markers def intersect(self, other): if other.is_any(): return self if other.is_empty(): return other new_markers = self._markers + [other] return MultiMarker.of(*new_markers) def union(self, other): if isinstance(other, (SingleMarker, MultiMarker)): return MarkerUnion(self, other) return other.union(self) def validate(self, environment): for m in self._markers: if not m.validate(environment): return False return True def without_extras(self): new_markers = [] for m in self._markers: marker = m.without_extras() if not marker.is_empty(): new_markers.append(marker) return self.of(*new_markers) def __eq__(self, other): if not isinstance(other, MultiMarker): return False return set(self._markers) == set(other.markers) def __hash__(self): h = hash("multi") for m in self._markers: h |= hash(m) return h def __str__(self): elements = [] for m in self._markers: if isinstance(m, SingleMarker): elements.append(str(m)) elif isinstance(m, MultiMarker): elements.append(str(m)) else: elements.append("({})".format(str(m))) return " and ".join(elements) class MarkerUnion(BaseMarker): def __init__(self, *markers): self._markers = [] markers = _flatten_markers(markers, MarkerUnion) for marker in markers: if marker in self._markers: continue if isinstance(marker, SingleMarker) and marker.name == "python_version": intersected = False for i, mark in enumerate(self._markers): if ( not isinstance(mark, SingleMarker) or isinstance(mark, SingleMarker) and mark.name != marker.name ): continue intersection = mark.constraint.union(marker.constraint) if intersection == mark.constraint: intersected = True break elif intersection == marker.constraint: self._markers[i] = marker intersected 
= True break if intersected: continue self._markers.append(marker) @property def markers(self): return self._markers def append(self, marker): if marker in self._markers: return self._markers.append(marker) def intersect(self, other): if other.is_any(): return self if other.is_empty(): return other new_markers = [] if isinstance(other, (SingleMarker, MultiMarker)): for marker in self._markers: intersection = marker.intersect(other) if not intersection.is_empty(): new_markers.append(intersection) elif isinstance(other, MarkerUnion): for our_marker in self._markers: for their_marker in other.markers: intersection = our_marker.intersect(their_marker) if not intersection.is_empty(): new_markers.append(intersection) return MarkerUnion(*new_markers) def union(self, other): if other.is_any(): return other if other.is_empty(): return self new_markers = self._markers + [other] return MarkerUnion(*new_markers) def validate(self, environment): for m in self._markers: if m.validate(environment): return True return False def without_extras(self): new_markers = [] for m in self._markers: marker = m.without_extras() if not marker.is_empty(): new_markers.append(marker) return MarkerUnion(*new_markers) def __eq__(self, other): if not isinstance(other, MarkerUnion): return False return set(self._markers) == set(other.markers) def __hash__(self): h = hash("union") for m in self._markers: h |= hash(m) return h def __str__(self): return " or ".join(str(m) for m in self._markers) def parse_marker(marker): if marker == "": return EmptyMarker() if not marker or marker == "*": return AnyMarker() markers = _coerce_parse_result(MARKER.parseString(marker)) return _compact_markers(markers) def _compact_markers(markers): groups = [MultiMarker()] for marker in markers: if isinstance(marker, list): groups[-1] = MultiMarker.of(groups[-1], _compact_markers(marker)) elif isinstance(marker, tuple): lhs, op, rhs = marker if isinstance(lhs, Variable): name = lhs.value value = rhs.value else: value = 
lhs.value name = rhs.value groups[-1] = MultiMarker.of( groups[-1], SingleMarker(name, "{}{}".format(op, value)) ) else: if marker == "or": groups.append(MultiMarker()) for i, group in enumerate(reversed(groups)): if group.is_empty(): del groups[len(groups) - 1 - i] continue if isinstance(group, MultiMarker) and len(group.markers) == 1: groups[len(groups) - 1 - i] = group.markers[0] if not groups: return EmptyMarker() if len(groups) == 1: return groups[0] return MarkerUnion(*groups) PK!uU""poetry/version/requirements.py# This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. from __future__ import absolute_import, division, print_function import string import re try: import urllib.parse as urlparse except ImportError: from urlparse import urlparse from pyparsing import stringStart, stringEnd, originalTextFor, ParseException from pyparsing import ZeroOrMore, Word, Optional, Regex, Combine from pyparsing import Literal as L # noqa from poetry.semver import parse_constraint from .markers import MARKER_EXPR from .markers import parse_marker LEGACY_REGEX = r""" (?P(==|!=|<=|>=|<|>)) \s* (?P [^,;\s)]* # Since this is a "legacy" specifier, and the version # string can be just about anything, we match everything # except for whitespace, a semi-colon for marker support, # a closing paren since versions can be enclosed in # them, and a comma since it's a version separator. ) """ REGEX = r""" (?P(~=|==|!=|<=|>=|<|>|===)) (?P (?: # The identity operators allow for an escape hatch that will # do an exact string match of the version you wish to install. # This will not be parsed by PEP 440 and we cannot determine # any semantic meaning from it. This operator is discouraged # but included entirely as an escape hatch. 
(?<====) # Only match for the identity operator \s* [^\s]* # We just match everything, except for whitespace # since we are only testing for strict identity. ) | (?: # The (non)equality operators allow for wild card and local # versions to be specified so we have to define these two # operators separately to enable that. (?<===|!=) # Only match for equals and not equals \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)* # release (?: # pre release [-_\.]? (a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? # You cannot use a wild card and a dev or local version # together so group them with a | and make them optional. (?: (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local | \.\* # Wild card syntax of .* )? ) | (?: # The compatible operator requires at least two digits in the # release segment. (?<=~=) # Only match for the compatible operator \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) (?: # pre release [-_\.]? (a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release ) | (?: # All other operators only allow a sub set of what the # (non)equality operators do. Specifically they do not allow # local versions to be specified nor do they allow the prefix # matching wild cards. (?".format(str(self)) PK!M۠pppoetry/version/specifiers.py# This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
from __future__ import absolute_import, division, print_function import abc import functools import itertools import re import sys from .legacy_version import LegacyVersion from .version import Version PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 # flake8: noqa if PY3: string_types = (str,) else: string_types = (basestring,) def with_metaclass(meta, *bases): """ Create a base class with a metaclass. """ # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, "temporary_class", (), {}) def parse(version): """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is a valid PEP 440 version or a legacy version. """ try: return Version(version) except InvalidVersion: return LegacyVersion(version) class InvalidVersion(ValueError): """ An invalid version was found, users should refer to PEP 440. """ class InvalidSpecifier(ValueError): """ An invalid specifier was found, users should refer to PEP 440. """ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __str__(self): """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. """ @abc.abstractmethod def __hash__(self): """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod def __eq__(self, other): """ Returns a boolean representing whether or not the two Specifier like objects are equal. """ @abc.abstractmethod def __ne__(self, other): """ Returns a boolean representing whether or not the two Specifier like objects are not equal. """ @abc.abstractproperty def prereleases(self): """ Returns whether or not pre-releases as a whole are allowed by this specifier. 
""" @prereleases.setter def prereleases(self, value): """ Sets whether or not pre-releases as a whole are allowed by this specifier. """ @abc.abstractmethod def contains(self, item, prereleases=None): """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter(self, iterable, prereleases=None): """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. """ class _IndividualSpecifier(BaseSpecifier): _operators = {} def __init__(self, spec="", prereleases=None): match = self._regex.search(spec) if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) self._spec = (match.group("operator").strip(), match.group("version").strip()) # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases def __repr__(self): pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None else "" ) return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) def __str__(self): return "{0}{1}".format(*self._spec) def __hash__(self): return hash(self._spec) def __eq__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec == other._spec def __ne__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec != other._spec def _get_operator(self, op): return getattr(self, "_compare_{0}".format(self._operators[op])) def _coerce_version(self, version): if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property def operator(self): return self._spec[0] @property def version(self): return self._spec[1] @property def prereleases(self): 
return self._prereleases @prereleases.setter def prereleases(self, value): self._prereleases = value def __contains__(self, item): return self.contains(item) def contains(self, item, prereleases=None): # Determine if prereleases are to be allowed or not. if prereleases is None: prereleases = self.prereleases # Normalize item to a Version or LegacyVersion, this allows us to have # a shortcut for ``"2.0" in Specifier(">=2") item = self._coerce_version(item) # Determine if we should be supporting prereleases in this specifier # or not, if we do not support prereleases than we can short circuit # logic if this version is a prereleases. if item.is_prerelease and not prereleases: return False # Actually do the comparison to determine if this item is contained # within this Specifier or not. return self._get_operator(self.operator)(item, self.version) def filter(self, iterable, prereleases=None): yielded = False found_prereleases = [] kw = {"prereleases": prereleases if prereleases is not None else True} # Attempt to iterate over all the values in the iterable and if any of # them match, yield them. for version in iterable: parsed_version = self._coerce_version(version) if self.contains(parsed_version, **kw): # If our version is a prerelease, and we were not set to allow # prereleases, then we'll store it for later incase nothing # else matches this specifier. if parsed_version.is_prerelease and not ( prereleases or self.prereleases ): found_prereleases.append(version) # Either this is not a prerelease, or we should have been # accepting prereleases from the beginning. else: yielded = True yield version # Now that we've iterated over everything, determine if we've yielded # any values, and if we have not and we have any prereleases stored up # then we will go ahead and yield the prereleases. 
if not yielded and found_prereleases: for version in found_prereleases: yield version class LegacySpecifier(_IndividualSpecifier): _regex_str = r""" (?P(==|!=|<=|>=|<|>)) \s* (?P [^,;\s)]* # Since this is a "legacy" specifier, and the version # string can be just about anything, we match everything # except for whitespace, a semi-colon for marker support, # a closing paren since versions can be enclosed in # them, and a comma since it's a version separator. ) """ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) _operators = { "==": "equal", "!=": "not_equal", "<=": "less_than_equal", ">=": "greater_than_equal", "<": "less_than", ">": "greater_than", } def _coerce_version(self, version): if not isinstance(version, LegacyVersion): version = LegacyVersion(str(version)) return version def _compare_equal(self, prospective, spec): return prospective == self._coerce_version(spec) def _compare_not_equal(self, prospective, spec): return prospective != self._coerce_version(spec) def _compare_less_than_equal(self, prospective, spec): return prospective <= self._coerce_version(spec) def _compare_greater_than_equal(self, prospective, spec): return prospective >= self._coerce_version(spec) def _compare_less_than(self, prospective, spec): return prospective < self._coerce_version(spec) def _compare_greater_than(self, prospective, spec): return prospective > self._coerce_version(spec) def _require_version_compare(fn): @functools.wraps(fn) def wrapped(self, prospective, spec): if not isinstance(prospective, Version): return False return fn(self, prospective, spec) return wrapped class Specifier(_IndividualSpecifier): _regex_str = r""" (?P(~=|==|!=|<=|>=|<|>|===)) (?P (?: # The identity operators allow for an escape hatch that will # do an exact string match of the version you wish to install. # This will not be parsed by PEP 440 and we cannot determine # any semantic meaning from it. 
This operator is discouraged # but included entirely as an escape hatch. (?<====) # Only match for the identity operator \s* [^\s]* # We just match everything, except for whitespace # since we are only testing for strict identity. ) | (?: # The (non)equality operators allow for wild card and local # versions to be specified so we have to define these two # operators separately to enable that. (?<===|!=) # Only match for equals and not equals \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)* # release (?: # pre release [-_\.]? (a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? # You cannot use a wild card and a dev or local version # together so group them with a | and make them optional. (?: (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local | \.\* # Wild card syntax of .* )? ) | (?: # The compatible operator requires at least two digits in the # release segment. (?<=~=) # Only match for the compatible operator \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) (?: # pre release [-_\.]? (a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release ) | (?: # All other operators only allow a sub set of what the # (non)equality operators do. Specifically they do not allow # local versions to be specified nor do they allow the prefix # matching wild cards. (?=": "greater_than_equal", "<": "less_than", ">": "greater_than", "===": "arbitrary", } @_require_version_compare def _compare_compatible(self, prospective, spec): # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to # implement this in terms of the other specifiers instead of # implementing it ourselves. The only thing we need to do is construct # the other specifiers. 
# We want everything but the last item in the version, but we want to # ignore post and dev releases and we want to treat the pre-release as # it's own separate segment. prefix = ".".join( list( itertools.takewhile( lambda x: (not x.startswith("post") and not x.startswith("dev")), _version_split(spec), ) )[:-1] ) # Add the prefix notation to the end of our string prefix += ".*" return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( prospective, prefix ) @_require_version_compare def _compare_equal(self, prospective, spec): # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. prospective = Version(prospective.public) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. spec = _version_split(spec[:-2]) # Remove the trailing .* # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. prospective = _version_split(str(prospective)) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. prospective = prospective[: len(spec)] # Pad out our two sides with zeros so that they both equal the same # length. spec, prospective = _pad_version(spec, prospective) else: # Convert our spec string into a Version spec = Version(spec) # If the specifier does not have a local segment, then we want to # act as if the prospective version also does not have a local # segment. 
if not spec.local: prospective = Version(prospective.public) return prospective == spec @_require_version_compare def _compare_not_equal(self, prospective, spec): return not self._compare_equal(prospective, spec) @_require_version_compare def _compare_less_than_equal(self, prospective, spec): return prospective <= Version(spec) @_require_version_compare def _compare_greater_than_equal(self, prospective, spec): return prospective >= Version(spec) @_require_version_compare def _compare_less_than(self, prospective, spec): # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec) # Check to see if the prospective version is less than the spec # version. If it's not we can short circuit and just return False now # instead of doing extra unneeded work. if not prospective < spec: return False # This special case is here so that, unless the specifier itself # includes is a pre-release version, that we do not accept pre-release # versions for the version mentioned in the specifier (e.g. <3.1 should # not match 3.1.dev0, but should match 3.0.dev0). if not spec.is_prerelease and prospective.is_prerelease: if Version(prospective.base_version) == Version(spec.base_version): return False # If we've gotten to here, it means that prospective version is both # less than the spec version *and* it's not a pre-release of the same # version in the spec. return True @_require_version_compare def _compare_greater_than(self, prospective, spec): # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec) # Check to see if the prospective version is greater than the spec # version. If it's not we can short circuit and just return False now # instead of doing extra unneeded work. 
if not prospective > spec: return False # This special case is here so that, unless the specifier itself # includes is a post-release version, that we do not accept # post-release versions for the version mentioned in the specifier # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). if not spec.is_postrelease and prospective.is_postrelease: if Version(prospective.base_version) == Version(spec.base_version): return False # Ensure that we do not allow a local version of the version mentioned # in the specifier, which is technically greater than, to match. if prospective.local is not None: if Version(prospective.base_version) == Version(spec.base_version): return False # If we've gotten to here, it means that prospective version is both # greater than the spec version *and* it's not a pre-release of the # same version in the spec. return True def _compare_arbitrary(self, prospective, spec): return str(prospective).lower() == str(spec).lower() @property def prereleases(self): # If there is an explicit prereleases set for this, then we'll just # blindly use that. if self._prereleases is not None: return self._prereleases # Look at all of our specifiers and determine if they are inclusive # operators, and if they are if they are including an explicit # prerelease. operator, version = self._spec if operator in ["==", ">=", "<=", "~=", "==="]: # The == specifier can include a trailing .*, if it does we # want to remove before parsing. if operator == "==" and version.endswith(".*"): version = version[:-2] # Parse the version, and if it is a pre-release than this # specifier allows pre-releases. 
if parse(version).is_prerelease: return True return False @prereleases.setter def prereleases(self, value): self._prereleases = value _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") def _version_split(version): result = [] for item in version.split("."): match = _prefix_regex.search(item) if match: result.extend(match.groups()) else: result.append(item) return result def _pad_version(left, right): left_split, right_split = [], [] # Get the release segment of our versions left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) # Get the rest of our versions left_split.append(left[len(left_split[0]) :]) right_split.append(right[len(right_split[0]) :]) # Insert our padding left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) class SpecifierSet(BaseSpecifier): def __init__(self, specifiers="", prereleases=None): # Split on , to break each indidivual specifier into it's own item, and # strip each item to remove leading/trailing whitespace. specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] # Parsed each individual specifier, attempting first to make it a # Specifier and falling back to a LegacySpecifier. parsed = set() for specifier in specifiers: try: parsed.add(Specifier(specifier)) except InvalidSpecifier: parsed.add(LegacySpecifier(specifier)) # Turn our parsed specifiers into a frozen set and save them for later. self._specs = frozenset(parsed) # Store our prereleases value so we can use it later to determine if # we accept prereleases or not. 
self._prereleases = prereleases def __repr__(self): pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None else "" ) return "".format(str(self), pre) def __str__(self): return ",".join(sorted(str(s) for s in self._specs)) def __hash__(self): return hash(self._specs) def __and__(self, other): if isinstance(other, string_types): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): return NotImplemented specifier = SpecifierSet() specifier._specs = frozenset(self._specs | other._specs) if self._prereleases is None and other._prereleases is not None: specifier._prereleases = other._prereleases elif self._prereleases is not None and other._prereleases is None: specifier._prereleases = self._prereleases elif self._prereleases == other._prereleases: specifier._prereleases = self._prereleases else: raise ValueError( "Cannot combine SpecifierSets with True and False prerelease " "overrides." ) return specifier def __eq__(self, other): if isinstance(other, string_types): other = SpecifierSet(other) elif isinstance(other, _IndividualSpecifier): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented return self._specs == other._specs def __ne__(self, other): if isinstance(other, string_types): other = SpecifierSet(other) elif isinstance(other, _IndividualSpecifier): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented return self._specs != other._specs def __len__(self): return len(self._specs) def __iter__(self): return iter(self._specs) @property def prereleases(self): # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: return self._prereleases # If we don't have any specifiers, and we don't have a forced value, # then we'll just return None since we don't know if this should have # pre-releases or not. 
if not self._specs: return None # Otherwise we'll see if any of the given specifiers accept # prereleases, if any of them do we'll return True, otherwise False. return any(s.prereleases for s in self._specs) @prereleases.setter def prereleases(self, value): self._prereleases = value def __contains__(self, item): return self.contains(item) def contains(self, item, prereleases=None): # Ensure that our item is a Version or LegacyVersion instance. if not isinstance(item, (LegacyVersion, Version)): item = parse(item) # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. if prereleases is None: prereleases = self.prereleases # We can determine if we're going to allow pre-releases by looking to # see if any of the underlying items supports them. If none of them do # and this item is a pre-release then we do not allow it and we can # short circuit that here. # Note: This means that 1.0.dev1 would not be contained in something # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 if not prereleases and item.is_prerelease: return False # We simply dispatch to the underlying specs here to make sure that the # given version is contained within all of them. # Note: This use of all() here means that an empty set of specifiers # will always return True, this is an explicit design decision. return all(s.contains(item, prereleases=prereleases) for s in self._specs) def filter(self, iterable, prereleases=None): # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. 
if prereleases is None: prereleases = self.prereleases # If we have any specifiers, then we want to wrap our iterable in the # filter method for each one, this will act as a logical AND amongst # each specifier. if self._specs: for spec in self._specs: iterable = spec.filter(iterable, prereleases=bool(prereleases)) return iterable # If we do not have any specifiers, then we need to have a rough filter # which will filter out any pre-releases, unless there are no final # releases, and which will filter out LegacyVersion in general. else: filtered = [] found_prereleases = [] for item in iterable: # Ensure that we some kind of Version class for this item. if not isinstance(item, (LegacyVersion, Version)): parsed_version = parse(item) else: parsed_version = item # Filter out any item which is parsed as a LegacyVersion if isinstance(parsed_version, LegacyVersion): continue # Store any item which is a pre-release for later unless we've # already found a final version or we are accepting prereleases if parsed_version.is_prerelease and not prereleases: if not filtered: found_prereleases.append(item) else: filtered.append(item) # If we've found no items except for pre-releases, then we'll go # ahead and use the pre-releases if not filtered and found_prereleases and prereleases is None: return found_prereleases return filtered PK!i|poetry/version/utils.pyclass Infinity(object): def __repr__(self): return "Infinity" def __hash__(self): return hash(repr(self)) def __lt__(self, other): return False def __le__(self, other): return False def __eq__(self, other): return isinstance(other, self.__class__) def __ne__(self, other): return not isinstance(other, self.__class__) def __gt__(self, other): return True def __ge__(self, other): return True def __neg__(self): return NegativeInfinity Infinity = Infinity() class NegativeInfinity(object): def __repr__(self): return "-Infinity" def __hash__(self): return hash(repr(self)) def __lt__(self, other): return True def __le__(self, 
other): return True def __eq__(self, other): return isinstance(other, self.__class__) def __ne__(self, other): return not isinstance(other, self.__class__) def __gt__(self, other): return False def __ge__(self, other): return False def __neg__(self): return Infinity NegativeInfinity = NegativeInfinity() PK!poetry/version/version.pyimport re from collections import namedtuple from itertools import dropwhile from .base import BaseVersion from .exceptions import InvalidVersion from .utils import Infinity _Version = namedtuple("_Version", ["epoch", "release", "dev", "pre", "post", "local"]) VERSION_PATTERN = re.compile( r""" ^ v? (?: (?:(?P[0-9]+)!)? # epoch (?P[0-9]+(?:\.[0-9]+)*) # release segment (?P
                                          # pre-release
            [-_.]?
            (?P(a|b|c|rc|alpha|beta|pre|preview))
            [-_.]?
            (?P[0-9]+)?
        )?
        (?P                                         # post release
            (?:-(?P[0-9]+))
            |
            (?:
                [-_.]?
                (?Ppost|rev|r)
                [-_.]?
                (?P[0-9]+)?
            )
        )?
        (?P                                          # dev release
            [-_.]?
            (?Pdev)
            [-_.]?
            (?P[0-9]+)?
        )?
    )
    (?:\+(?P[a-z0-9]+(?:[-_.][a-z0-9]+)*))?       # local version
    $
""",
    re.IGNORECASE | re.VERBOSE,
)


class Version(BaseVersion):
    """A PEP 440 version string, parsed into comparable components.

    The string is validated against ``VERSION_PATTERN``; the parsed pieces
    are stored in a ``_Version`` namedtuple and folded into ``self._key``
    via ``_cmpkey`` so instances sort correctly (ordering itself is
    presumably provided by ``BaseVersion`` — defined elsewhere in this
    module, TODO confirm).

    Raises:
        InvalidVersion: if *version* does not match ``VERSION_PATTERN``.
    """

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = VERSION_PATTERN.match(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version.  A missing epoch
        # defaults to 0; pre/post/dev are normalized (letter, int) pairs
        # or None; local is a tuple of mixed str/int segments or None.
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                # Post releases have two spellings ("-1" vs ".post1"), hence
                # two alternative number groups in the pattern.
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # BUG FIX: the format string had lost its placeholder (it read
        # "".format(...), which always returned the empty string and
        # silently discarded its argument).  Restore the
        # "<Version('1.2.3')>" form used by the upstream ``packaging``
        # library this module is vendored from.
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        """Render the canonical PEP 440 string from the parsed pieces."""
        parts = []

        # Epoch — only shown when non-zero (e.g. "1!2.0").
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release — rendered without a separator (e.g. "1.0rc1").
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment — joined with "+" per PEP 440.
        if self._version.local is not None:
            parts.append("+{0}".format(".".join(str(x) for x in self._version.local)))

        return "".join(parts)

    @property
    def public(self):
        # Everything before the local-segment separator.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # Epoch + release only; pre/post/dev/local are stripped.
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        # The raw local segment as a string, or None when absent.
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        # Dev releases count as pre-releases too.
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    """Normalize a pre/post/dev marker ``(letter, number)`` pair.

    Returns a canonical ``(letter, int(number))`` tuple, or None when
    neither a letter nor a number is present.
    """
    if letter:
        # A letter with no numeral means an implicit 0 ("1.0a" == "1.0a0").
        if number is None:
            number = 0

        # Canonical lower-case spelling, folding known alternate spellings
        # onto the preferred form.
        letter = letter.lower()
        aliases = {
            "alpha": "a",
            "beta": "b",
            "c": "rc",
            "pre": "rc",
            "preview": "rc",
            "rev": "post",
            "r": "post",
        }
        return aliases.get(letter, letter), int(number)

    if number:
        # A bare number is the implicit post-release syntax (e.g. "1.0-1").
        return "post", int(number)


# Local-version pieces may be separated by ".", "_" or "-"; all three are
# treated identically per PEP 440 normalization.
_local_version_seperators = re.compile(r"[._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is None:
        return None
    # Numeric pieces become ints (they compare numerically); textual pieces
    # are lower-cased for case-insensitive comparison.
    return tuple(
        int(piece) if piece.isdigit() else piece.lower()
        for piece in _local_version_seperators.split(local)
    )


def _cmpkey(epoch, release, pre, post, dev, local):
    """Build the tuple used to order versions per PEP 440."""
    # Compare release segments with trailing zeros stripped, so that
    # e.g. 1.0 and 1.0.0 sort as equal.
    trimmed = list(release)
    while trimmed and trimmed[-1] == 0:
        trimmed.pop()
    release = tuple(trimmed)

    # 1.0.dev0 must sort before 1.0a0: when only a dev marker exists,
    # pretend it is an infinitely-early pre-release. Otherwise a missing
    # pre-release sorts after every real one.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    elif pre is None:
        pre = Infinity

    # A missing post segment sorts before any real post segment.
    if post is None:
        post = -Infinity

    # A missing dev segment sorts after any real dev segment.
    if dev is None:
        dev = Infinity

    if local is None:
        # No local segment sorts before any local segment.
        local = -Infinity
    else:
        # PEP 440 local ordering:
        # - numeric pieces compare numerically and after alphanumeric ones
        # - alphanumeric pieces compare lexicographically
        # - shorter prefixes sort first when they match exactly
        local = tuple(
            (piece, "") if isinstance(piece, int) else (-Infinity, piece)
            for piece in local
        )

    return epoch, release, pre, post, dev, local
PK!-ww	w	"poetry/version/version_selector.pyfrom typing import Union

from poetry.packages import Dependency
from poetry.packages import Package
from poetry.semver import parse_constraint
from poetry.semver import Version


class VersionSelector(object):
    """Selects the best matching package version from a repository pool."""

    def __init__(self, pool):
        self._pool = pool

    def find_best_candidate(
        self,
        package_name,  # type: str
        target_package_version=None,  # type:  Union[str, None]
        allow_prereleases=False,  # type: bool
    ):  # type: (...) -> Union[Package, bool]
        """
        Given a package name and optional version,
        returns the latest Package that matches.

        Returns False when the pool has no candidate at all.
        """
        if target_package_version:
            constraint = parse_constraint(target_package_version)
        else:
            constraint = parse_constraint("*")

        candidates = self._pool.find_packages(
            package_name, constraint, allow_prereleases=allow_prereleases
        )

        if not candidates:
            return False

        dependency = Dependency(package_name, constraint)

        # Pick the highest version among the allowed candidates.
        # Bug fix: the previous implementation seeded the result with
        # candidates[0] unconditionally, so a pre-release could be returned
        # even when pre-releases were not allowed, and the explicit
        # ``allow_prereleases`` flag was ignored by the in-loop filter.
        package = None
        for candidate in candidates:
            if (
                candidate.is_prerelease()
                and not allow_prereleases
                and not dependency.allows_prereleases()
            ):
                continue

            # Select highest version of the two
            if package is None or package.version < candidate.version:
                package = candidate

        if package is None:
            # Every candidate is an excluded pre-release; keep the historical
            # fallback of returning the first candidate rather than failing.
            package = candidates[0]

        return package

    def find_recommended_require_version(self, package):
        """Return a recommended constraint string (e.g. "^2.1") for *package*."""
        version = package.version

        return self._transform_version(version.text, package.pretty_version)

    def _transform_version(self, version, pretty_version):
        # attempt to transform 2.1.1 to 2.1
        # this allows you to upgrade through minor versions
        try:
            parsed = Version.parse(version)
            parts = [parsed.major, parsed.minor, parsed.patch]
        except ValueError:
            # Not a parseable version: fall back to the pretty form as-is.
            return pretty_version

        # check to see if we have a semver-looking version
        if len(parts) == 3:
            # remove the last parts (the patch version number and any extra)
            # but keep the patch for 0.x versions, where minor bumps may break
            if parts[0] != 0:
                del parts[2]

            version = ".".join(str(p) for p in parts)
            if parsed.is_prerelease():
                version += "-{}".format(".".join(str(p) for p in parsed.prerelease))
        else:
            return pretty_version

        return "^{}".format(version)
PK!Hi&.+poetry-0.12.11a0.dist-info/entry_points.txtN+I/N.,()*O-)PzPi<..PK!5&&"poetry-0.12.11a0.dist-info/LICENSECopyright (c) 2018 Sébastien Eustace

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
PK!H4VY poetry-0.12.11a0.dist-info/WHEELA
 н#V;/"dF&[xjxdڠwsތ`hA7HQt[YֻbaPf^PK!H
f]#poetry-0.12.11a0.dist-info/METADATA\FvhR,Ih#Kz*$Sc$< eFT<*/țIs
%\ӧΥB,_tY%y6V'qucU.ׁ{sqt\4UT^晚Bg3khW:UT"x1JG:WzTX-뺨Ƈ2Vb&aONã@É}8
'iTU<[ꛋs(F7Vb_w,EVDQh@oȋ>[?-tO?Ewݯ"E>oRFy/$ZVy2-2ѕ7IurӤ*⥎.r3>SL6kz'TtFs)xUt3o%P]b{Us#\>{r~ҫ6_̂zUy
8jk.zO)M8<>sE^bwyM`?`
\~%o>6U;deYE{XMRmM!nJ#3kж8	rn@dص&ō^aoOo"mT4[3ӎE;%3l|bulKrT,bYe"MTRHςy&L=RUkJM;b3[^/
}QA	
Z˙ԗڑrMQu\vZzd]ՏF8A8E,a&#`q4fI"긩5,}S )ֹ#lQ@ZT#*'b$vBlLbܒHte*l%&TU-kBtLm.)")&( Xdjh׭U=͸eoIH
ܭnxˣ2j.hy@2|-`,")EЉXdbJUɪ ֩0p:~nޑvƗ_0/2
U#Qm
m#mG5JG=ցϡ_N䤭dx{6n±339V2(8ٴ$Kj#e635е#J4QLyKni;%CS캜ӷCuAYTΒwz֢P㬙9,tQ?ǟpdX8ΠR;khE0[U]6HIMAs&X$z~[S_>x&لbc_6IA7.L*ȢV0LV둡^`!a)G{5-2u9/_ b\@´mߝz{\@eьyճO_<WJ݁!v	.V^5h"`KHrY~pڸ,iiwSFc>C__Y|~"-	)
S˪K9Sm
"'7n
AX
U3oa
M5Zګ ˤ^+72d*Nuζi;!&HslxT@F~Wg.s3Ts{^QefZFp?#=ta6}!ʚYm-8NsxnF[L@8&nmaUs+t8V7WQAC'IBN8?`Ru[>AfD1G$b?q̫ q8XbЖptҁٲ8mHc~1cj]a_ZTMp/O.TȨ4Fc)hE^41t,@o
jqA4WƏ?т' IgA,iQ$df1PTȂg(p'\5iJ(
Cd>7	11X&"=;!}nČ-&&)y|G) E,ȇek3>ΰ5iaVEpcFӆ5XnR-
fc``o#@oKl1M%c
&;5Mf=bTx,4#1@YMQFfd8<`_"0:M4i\ꜭ-ZyɛtR;E4sbb&bqKw6ța@P.AȌ[r,XKM	MDqs~_~r@o dSsښ5[@]g.L
l}DCr+DZkѣi)K̗&-!fdoمhök8' 4npi纾[1/	#2	9+*C4M3
X@;z)K>!D{04`bZ~rv>~Ba8	Q
d|DsM%)_Smm$jc.nZncc!KΨ<?*<+	 @zS}F@V=Ű$-mXE7Q+k@YNZUM~3lb^6{`#NҳhHbe0Fw` zlEbc~0q_}?%$#Yhw})}y	BOH9o4fb]!Zaڍ߸-GRzIiAH`ݨƁ8*/}ӫ$PDE}GwC*~pKMf󶚯(95r
--.jjjhyݕYpiE(bl8W~nO+ݮ ~cɢ]jRvQI#G&&%C_20w׭mdۆiLS1!GCͰF膡k̍:'FRP$X+zɗsԔHN/JmI1^J'e%qm)SbB
`G24AKbfLƆN&Zdl?|)6d~9'Z3Cij\XәےLL-l[Wp9WHd]zF_J5i5~3'}ô-"OU^]Q5k+FW)ZӮWј@LmZȤyo~e|EӀ	 cU3"!!g?y6{]sƷo2D'f66ZoCr3lgy9FdnZ#1],F˳9NI"0Q' ޵($ݽJȈpFw
 ]˹-WR
Q\y`9KݧIߠÿI6t֟czvR
Pi\
@
uՙ䙴=wÁ~8t8Tu-^H{ֆIf5#icT-E@70۞uh.Q.ߝ^XuwXrr6m~MBA:80Mg=ҨΏXؑѹy8
1'j*mt.JBK
e\eqd<_c@zOjy/=Lfo:>*r7pU53r/HXFS͈fI?:	㋋ѓneݟڟ~43O0T
KcK^mxF`Ӗ eX2kW^zhK>|>QA$[hkE_^|8:S/^1x*U1{c	y8;nnȆkijX	[=K0cB}hm<8>w!&vlنoj&n
IbpًO}j[=\
kw۝V9nvg_;Cz#xsw&W1Qb|#vl2q"LCaUK+wQ4rRzڽ/.`bY9K$|FF2i͸TUbӍ}+p=vt82ͨj$eNW$y(9ZЖloXIbK@W?l3{g_D9y/3:<88<PEaw%gg	U	@B|[wY57i_^L]\LZHV$IbfCT۸c]+>tǤ1ְ7(cݷ`#tOK|ͮ{{uȕ}vy7 k
1\"wt۽Q-/'mq!\m-%bD{}B62Vaup 6/|[FIw M!qQ-
_ȦbOgw,i*!/jd`Ǣ̛չǂ|.=dFg&\J L^7B;	}hB$uv6NS?4ğv{N
>1A+ܛl1hx8
N$Ͷ]{Peks`UJ[
n]pMVPv
r?Ƥ1fh׺rV^(ĹKF)VÆ/3޸mW5im~}xWe.(HX$\O|y/ڝnm^'tdv8Ƀm'RНj%=o۾qxq=hW}\WnP9
;
/TjsiQT,	O
+x$D
NZx#\ĕKWa(L.v!whT'tfcP2(Ukn΃.beЬ:)xtm=6?;g?9Kv̇΅wm5ה'r(Uۍ΀εSd5-dYY;VRM	LRDnEO=iƟdZ	mnmэBWV@|,Qd%)ڼ3R|ۻȮDg`n2Lms+	m2k*06D6u.4/?~~߱Kq=D•G?51mºhŕh.oI
{t#9'v7X~>SS[3hʱ3ܸCx܇nF.cM0^t9v6Wcbrn'5vv*XF[C٭( M}Am˰VIѣ/$X:n5ɗ*tYOԭmKRo^p{&ϴm-_	aZ`ƭNwYoinѕ`ђ5-*
q^,ރOYp.g%_~/dnWk=-6vj)]DTR`3`ㅿWr+R,PD4:fKo>%£:|E.:Ԃ.U^ƾKuҷ@s:;סBR`DY8~5l'u_"ѬJyT(ׂDU[;ܛB^De|،y<WZKbđy6?:tM.Zl+@rCnPK!H2	7!poetry-0.12.11a0.dist-info/RECORDGwL緈@ "ZYn`uKY1Y_Hkgr /kTf1yj%Ii[w
ki1=b=X".`QᲃIxm^!H*3ǕID	=$;
*plr]H1R"|,tUȃ0ǾR&_헿*
VyIʭWJ1";w8/<=q'^=B=Lpՠo#9];Nn&WEqDt:v|KpIZFtc*	LDwDE'8Fk^v
 E0Y/pgnFK}ԝةĴ#_AGML|(܂ѹ(C9~T%4Hbgu7$יg\V(ϓ|Afe30ƬХ&$qPjӜ+7'W%8AϨP?^,g-I eMW}ͣT
Q#`cAwR?2g'7C%&1;y#\X
pz>Gh=o|p1ϲr)Q8PtW [ícϡ2ɧd@
פ- ʃXŰ(XU.;9ZG޾P$2>Mi򩈲w/U{GXQpNЊ'YB|uG'`q /pL?$ʮ,NMEÖeal9ɈHC3W\e~)QŠwbU䡎0-ez_^n{(4̝
)P(Dpe4*Eۡj۷Cv:>=U7xOyNXHH<RJ9W1wX\UAܬr
WgsMr9S:*Hcmbxb
Cv۶}tގ#ѻ5LX
NroAP`t5)u|=htc5#9qUK(t"%گX
wßp~k	cׁ{3iqVGӳԪ=VaokQ]U5/sɱk[)~j8咥m1/J/~>k=Gu6
tlMFoY˚┹;ľ!zNNkdHwJT;:+924@;¾-GóWmnd?jks-֞+ zï"hydD"hkHL#pٛθm^ϖ-6'sY㉤Nb'Qq|&BXîx-VHJKi!B-+L!M4P6oA>_İ5L7	TTonzzd
qlM0.mJû_aN؇4-Z8e9{
87[?oM͞d#f{@B
UC!97/.2eJJ>g "3eFCM?ˡsc)z~fq1>Gwn)sI9	,'R	7E%OgvSW0
Ys3feQ/Elmfh©:ML#.M^R*yݜUa'$:㑠nHYBJ"Mڌm_;[y<@pbfO 
yJ1ߓHqY,`}ŝl')]8LEX[/6}WmK;j]Ja8R'*[K;
Gȓq-Tc7E~Ibkx}IՋC4H߄dwr2%pJ9F;9;z	5U!wΠe]~,W
Dq/snݤ}%C(':_Ac.iQiy9u1h^+qc._MsN!:iX791y͇^EH=Dߕ^3;^okA[G8bx"ꗫ$'CLe@Ht^R߹8v_nUPM6U^.`խ~N2s
:(;΂kOL-C+$u7`úx~y9<2q$Ni޳m5jBgoa\ނx@{.E"uoc_-JO-Х
&mys\ǦH	Zu-%݀øI-Uͧ:&1r@tגJSؖ&'ى)G>.0]tI'Mlj!4깅ٍ/:]ŜoْQ
j.V;th4j=TT>]&npu!wz1$kr|xxFkDsS
BN^Kv;5 )o1w0~O!iQ%ѭ2tcKI%0\p7;ƭbcws|J0S G˲O^^m[JCĿW<dRE(b:sIԶI!忂-jj0B"T
gL6$KsS}W߭
upk!{VOb}܄5O4ߖ
 +V֙`0-wlU4WWz;IIWlT&6WB[L(6uiđUWAaFah|zU19Fyڼ5]LIGrtoMaGV:ˎL,YȎy aU
gpS@}Uw͏&'B$
	=x=o!eِ{p|un^#FG& Yֺf3|M@d/wGGrfhZooҦy`4%ASf"xV3kwGnK]B~)YBͺآ[L.g([n.G=e{J'n[s:_6}g
|fݭBطA4#=dgho_;;zgUSUq-k1Mzy!Z?_o_skh (1bM'W1}d7=2
h>^Ao9V|BxdE˽
	
uYUa@&S%鎟:z}h.sG>{X`k ~w!ڱcZb3)m_nuɡ]X߳ܖQy{TD
tݧnPN渹UBKAu}P?3׮xL@LO|S)޴@ZH;Ke%^K|q=q7e68J8HW 0iל!ȇ87=b 
&%v.b/UDPW
se[*pr>	B_hz $KICO^\wK""z<-=wrL6<~21ԷbhEĂt5dUSſpo#Q^ad
.f#\td)>5Wk~.k(x#߃2M֖9~~ڵVG{/wҞghUj遈{7pBW~Z@ݑ(zOxX1FˁItb*06ceZ{ujBЏ|]|8$WN1IN_*jacA}TT	躊sgWoH6"!^xwaԱ½]4l6JqPw>M,jڠJ8KRfë{nDR+w}_?y⽪cdMXU[ݙ/k^٬a 	?Hܓ@̦tϏB[\;ɁrШ-Ȼ;	hGO4KGHS1b܂=.`k/d
]Z^LX^3M
f缑tMJpMw_xIs{V-{:#JHCR5xfݳQ+
N%p!Of?q`%a{-02Nf|<"{C-p.q/G:|jU.c9SE
e^k߼1n>_nz&͂t=rr
en˾]`-An{{B}__dmlt]o]D8M>o/mm@Ŷ
h'˝Ei\hg}`
 ﷚>Aa77r3+шTT@ UR4=ݺѲOOKE3<>Th2ZO|ar>I$W{d'	V>T5Yh
^1BTuJ|SO4xᨸ@/
~:`no/4.$Yi^F?[AA:h,,SsQ@hFvj#di՗+<*6RSv+y3Cd|LU\Zs_<:ZIjw"*¾mVfW݀	`RC2&Iu#16]A 7s1$o]uQH)m\!Rhhi7pF!̘Ih쒨H/ȨsJ@^'L:S9NfBAkLU/Q#X
ɌhU0uM!Mۋz
o*¿d '>79F9qDʵIk<켮M^
+Worz{ 9{{J?A׾͝%#\cefm6
2'4u=YrIsʙ&o5Iݽ	"4=8eܦvƱPmz9Nz78m8&=TZ9q$ff#t4poetry/masonry/builders/__init__.pyPK!=ɯcc"5poetry/masonry/builders/builder.pyPK!W[RYY#Mpoetry/masonry/builders/complete.pyPK!]q=@1@1 XRpoetry/masonry/builders/sdist.pyPK!e{.. փpoetry/masonry/builders/wheel.pyPK!H/
/
ղpoetry/masonry/metadata.pyPK!5k!!%<poetry/masonry/publishing/__init__.pyPK!]%#
#
&poetry/masonry/publishing/publisher.pyPK!t!!%poetry/masonry/publishing/uploader.pyPK! 
poetry/masonry/utils/__init__.pyPK!ƨC88Kpoetry/masonry/utils/helpers.pyPK!ҭpoetry/masonry/utils/include.pyPK!Q

poetry/masonry/utils/module.pyPK!ܿ??'poetry/masonry/utils/package_include.pyPK!b]""wpoetry/masonry/utils/tags.pyPK!lapoetry/mixology/__init__.pyPK!Jpoetry/mixology/assignment.pyPK!uE''!poetry/mixology/failure.pyPK!1>1>"Ipoetry/mixology/incompatibility.pyPK!cXbb(poetry/mixology/incompatibility_cause.pyPK!m0#+poetry/mixology/partial_solution.pyPK!⟶ee3poetry/mixology/result.pyPK!:m	ϭpoetry/mixology/set_relation.pyPK!`poetry/mixology/term.pyPK!|CEE!npoetry/mixology/version_solver.pyPK!ͪӲIpoetry/packages/__init__.pyPK!"'cc'poetry/packages/constraints/__init__.pyPK!i-#poetry/packages/constraints/any_constraint.pyPK!Kynn.&poetry/packages/constraints/base_constraint.pyPK!
f^))poetry/packages/constraints/constraint.pyPK!U>/d6poetry/packages/constraints/empty_constraint.pyPK!Pe		/8poetry/packages/constraints/multi_constraint.pyPK!q/Bpoetry/packages/constraints/union_constraint.pyPK!QFF2Ppoetry/packages/constraints/wildcard_constraint.pyPK!?&&Xpoetry/packages/dependency.pyPK!%	poetry/packages/dependency_package.pyPK!8'Opoetry/packages/directory_dependency.pyPK!r44"poetry/packages/file_dependency.pyPK!ˉ5jjpoetry/packages/locker.pyPK!1-t,t,poetry/packages/package.pyPK!}33%Tpoetry/packages/package_collection.pyPK!v"poetry/packages/project_package.pyPK!!poetry/packages/utils/__init__.pyPK!1]poetry/packages/utils/link.pyPK!h&poetry/packages/utils/utils.pyPK!ٺg!Hpoetry/packages/vcs_dependency.pyPK!+""hpoetry/poetry.pyPK!-;}:poetry/puzzle/__init__.pyPK!#:poetry/puzzle/dependencies.pyPK!EOG)>poetry/puzzle/exceptions.pyPK!,MYY$@poetry/puzzle/operations/__init__.pyPK!h9ii#PApoetry/puzzle/operations/install.pyPK!Tk++%Cpoetry/puzzle/operations/operation.pyPK!^Ntt%hHpoetry/puzzle/operations/uninstall.pyPK!Y%{ZZ"Kpoetry/puzzle/operations/update.pyPK!bbOpoetry/puzzle/provider.pyPK!2		-	-poetry/puzzle/solver.pyPK!y::'poetry/repositories/__init__.pyPK!R%%poetry/repositories/auth.pyPK!
uKK&poetry/repositories/base_repository.pyPK!QtZZ!poetry/repositories/exceptions.pyPK!WVRR+$poetry/repositories/installed_repository.pyPK!]..(poetry/repositories/legacy_repository.pyPK!^:		poetry/repositories/pool.pyPK!EzzsXsX&$poetry/repositories/pypi_repository.pyPK!m(#
#
!|poetry/repositories/repository.pyPK!6%poetry/semver/__init__.pyPK!Du-22!poetry/semver/empty_constraint.pyPK!<څ..poetry/semver/exceptions.pyPK!^[poetry/semver/patterns.pyPK!H^i//poetry/semver/version.pyPK!#OO#%poetry/semver/version_constraint.pyPK!Ì44poetry/semver/version_range.pyPK!gLpoetry/semver/version_union.pyPK!A
*poetry/spdx/__init__.pyPK!=|uu-poetry/spdx/data/licenses.jsonPK!japoetry/spdx/license.pyPK!XI.poetry/spdx/updater.pyPK!poetry/utils/__init__.pyPK!oԓo	o	poetry/utils/_compat.pyPK!O͂G"G"poetry/utils/appdirs.pyPK!Y@Y@<poetry/utils/env.pyPK!3U^	^	*poetry/utils/helpers.pyPK!|Y4poetry/utils/patterns.pyPK!phy/y/g5poetry/utils/setup_reader.pyPK!
m~epoetry/utils/shell.pyPK!LA1Lipoetry/utils/toml_file.pyPK!qSSkpoetry/vcs/__init__.pyPK!>ճ.

npoetry/vcs/git.pyPK!Nt)uxpoetry/version/__init__.pyPK!bV|poetry/version/base.pyPK!XE%,,poetry/version/exceptions.pyPK!iiQpoetry/version/helpers.pyPK!Ѷ poetry/version/legacy_version.pyPK!TyCCpoetry/version/markers.pyPK!uU""poetry/version/requirements.pyPK!M۠pppoetry/version/specifiers.pyPK!i||f	poetry/version/utils.pyPK!Bk	poetry/version/version.pyPK!-ww	w	"j	poetry/version/version_selector.pyPK!Hi&.+!	poetry-0.12.11a0.dist-info/entry_points.txtPK!5&&"	poetry-0.12.11a0.dist-info/LICENSEPK!H4VY 	poetry-0.12.11a0.dist-info/WHEELPK!H
f]#	poetry-0.12.11a0.dist-info/METADATAPK!H2	7!÷	poetry-0.12.11a0.dist-info/RECORDPK0