From b41326dbe197d404e82a0850f622d32d6196c3b7 Mon Sep 17 00:00:00 2001
From: Pradyun Gedam
Date: Wed, 7 Dec 2022 00:10:45 +0000
Subject: [PATCH] Rename `marker_expr` to `marker`

This is better aligned with the naming from PEP 508.
---
 packaging/_parser.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/packaging/_parser.py b/packaging/_parser.py
index d13965d9..103f679a 100644
--- a/packaging/_parser.py
+++ b/packaging/_parser.py
@@ -147,7 +147,7 @@ def _parse_requirement_marker(
     else:
         tokenizer.read()
 
-    marker = _parse_marker_expr(tokenizer)
+    marker = _parse_marker(tokenizer)
     tokenizer.consume("WS")
 
     return marker
@@ -234,12 +234,12 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
 # Recursive descent parser for marker expression
 # --------------------------------------------------------------------------------------
 def parse_marker(source: str) -> MarkerList:
-    return _parse_marker_expr(Tokenizer(source, rules=DEFAULT_RULES))
+    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
 
 
-def _parse_marker_expr(tokenizer: Tokenizer) -> MarkerList:
+def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
     """
-    marker_expr = marker_atom (BOOLOP marker_atom)+
+    marker = marker_atom (BOOLOP marker_atom)+
     """
     expression = [_parse_marker_atom(tokenizer)]
     while tokenizer.check("BOOLOP"):
@@ -251,7 +251,7 @@ def _parse_marker_expr(tokenizer: Tokenizer) -> MarkerList:
 
 def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
     """
-    marker_atom = WS? LEFT_PARENTHESIS WS? marker_expr WS? RIGHT_PARENTHESIS WS?
+    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                 | WS? marker_item WS?
     """
 
@@ -259,7 +259,7 @@
     if tokenizer.check("LEFT_PARENTHESIS", peek=True):
         with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
             tokenizer.consume("WS")
-            marker: MarkerAtom = _parse_marker_expr(tokenizer)
+            marker: MarkerAtom = _parse_marker(tokenizer)
             tokenizer.consume("WS")
     else:
         marker = _parse_marker_item(tokenizer)
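
As a quick illustration of the renamed entry point, here is a minimal sketch
(not part of the patch) of calling `parse_marker` as defined in the diff
above. Note that `packaging._parser` is a private module, so the exact output
shape is internal and may change between releases; this assumes a checkout
with this patch applied.

    # Parse a PEP 508 environment marker using the renamed parser.
    # parse_marker/_parse_marker are private helpers of `packaging`,
    # so this snippet is illustrative only.
    from packaging._parser import parse_marker

    # Grammar from the docstring above:
    #   marker = marker_atom (BOOLOP marker_atom)+
    result = parse_marker('python_version >= "3.8" and os_name == "posix"')
    print(result)  # a MarkerList: marker atoms joined by "and"/"or"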