Replace TokenParser with shlex.split
Secrus committed Nov 24, 2024
1 parent 8496b5d commit c1ef304
Showing 5 changed files with 12 additions and 164 deletions.
1 change: 1 addition & 0 deletions news/453.removal.md
@@ -0,0 +1 @@
+Remove `TokenParser` and replace it with `helpers.tokenize`, which maps directly to `shlex.split`.
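
For context, `shlex.split` tokenizes a string using POSIX shell rules, so quoted arguments are kept as single tokens; a quick illustration (not part of the commit):

    import shlex

    shlex.split('greet "John Doe" --yell')
    # ['greet', 'John Doe', '--yell']
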
9 changes: 9 additions & 0 deletions src/cleo/helpers.py
@@ -40,3 +40,12 @@ def option(
         description=description,
         default=default,
     )
+
+
+def tokenize(string: str) -> list[str]:  # pragma: no cover
+    """
+    Split the string using shell-like syntax. Maps directly to `shlex.split`.
+    """
+    import shlex
+
+    return shlex.split(string)
4 changes: 2 additions & 2 deletions src/cleo/io/inputs/string_input.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

+from cleo.helpers import tokenize
 from cleo.io.inputs.argv_input import ArgvInput
-from cleo.io.inputs.token_parser import TokenParser


 class StringInput(ArgvInput):
@@ -15,4 +15,4 @@ def __init__(self, input: str) -> None:
         self._set_tokens(self._tokenize(input))

     def _tokenize(self, input: str) -> list[str]:
-        return TokenParser().parse(input)
+        return tokenize(input)
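
A minimal sketch of the effect, assuming the `tokenize` helper shown above: `StringInput` now splits its raw string with shell-like rules before storing the tokens via `_set_tokens`.

    from cleo.helpers import tokenize
    from cleo.io.inputs.string_input import StringInput

    # The same splitting StringInput now applies internally via _tokenize():
    tokenize("command --option 'value with spaces'")
    # -> ['command', '--option', 'value with spaces']

    # Constructing a StringInput stores those tokens for later binding.
    StringInput("command --option 'value with spaces'")
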
112 changes: 0 additions & 112 deletions src/cleo/io/inputs/token_parser.py

This file was deleted.

50 changes: 0 additions & 50 deletions tests/io/inputs/test_token_parser.py

This file was deleted.
