
Commit

Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same
lpetre committed Aug 11, 2021
1 parent 1c3a27b commit e68c375
Showing 2 changed files with 594 additions and 512 deletions.
9 changes: 6 additions & 3 deletions libcst/codegen/gen_matcher_classes.py
@@ -3,6 +3,7 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
+import re
 from dataclasses import dataclass, fields
 from typing import Generator, List, Optional, Sequence, Set, Tuple, Type, Union
 
@@ -12,6 +13,8 @@
 
 
 CST_DIR: Set[str] = set(dir(cst))
+CLASS_RE = r'<class \'(.*?)\'>'
+OPTIONAL_RE = r'typing\.Union\[([^,]*?), NoneType]'
 
 
 class CleanseFullTypeNames(cst.CSTTransformer):
@@ -396,8 +399,8 @@ def _get_clean_type_and_aliases(
 
     # First, get the type as a parseable expression.
     typestr = repr(typeobj)
-    if typestr.startswith("<class '") and typestr.endswith("'>"):
-        typestr = typestr[8:-2]
+    typestr = re.sub(CLASS_RE, r"\1", typestr)
+    typestr = re.sub(OPTIONAL_RE, r"typing.Optional[\1]", typestr)
 
     # Now, parse the expression with LibCST.
    cleanser = CleanseFullTypeNames()
@@ -457,7 +460,7 @@ def _get_fields(node: Type[cst.CSTNode]) -> Generator[Field, None, None]:
 generated_code.append("")
 generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes")
 generated_code.append("from dataclasses import dataclass")
-generated_code.append("from typing import Callable, Sequence, Union")
+generated_code.append("from typing import Callable, Optional, Sequence, Union")
 generated_code.append("from typing_extensions import Literal")
 generated_code.append("import libcst as cst")
 generated_code.append("")
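
The two regular expressions normalize the string form of a field's type annotation before it is parsed with LibCST: a "<class '...'>" repr is unwrapped to the bare dotted name, and "typing.Union[Foo, NoneType]" is rewritten to "typing.Optional[Foo]". A minimal standalone sketch of that normalization follows; the sample strings are illustrative, not values taken from an actual codegen run.

import re
import typing

# Optional[X] is defined as Union[X, None], so both spellings denote the same type.
assert typing.Optional[int] == typing.Union[int, None]

# The regexes introduced by this commit (copied from the diff above).
CLASS_RE = r'<class \'(.*?)\'>'
OPTIONAL_RE = r'typing\.Union\[([^,]*?), NoneType]'

# Illustrative repr() strings; the exact form depends on the annotation and Python version.
samples = [
    "<class 'libcst._nodes.expression.Name'>",
    "typing.Union[libcst._nodes.expression.Name, NoneType]",
]

for typestr in samples:
    typestr = re.sub(CLASS_RE, r"\1", typestr)
    typestr = re.sub(OPTIONAL_RE, r"typing.Optional[\1]", typestr)
    print(typestr)

# Prints:
#   libcst._nodes.expression.Name
#   typing.Optional[libcst._nodes.expression.Name]

Because the normalized strings can now contain "typing.Optional[...]", the generated matcher module also needs Optional in its typing import, which is why the second hunk adds it to the generated import line.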
