Skip to content

Commit

Permalink
Tests, infra, minor bug fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
brentyi committed Oct 5, 2021
1 parent c624e46 commit a6570f6
Show file tree
Hide file tree
Showing 15 changed files with 333 additions and 134 deletions.
6 changes: 6 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
[flake8]
# Checks suppressed project-wide (black-compatible set):
# E203: whitespace before ':' (black inserts this around slices)
# E501: line too long (<n> characters)
# W503: line break before binary operator (modern PEP 8 prefers breaking before)
# Earlier draft also ignored pydocstyle codes D100-D103 (missing docstrings):
; ignore = E203,E501,D100,D101,D102,D103,W503
ignore = E203,E501,W503
28 changes: 28 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# CI workflow: run the pytest suite on pushes and pull requests targeting master,
# across every Python version the package supports.
name: build

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      # Fan out one job per supported interpreter version.
      matrix:
        python-version: ["3.7", "3.8", "3.9"]

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        # NOTE(review): setup-python@v1 is older than checkout@v2 above — consider
        # aligning the action versions.
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        # Editable install with the "testing" extra, which provides pytest.
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[testing]"
      - name: Test with pytest
        run: |
          pytest
17 changes: 17 additions & 0 deletions .github/workflows/lint.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Lint workflow: fail CI when the codebase is not formatted with black.
name: lint

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  black-check:
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): checkout@v1 here vs checkout@v2 in build.yml — consider aligning.
      - uses: actions/checkout@v1
      - name: Black Code Formatter
        uses: lgeiger/black-action@master
        with:
          # "--check" = report-only mode; no files are rewritten in CI.
          args: ". --check"
31 changes: 31 additions & 0 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# This workflow will upload a Python Package using Twine when a release is created.
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload Python Package

on:
  release:
    # Only runs when a GitHub release is created — not on tags or pushes.
    types: [created]

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          # Any Python 3 release is fine for building sdist/wheel.
          python-version: '3.x'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel twine
      - name: Build and publish
        env:
          # PyPI credentials are injected from repository secrets — never hard-coded.
          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          python setup.py sdist bdist_wheel
          twine upload dist/*
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,5 @@ __pycache__
.hypothesis
.ipynb_checkpoints
.cache
build/
dist/
25 changes: 16 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,15 +1,22 @@
# dcargs

`dcargs` is argparse + dataclasses, with the goal of generating portable,
reusable, and strongly typed CLI interfaces.
`dcargs` is a tool for generating portable, reusable, and strongly typed CLI
interfaces from dataclass definitions.

We expose only one function, which takes a dataclass type and instantiates it
via CLI flags:
We expose one function, `parse(Type[T]) -> T`, which takes a dataclass type and
instantiates it via an argparse-style CLI interface:

```python
# Importable via dcargs.parse
def parse(cls: Type[DataclassType], description: str = "") -> DataclassType:
...
import dataclasses

import dcargs

@dataclasses.dataclass
class Args:
field1: str
field2: int

args = dcargs.parse(Args)
```

The parse function supports dataclasses containing:
Expand All @@ -18,8 +25,8 @@ The parse function supports dataclasses containing:
- [x] Boolean flags
- [x] Enums (via `enum.Enum`)
- [x] Optional types
- [x] Literal types (by populating `choices`)
- [ ] Sequence and list types (by populating `nargs`)
- [x] Literal types (populates `choices`)
- [ ] Sequence and list types
- [x] Forward references (including in unions)
- [x] Automatic helptext generation
- [x] Nested dataclasses
Expand Down
22 changes: 8 additions & 14 deletions dcargs/_arguments.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,9 @@
import dataclasses
import enum
from typing import (
TYPE_CHECKING,
Any,
Callable,
List,
Literal,
Optional,
Set,
Type,
Union,
get_type_hints,
)
from typing import (TYPE_CHECKING, Any, Callable, List, Optional, Set, Type,
Union)

from typing_extensions import Literal # Python 3.7 compat

from . import _docstrings

Expand Down Expand Up @@ -73,6 +65,8 @@ def _bool_flags(arg: ArgumentDefinition) -> None:
"""For booleans, we use a `store_true` action."""
if arg.type is not bool:
return

# TODO: what if the default value of the field is set to true by the user?
arg.action = "store_true"
arg.type = None
arg.default = False
Expand Down Expand Up @@ -100,7 +94,7 @@ def _handle_optionals(arg: ArgumentDefinition) -> None:
not required."""
field = arg.field
if hasattr(field.type, "__origin__") and field.type.__origin__ is Union:
options = set(get_type_hints(arg.parent_class)[field.name].__args__)
options = set(field.type.__args__)
assert (
len(options) == 2 and type(None) in options
), "Union must be either over dataclasses (for subparsers) or Optional"
Expand All @@ -125,7 +119,7 @@ def _choices_from_literals(arg: ArgumentDefinition) -> None:

def _enums_as_strings(arg: ArgumentDefinition) -> None:
"""For enums, use string representations."""
if arg.type is not None and issubclass(arg.type, enum.Enum):
if type(arg.type) is type and issubclass(arg.type, enum.Enum):
if arg.choices is None:
arg.choices = set(x.name for x in arg.type)
else:
Expand Down
5 changes: 2 additions & 3 deletions dcargs/_docstrings.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,7 @@ def make(cls) -> "_Tokenization":
if toktype in (tokenize.NEWLINE, tokenize.NL):
line_number += 1
tokens_from_line[line_number] = []
elif toktype != tokenize.INDENT:
# Add everything else except for whitespace
elif toktype is not tokenize.INDENT:
token = _Token(token_type=toktype, token=tok, line_number=line_number)
tokens.append(token)
tokens_from_line[line_number].append(token)
Expand Down Expand Up @@ -82,7 +81,7 @@ def get_field_docstring(cls: Type, field_name: str) -> str:
return comment[1:].strip()

# Check for comment on the line before the field
# TODO: this will likely results in unintentional docstrings?
# TODO: this may result in unintentional helptext?
comment_index = field_index
comments: List[str] = []
while True:
Expand Down
27 changes: 15 additions & 12 deletions dcargs/_parse.py
Original file line number Diff line number Diff line change
@@ -1,31 +1,35 @@
import argparse
import dataclasses
import enum
from typing import Any, ClassVar, Dict, Generic, Type, TypeVar, Union, get_type_hints
from typing import Any, Dict, Optional, Sequence, Type, TypeVar, Union

from . import _strings
from ._parsers import Parser, ParserDefinition

DataclassType = TypeVar("DataclassType")


def parse(cls: Type[DataclassType], description: str = "") -> DataclassType:
def parse(
cls: Type[DataclassType],
description: str = "",
args: Optional[Sequence[str]] = None,
) -> DataclassType:
"""Populate a dataclass via CLI args."""
assert dataclasses.is_dataclass(cls)

parser_definition = ParserDefinition.from_dataclass(cls)
parser_definition = ParserDefinition.from_dataclass(cls, parent_dataclasses=set())

root_parser = argparse.ArgumentParser(
description=_strings.dedent(description),
formatter_class=argparse.RawTextHelpFormatter,
)
parser_definition.apply(Parser.make(root_parser))
namespace = root_parser.parse_args()
namespace = root_parser.parse_args(args)

return construct_dataclass(cls, vars(namespace))
return _construct_dataclass(cls, vars(namespace))


def construct_dataclass(
def _construct_dataclass(
cls: Type[DataclassType], values: Dict[str, Any]
) -> DataclassType:
"""Construct a dataclass object from a dictionary of values from argparse."""
Expand All @@ -34,6 +38,7 @@ def construct_dataclass(
fields = dataclasses.fields(cls)

kwargs: Dict[str, Any] = {}

for field in fields:
if not field.init:
continue
Expand All @@ -47,7 +52,7 @@ def construct_dataclass(
# Nested dataclasses
elif dataclasses.is_dataclass(field.type):
arg_prefix = field.name + _strings.NESTED_DATACLASS_DELIMETER
value = construct_dataclass(
value = _construct_dataclass(
field.type,
values={
k[len(arg_prefix) :]: v
Expand All @@ -60,20 +65,18 @@ def construct_dataclass(
elif (
hasattr(field.type, "__origin__")
and field.type.__origin__ is Union
and all(
map(dataclasses.is_dataclass, get_type_hints(cls)[field.name].__args__)
)
and all(map(dataclasses.is_dataclass, field.type.__args__))
):
subparser_dest = _strings.SUBPARSER_DEST_FMT.format(name=field.name)
assert subparser_dest in values.keys()
options = get_type_hints(cls)[field.name].__args__
options = field.type.__args__
chosen_cls = None
for option in options:
if option.__name__ == values[subparser_dest]:
chosen_cls = option
break
assert chosen_cls is not None
value = construct_dataclass(chosen_cls, values)
value = _construct_dataclass(chosen_cls, values)

# General case
else:
Expand Down
23 changes: 18 additions & 5 deletions dcargs/_parsers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import argparse
import dataclasses
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union, get_type_hints
from typing import Any, Dict, List, Optional, Set, Type, Union, get_type_hints

from . import _strings
from ._arguments import ArgumentDefinition
Expand Down Expand Up @@ -51,22 +51,33 @@ def apply(self, parsers: Parser) -> None:
subparser_def.apply(Parser.make(subparser))

@staticmethod
def from_dataclass(cls: Type[Any]) -> "ParserDefinition":
def from_dataclass(
cls: Type[Any], parent_dataclasses: Set[Type] = set()
) -> "ParserDefinition":
"""Create a parser definition from a dataclass."""

assert dataclasses.is_dataclass(cls)
assert (
cls not in parent_dataclasses
), f"Found a cyclic dataclass dependency with type {cls}"

args = []
subparsers = None
annotations = get_type_hints(cls)
for field in dataclasses.fields(cls):
if not field.init:
continue

# Resolve forward references
field.type = annotations[field.name]

vanilla_field: bool = True

# Add arguments for nested dataclasses
if dataclasses.is_dataclass(field.type):
child_definition = ParserDefinition.from_dataclass(field.type)
child_definition = ParserDefinition.from_dataclass(
field.type, parent_dataclasses | {cls}
)
child_args = child_definition.args
for arg in child_args:
arg.name = (
Expand All @@ -82,7 +93,7 @@ def from_dataclass(cls: Type[Any]) -> "ParserDefinition":

# Unions of dataclasses should create subparsers
if hasattr(field.type, "__origin__") and field.type.__origin__ is Union:
options = get_type_hints(cls)[field.name].__args__
options = field.type.__args__
if all(map(dataclasses.is_dataclass, options)):
assert (
subparsers is None
Expand All @@ -91,7 +102,9 @@ def from_dataclass(cls: Type[Any]) -> "ParserDefinition":
subparsers = SubparsersDefinition(
name=field.name,
parsers={
option.__name__: ParserDefinition.from_dataclass(option)
option.__name__: ParserDefinition.from_dataclass(
option, parent_dataclasses | {cls}
)
for option in options
},
)
Expand Down
42 changes: 0 additions & 42 deletions example.py

This file was deleted.

Loading

0 comments on commit a6570f6

Please sign in to comment.