# Enums

We can generate argument parsers from more advanced type annotations, like enums.

import dataclasses
import enum

import tyro

 7class OptimizerType(enum.Enum):
 8    ADAM = enum.auto()
 9    SGD = enum.auto()
10
11
# Frozen (immutable) config: tyro builds one CLI flag per field; the
# string literal under each field becomes that flag's help text.
@dataclasses.dataclass(frozen=True)
class TrainConfig:
    # Enums are handled seamlessly.
    optimizer_type: OptimizerType = OptimizerType.ADAM
    """Gradient-based optimizer to use."""

    learning_rate: float = 1e-4
    """Learning rate for optimizer."""

if __name__ == "__main__":
    # tyro.cli parses sys.argv against TrainConfig's fields and returns
    # a populated (frozen) TrainConfig instance.
    config = tyro.cli(TrainConfig)
    print(config)

python 01_basics/04_enums.py --help
usage: 04_enums.py [-h] [--optimizer-type {ADAM,SGD}] [--learning-rate FLOAT]

╭─ options ────────────────────────────────────────────────────────────────╮
│ -h, --help              show this help message and exit                  │
│ --optimizer-type {ADAM,SGD}                                              │
│                         Gradient-based optimizer to use. (default: ADAM) │
│ --learning-rate FLOAT   Learning rate for optimizer. (default: 0.0001)   │
╰──────────────────────────────────────────────────────────────────────────╯

python 01_basics/04_enums.py --optimizer-type SGD
TrainConfig(optimizer_type=<OptimizerType.SGD: 2>, learning_rate=0.0001)

python 01_basics/04_enums.py --optimizer-type ADAM --learning-rate 3e-4
TrainConfig(optimizer_type=<OptimizerType.ADAM: 1>, learning_rate=0.0003)