Skip to content

Commit

Permalink
Update config_manager.py
Browse files (browse the repository at this point in the history)
  • Loading branch information
152334H authored Oct 4, 2024
1 parent 4da7376 commit 747c3b5
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions torchtitan/config_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,8 +333,8 @@ def __init__(self):
choices=["bfloat16", "float32"],
help="""
torch dtype to use for parameters when applying mixed precision.
When data_parallel_degree > 1, this changes FSDP's `param_dtype`.
When data_parallel_degree == 1, this enables AMP autocast.
When data_parallel_shard_degree > 1, this changes FSDP's `param_dtype`.
When data_parallel_shard_degree == 1, this enables AMP autocast.
""",
)
self.parser.add_argument(
Expand Down

0 comments on commit 747c3b5

Please sign in to comment.