
Commit

Fix typo (#952)
KoyamaSohei authored Aug 21, 2023
1 parent 852d632 commit 824f2ff
Showing 3 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions alpa/device_mesh.py
@@ -1513,9 +1513,9 @@ class DistributedArray:
     a normal numpy array.
     Internally, it stores a pointer to all remote buffers.
-    The buffers are stored distributedly on remote workers' device memeory.
+    The buffers are stored distributedly on remote workers' device memory.
     When users require the value of the array. These buffers will be gathered
-    to the dirver.
+    to the driver.
     """

     def __init__(self,
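The docstring touched above describes the core pattern of DistributedArray: the driver holds only references to buffers that live in remote workers' device memory, and the actual values are gathered back to the driver on demand. A minimal, hypothetical sketch of that pattern (the ToyDistributedArray class and fetch_fn below are invented for illustration and are not Alpa's real API):

    import numpy as np

    class ToyDistributedArray:
        """Illustrative stand-in: a driver-side handle that only holds
        references to buffers living in remote workers' device memory."""

        def __init__(self, shard_refs, fetch_fn):
            # shard_refs: opaque handles to per-worker buffers
            # fetch_fn: callable that pulls one shard's data back to the driver
            self.shard_refs = shard_refs
            self.fetch_fn = fetch_fn

        def to_numpy(self):
            # Gather every remote shard to the driver and assemble a
            # normal numpy array.
            shards = [self.fetch_fn(ref) for ref in self.shard_refs]
            return np.concatenate(shards)

    # In-process dict standing in for two workers' device memory.
    fake_worker_memory = {0: np.arange(4), 1: np.arange(4, 8)}
    arr = ToyDistributedArray(shard_refs=[0, 1],
                              fetch_fn=lambda ref: fake_worker_memory[ref])
    print(arr.to_numpy())  # [0 1 2 3 4 5 6 7]

Here np.concatenate merely stands in for whatever assembly the real implementation performs when materializing the array on the driver.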
2 changes: 1 addition & 1 deletion alpa/pipeline_parallel/pipeshard_executable.py
@@ -1,4 +1,4 @@
"""The dirver part and worker part of a pipeshard executable."""
"""The driver part and worker part of a pipeshard executable."""
import logging
from functools import partial
import json
2 changes: 1 addition & 1 deletion benchmark/alpa/benchmark_parallel_utils.py
@@ -236,7 +236,7 @@ def benchmark_training_executable(niter,
         executable.sync()
         e2e_latency = (time.time() - tic) / niter
         latencies = [e2e_latency]
-        print(f"latency with dirver overhead: {e2e_latency:.3f}")
+        print(f"latency with driver overhead: {e2e_latency:.3f}")
     else:
         # Benchmark latency without driver overhead
         for i in range(niter):
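For context, the corrected print statement sits in the branch that measures end-to-end latency including driver overhead: the wall-clock time for all iterations is divided by niter after a final sync. A hedged, self-contained sketch of that timing pattern (run_once and sync below are stand-ins, not the benchmark's actual executable API):

    import time

    def time_with_driver_overhead(run_once, sync, niter):
        # Average wall-clock time per iteration, including driver-side
        # overhead, mirroring e2e_latency = (time.time() - tic) / niter
        # in the diff above.
        tic = time.time()
        for _ in range(niter):
            run_once()
        sync()  # wait for asynchronously dispatched work before stopping the clock
        e2e_latency = (time.time() - tic) / niter
        print(f"latency with driver overhead: {e2e_latency:.3f}")
        return [e2e_latency]

    # Trivial stand-ins just to show the call shape.
    latencies = time_with_driver_overhead(run_once=lambda: None,
                                          sync=lambda: None,
                                          niter=10)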
