Skip to content

Commit

Permalink
Set Accelerate's dataloader_config to use non_blocking transfers when pin_memory is enabled
Browse files Browse the repository at this point in the history
  • Loading branch information
rockerBOO committed Jan 23, 2025
1 parent c4b0bb6 commit 50d8daa
Showing 1 changed file with 4 additions and 1 deletion.
5 changes: 4 additions & 1 deletion library/train_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
Tuple,
Union
)
from accelerate import Accelerator, InitProcessGroupKwargs, DistributedDataParallelKwargs, PartialState
from accelerate import Accelerator, InitProcessGroupKwargs, DistributedDataParallelKwargs, PartialState, DataLoaderConfiguration
import glob
import math
import os
Expand Down Expand Up @@ -5299,6 +5299,8 @@ def prepare_accelerator(args: argparse.Namespace):
kwargs_handlers = [i for i in kwargs_handlers if i is not None]
deepspeed_plugin = deepspeed_utils.prepare_deepspeed_plugin(args)

dataloader_config = DataLoaderConfiguration(non_blocking=args.pin_memory)

accelerator = Accelerator(
gradient_accumulation_steps=args.gradient_accumulation_steps,
mixed_precision=args.mixed_precision,
Expand All @@ -5307,6 +5309,7 @@ def prepare_accelerator(args: argparse.Namespace):
kwargs_handlers=kwargs_handlers,
dynamo_backend=dynamo_backend,
deepspeed_plugin=deepspeed_plugin,
dataloader_config=dataloader_config
)
print("accelerator device:", accelerator.device)
return accelerator
Expand Down

0 comments on commit 50d8daa

Please sign in to comment.