Mirror of https://github.com/THU-MIG/yolov10.git (synced 2025-07-07 22:04:53 +08:00)
Fixed dataloader CPU bottleneck for small batch sizes (#7659)
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
parent 7a39ecd0d3
commit c267bd3a0c
@@ -107,7 +107,7 @@ def build_dataloader(dataset, batch, workers, shuffle=True, rank=-1):
     """Return an InfiniteDataLoader or DataLoader for training or validation set."""
     batch = min(batch, len(dataset))
     nd = torch.cuda.device_count()  # number of CUDA devices
-    nw = min([os.cpu_count() // max(nd, 1), batch, workers])  # number of workers
+    nw = min([os.cpu_count() // max(nd, 1), workers])  # number of workers
     sampler = None if rank == -1 else distributed.DistributedSampler(dataset, shuffle=shuffle)
     generator = torch.Generator()
     generator.manual_seed(6148914691236517205 + RANK)
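The change in this hunk is the single worker-count line: `batch` is dropped from the formula, so a small batch no longer caps the number of dataloader workers and starves the CPU side of the input pipeline. A minimal sketch of the arithmetic, using a hypothetical machine (16 CPU cores, 1 CUDA device, 8 requested workers):

# Hypothetical machine: 16 CPU cores, 1 CUDA device, 8 requested workers.
cpu_count, nd, workers = 16, 1, 8
batch = 2  # small training batch

nw_old = min([cpu_count // max(nd, 1), batch, workers])  # old formula: capped by batch -> 2
nw_new = min([cpu_count // max(nd, 1), workers])         # new formula: 8, independent of batch

print(nw_old, nw_new)  # 2 8

With the old formula a batch of 2 allowed only 2 workers; the new formula uses up to `workers`, regardless of batch size.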
@@ -332,10 +332,7 @@ class BaseTrainer:
             f'Image sizes {self.args.imgsz} train, {self.args.imgsz} val\n'
             f'Using {self.train_loader.num_workers * (world_size or 1)} dataloader workers\n'
             f"Logging results to {colorstr('bold', self.save_dir)}\n"
-            f'Starting training for '
-            f'{self.args.time} hours...'
-            if self.args.time
-            else f"{self.epochs} epochs..."
+            f'Starting training for ' + (f"{self.args.time} hours..." if self.args.time else f"{self.epochs} epochs...")
         )
         if self.args.close_mosaic:
             base_idx = (self.epochs - self.args.close_mosaic) * nb
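The trainer hunk fixes the startup log message. In the old code, adjacent f-string literals are implicitly concatenated before the conditional expression is evaluated, so when `self.args.time` was unset the `else` branch replaced the whole concatenated message rather than just the duration suffix. The new line parenthesizes the ternary and joins it with `+`. A self-contained illustration with placeholder values (not the trainer's actual attributes):

# Placeholder values standing in for self.args.time and self.epochs.
time_hours = 0   # time budget not set (falsy)
epochs = 100

# Old pattern: the adjacent string literals are joined first, then the whole
# result becomes the "if" branch of the conditional expression.
old = (
    "Starting training for "
    f"{time_hours} hours..."
    if time_hours
    else f"{epochs} epochs..."
)
print(old)  # '100 epochs...'  -- the 'Starting training for ' prefix is lost

# New pattern: parenthesize the conditional and join it explicitly with '+',
# so only the duration part depends on the flag.
new = "Starting training for " + (f"{time_hours} hours..." if time_hours else f"{epochs} epochs...")
print(new)  # 'Starting training for 100 epochs...'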