Resolve python argparse DDP bug (#6009)

parent 9e424700a8
commit ab0b47e386
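In short: before this change, generate_ddp_command launched DDP training from str(Path(sys.argv[0]).resolve()) and only fell back to a generated temp file when that path failed a regex safety check. Re-running the original entry script in each spawned worker meant its argparse arguments were parsed again, which appears to be the source of the argparse bug. The fix always writes a self-contained temp training file via generate_ddp_file, so workers never re-parse the command line; the now-unused re and pathlib imports are dropped accordingly.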
@@ -1,12 +1,10 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
 import os
-import re
 import shutil
 import socket
 import sys
 import tempfile
-from pathlib import Path
 
 from . import USER_CONFIG_DIR
 from .torch_utils import TORCH_1_9
@@ -28,14 +26,19 @@ def generate_ddp_file(trainer):
     """Generates a DDP file and returns its file name."""
     module, name = f'{trainer.__class__.__module__}.{trainer.__class__.__name__}'.rsplit('.', 1)
 
-    content = f'''overrides = {vars(trainer.args)} \nif __name__ == "__main__":
+    content = f"""
+# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
+overrides = {vars(trainer.args)}
+
+if __name__ == "__main__":
     from {module} import {name}
     from ultralytics.utils import DEFAULT_CFG_DICT
 
     cfg = DEFAULT_CFG_DICT.copy()
     cfg.update(save_dir='')  # handle the extra key 'save_dir'
     trainer = {name}(cfg=cfg, overrides=overrides)
-    trainer.train()'''
+    results = trainer.train()
+"""
     (USER_CONFIG_DIR / 'DDP').mkdir(exist_ok=True)
     with tempfile.NamedTemporaryFile(prefix='_temp_',
                                      suffix=f'{id(trainer)}.py',
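For illustration, a sketch of the temp file the new content template would emit, with a hypothetical overrides dict and a hypothetical trainer class standing in for vars(trainer.args) and {module}.{name}:

# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
overrides = {'model': 'yolov8n.pt', 'data': 'coco8.yaml', 'epochs': 3}  # hypothetical vars(trainer.args)

if __name__ == "__main__":
    from ultralytics.models.yolo.detect.train import DetectionTrainer  # hypothetical {module} / {name}
    from ultralytics.utils import DEFAULT_CFG_DICT

    cfg = DEFAULT_CFG_DICT.copy()
    cfg.update(save_dir='')  # handle the extra key 'save_dir'
    trainer = DetectionTrainer(cfg=cfg, overrides=overrides)
    results = trainer.train()

Because every training argument is baked into overrides, the worker processes never need to read sys.argv at all.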
@@ -52,10 +55,7 @@ def generate_ddp_command(world_size, trainer):
     import __main__  # noqa local import to avoid https://github.com/Lightning-AI/lightning/issues/15218
     if not trainer.resume:
         shutil.rmtree(trainer.save_dir)  # remove the save_dir
-    file = str(Path(sys.argv[0]).resolve())
-    safe_pattern = re.compile(r'^[a-zA-Z0-9_. /\\-]{1,128}$')  # allowed characters and maximum of 100 characters
-    if not (safe_pattern.match(file) and Path(file).exists() and file.endswith('.py')):  # using CLI
-        file = generate_ddp_file(trainer)
+    file = generate_ddp_file(trainer)
     dist_cmd = 'torch.distributed.run' if TORCH_1_9 else 'torch.distributed.launch'
     port = find_free_network_port()
     cmd = [sys.executable, '-m', dist_cmd, '--nproc_per_node', f'{world_size}', '--master_port', f'{port}', file]
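With file now always the generated temp script (named _temp_<id(trainer)>.py, presumably written under USER_CONFIG_DIR / 'DDP'), the final command is a plain torch.distributed.run invocation of that file. A hypothetical resolved value, with made-up interpreter path, world size, port, and trainer id:

cmd = ['/usr/bin/python3', '-m', 'torch.distributed.run',
       '--nproc_per_node', '2', '--master_port', '51234',
       '/home/user/.config/Ultralytics/DDP/_temp_140123456789.py']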