Mirror of https://github.com/THU-MIG/yolov10.git (synced 2025-05-23 13:34:23 +08:00)
Fix Ray 2.7.0 breaking changes (#4964)
This commit is contained in:
parent 742ec7fb1d
commit 0cf82f5040

.github/workflows/ci.yaml (vendored): 2 lines changed
@@ -241,7 +241,7 @@ jobs:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

   Conda:
-    if: github.repository == 'ultralytics/ultralytics' && (github.event_name != 'workflow_dispatch' || github.event.inputs.conda == 'true' || github.event_name == 'schedule')
+    if: github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event.inputs.conda == 'true')
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
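As a rough illustration of how the two gate expressions differ, here is a small Python model (a sketch only, not part of the commit; it assumes the github.repository check passes and that github.event.inputs.conda is empty except on a manual dispatch that sets it):

# Illustrative model of the two 'if:' expressions above (not part of the commit).
def conda_runs_old(event_name: str, conda_input: str = '') -> bool:
    # Old gate: every non-manual trigger (push, pull_request, schedule) runs the Conda job;
    # a manual workflow_dispatch runs it only when the 'conda' input is 'true'.
    return event_name != 'workflow_dispatch' or conda_input == 'true' or event_name == 'schedule'

def conda_runs_new(event_name: str, conda_input: str = '') -> bool:
    # New gate: only scheduled runs, or an explicit 'conda' input, trigger the job.
    return event_name == 'schedule' or conda_input == 'true'

assert conda_runs_old('push') and not conda_runs_new('push')        # pushes no longer run Conda
assert conda_runs_old('schedule') and conda_runs_new('schedule')    # the nightly schedule still does
assert conda_runs_new('workflow_dispatch', conda_input='true')      # manual opt-in still works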
@@ -79,7 +79,6 @@ def test_predict_sam():


 @pytest.mark.skipif(not CUDA_IS_AVAILABLE, reason='CUDA is not available')
-@pytest.mark.skipif(True, reason="RayTune Error pyarrow.lib.ArrowInvalid: URI has empty scheme: './runs/tune'")
 def test_model_ray_tune():
     with contextlib.suppress(RuntimeError):  # RuntimeError may be caused by out-of-memory
         YOLO('yolov8n-cls.yaml').tune(use_ray=True,
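The unconditional skip can go because the quoted error came from the relative './runs/tune' storage path that Ray 2.7.0 rejects; the tuner changes further down switch to a real save directory. For context, the minimal form of the call this test makes (the test's remaining keyword arguments are truncated in the hunk above and are therefore omitted here):

from ultralytics import YOLO

# Minimal form of the tuning call exercised by test_model_ray_tune; the test's
# extra keyword arguments are cut off in the diff above, so only use_ray is shown.
model = YOLO('yolov8n-cls.yaml')
model.tune(use_ray=True)  # dispatches to run_ray_tune(), shown in the hunks below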
@@ -41,7 +41,7 @@ def test_model_methods():
     model.to('cpu')
     model.fuse()
     model.clear_callback('on_train_start')
-    model._reset_callbacks()
+    model.reset_callbacks()

     # Model properties
     _ = model.names
@@ -392,17 +392,17 @@ class Model(nn.Module):
         """Clear all event callbacks."""
         self.callbacks[event] = []

+    def reset_callbacks(self):
+        """Reset all registered callbacks."""
+        for event in callbacks.default_callbacks.keys():
+            self.callbacks[event] = [callbacks.default_callbacks[event][0]]
+
     @staticmethod
     def _reset_ckpt_args(args):
         """Reset arguments when loading a PyTorch model."""
         include = {'imgsz', 'data', 'task', 'single_cls'}  # only remember these arguments when loading a PyTorch model
         return {k: v for k, v in args.items() if k in include}

-    def _reset_callbacks(self):
-        """Reset all registered callbacks."""
-        for event in callbacks.default_callbacks.keys():
-            self.callbacks[event] = [callbacks.default_callbacks[event][0]]
-
     # def __getattr__(self, attr):
     #     """Raises error if object has no requested attribute."""
     #     name = self.__class__.__name__
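The reset helper is promoted from _reset_callbacks to the public reset_callbacks, since the tests and the Ray tuner call it from outside the class. A short usage sketch of the callback API after the rename; the event name and the trivial callback function here are illustrative:

from ultralytics import YOLO

def on_start(trainer):
    print('training started')  # trivial example callback

model = YOLO('yolov8n.pt')
model.add_callback('on_train_start', on_start)  # register an extra callback for the event
model.clear_callback('on_train_start')          # remove every callback for that event
model.reset_callbacks()                         # restore the single default callback per event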
@@ -2,8 +2,8 @@

 import subprocess

-from ultralytics.cfg import TASK2DATA, TASK2METRIC
-from ultralytics.utils import DEFAULT_CFG_DICT, LOGGER, NUM_THREADS
+from ultralytics.cfg import TASK2DATA, TASK2METRIC, get_save_dir
+from ultralytics.utils import DEFAULT_CFG, DEFAULT_CFG_DICT, LOGGER, NUM_THREADS


 def run_ray_tune(model,
@@ -93,9 +93,10 @@ def run_ray_tune(model,
         Returns:
             None.
         """
-        model._reset_callbacks()
+        model.reset_callbacks()
         config.update(train_args)
-        model.train(**config)
+        results = model.train(**config)
+        return results.results_dict

     # Get search space
     if not space:
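Returning results.results_dict leans on Ray Tune's function-trainable contract: a dict returned at the end of a trial is recorded as that trial's final metrics, which the scheduler and the result grid can then use. A self-contained toy sketch of that contract (not YOLO-specific):

from ray import tune

def trainable(config):
    # The returned dict plays the role of results.results_dict above.
    return {'score': (config['x'] - 0.5) ** 2}

tuner = tune.Tuner(trainable, param_space={'x': tune.uniform(0.0, 1.0)})
result_grid = tuner.fit()
best = result_grid.get_best_result(metric='score', mode='min')
print(best.config, best.metrics['score'])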
@@ -123,10 +124,12 @@ def run_ray_tune(model,
     tuner_callbacks = [WandbLoggerCallback(project='YOLOv8-tune')] if wandb else []

     # Create the Ray Tune hyperparameter search tuner
+    tune_dir = get_save_dir(DEFAULT_CFG, name='tune')
+    tune_dir.mkdir(parents=True, exist_ok=True)
     tuner = tune.Tuner(trainable_with_resources,
                        param_space=space,
                        tune_config=tune.TuneConfig(scheduler=asha_scheduler, num_samples=max_samples),
-                       run_config=RunConfig(callbacks=tuner_callbacks, storage_path='./runs/tune'))
+                       run_config=RunConfig(callbacks=tuner_callbacks, storage_path=tune_dir))

     # Run the hyperparameter search
     tuner.fit()
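This is the heart of the Ray 2.7.0 fix: the relative string './runs/tune' is now fed through pyarrow's URI resolution, which is what raised the 'URI has empty scheme' error quoted in the removed test skip, so a concrete save directory is created and passed instead. A small sketch of that failure mode, assuming the error does surface from pyarrow's filesystem resolution as the skip message suggests:

from pathlib import Path
import pyarrow as pa
import pyarrow.fs as pafs

# An absolute local path resolves to a LocalFileSystem handle...
fs, path = pafs.FileSystem.from_uri(str(Path('runs/tune').resolve()))

# ...while a bare relative string is not a valid URI and is rejected.
try:
    pafs.FileSystem.from_uri('./runs/tune')
except pa.ArrowInvalid as err:
    print(err)  # e.g. "URI has empty scheme: './runs/tune'"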