Mirror of https://github.com/THU-MIG/yolov10.git (synced 2025-05-23 21:44:22 +08:00)
ultralytics 8.0.37 add TFLite metadata in AutoBackend (#953)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Ayush Chaurasia <ayush.chaurarsia@gmail.com>
Co-authored-by: Yonghye Kwon <developer.0hye@gmail.com>
Co-authored-by: Aarni Koskela <akx@iki.fi>
parent 20fe708f31
commit bdc6cd4d8b
.gitignore (vendored): 3 changes
@@ -81,6 +81,9 @@ target/
 profile_default/
 ipython_config.py
 
+# Profiling
+*.pclprof
+
 # pyenv
 .python-version
 
@@ -216,9 +216,7 @@ See [Classification Docs](https://docs.ultralytics.com/tasks/classification/) fo
 
 ## <div align="center">Ultralytics HUB</div>
 
-[Ultralytics HUB](https://bit.ly/ultralytics_hub) is our ⭐ **NEW** no-code solution to visualize datasets, train YOLOv8
-🚀 models, and deploy to the real world in a seamless experience. Get started for **Free** now! Also run YOLOv8 models on
-your iOS or Android device by downloading the [Ultralytics App](https://ultralytics.com/app_install)!
+Experience seamless AI with [Ultralytics HUB](https://bit.ly/ultralytics_hub) ⭐, the all-in-one solution for data visualization, YOLOv5 and YOLOv8 (coming soon) 🚀 model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://ultralytics.com/app_install). Start your journey for **Free** now!
 
 <a href="https://bit.ly/ultralytics_hub" target="_blank">
 <img width="100%" src="https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png"></a>
@@ -34,10 +34,10 @@ seaborn>=0.11.0
 # openvino-dev>=2022.3  # OpenVINO export
 
 # Extras --------------------------------------
-ipython  # interactive notebook
 psutil  # system utilization
 thop>=0.1.1  # FLOPs computation
 wheel>=0.38.0  # Snyk vulnerability fix
+# ipython  # interactive notebook
 # albumentations>=1.0.3
 # pycocotools>=2.0.6  # COCO mAP
 # roboflow
@@ -1,8 +1,8 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 
-__version__ = "8.0.36"
+__version__ = "8.0.37"
 
 from ultralytics.yolo.engine.model import YOLO
 from ultralytics.yolo.utils.checks import check_yolo as checks
 
-__all__ = ["__version__", "YOLO", "hub", "checks"]  # allow simpler import
+__all__ = ["__version__", "YOLO", "checks"]  # allow simpler import
@@ -12,7 +12,7 @@ from random import random
 import requests
 
 from ultralytics.yolo.utils import (DEFAULT_CFG_DICT, ENVIRONMENT, LOGGER, RANK, SETTINGS, TryExcept, __version__,
-                                    colorstr, emojis, get_git_origin_url, is_git_dir, is_github_actions_ci,
+                                    colorstr, emojis, get_git_origin_url, is_colab, is_git_dir, is_github_actions_ci,
                                     is_pip_package, is_pytest_running)
 from ultralytics.yolo.utils.checks import check_online
 
@@ -36,6 +36,8 @@ def check_dataset_disk_space(url='https://ultralytics.com/assets/coco128.zip', s
 
 def request_with_credentials(url: str) -> any:
     """ Make an ajax request with cookies attached """
+    if not is_colab():
+        raise OSError('request_with_credentials() must run in a Colab environment')
     from google.colab import output  # noqa
     from IPython import display  # noqa
     display.display(
@@ -1,7 +1,9 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 
+import ast
+import contextlib
 import json
 import platform
+import zipfile
 from collections import OrderedDict, namedtuple
 from pathlib import Path
 from urllib.parse import urlparse
@@ -207,6 +209,12 @@ class AutoBackend(nn.Module):
             interpreter.allocate_tensors()  # allocate
             input_details = interpreter.get_input_details()  # inputs
             output_details = interpreter.get_output_details()  # outputs
+            # load metadata
+            with contextlib.suppress(zipfile.BadZipFile):
+                with zipfile.ZipFile(w, "r") as model:
+                    meta_file = model.namelist()[0]
+                    meta = ast.literal_eval(model.read(meta_file).decode("utf-8"))
+                    stride, names = int(meta['stride']), meta['names']
         elif tfjs:  # TF.js
             raise NotImplementedError('ERROR: YOLOv8 TF.js inference is not supported')
         elif paddle:  # PaddlePaddle
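For context (not part of the commit): the metadata block above can be exercised standalone. A `*.tflite` file that carries this metadata is readable as a zip archive, so the standard library is enough to recover the stride and class names. The file name below is a hypothetical example.

```python
import ast
import contextlib
import zipfile

stride, names = 32, None  # fallbacks if the file carries no metadata
with contextlib.suppress(zipfile.BadZipFile, FileNotFoundError):
    with zipfile.ZipFile("yolov8n_float32.tflite", "r") as model:  # hypothetical export output
        meta_file = model.namelist()[0]  # metadata is stored as the first archive member
        meta = ast.literal_eval(model.read(meta_file).decode("utf-8"))
        stride, names = int(meta["stride"]), meta["names"]
print(stride, names)
```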
@@ -214,7 +222,7 @@ class AutoBackend(nn.Module):
             check_requirements('paddlepaddle-gpu' if cuda else 'paddlepaddle')
             import paddle.inference as pdi
             if not Path(w).is_file():  # if not *.pdmodel
-                w = next(Path(w).rglob('*.pdmodel'))  # get *.xml file from *_openvino_model dir
+                w = next(Path(w).rglob('*.pdmodel'))  # get *.pdmodel file from *_paddle_model dir
             weights = Path(w).with_suffix('.pdiparams')
             config = pdi.Config(str(w), str(weights))
             if cuda:
@@ -328,6 +336,9 @@ class AutoBackend(nn.Module):
                         scale, zero_point = output['quantization']
                         x = (x.astype(np.float32) - zero_point) * scale  # re-scale
                     y.append(x)
+            # TF segment fixes: export is reversed vs ONNX export and protos are transposed
+            if len(self.output_details) == 2:  # segment
+                y = [y[1], np.transpose(y[0], (0, 3, 1, 2))]
             y = [x if isinstance(x, np.ndarray) else x.numpy() for x in y]
             y[0][..., :4] *= [w, h, w, h]  # xywh normalized to pixels
 
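A small aside (not part of the commit) on the `np.transpose(y[0], (0, 3, 1, 2))` call added above: TensorFlow exports are typically channels-last (NHWC), while the downstream mask handling expects channels-first (NCHW) prototypes, so the axes are reordered. The shapes below are illustrative only.

```python
import numpy as np

protos_nhwc = np.zeros((1, 160, 160, 32), dtype=np.float32)  # batch, height, width, channels
protos_nchw = np.transpose(protos_nhwc, (0, 3, 1, 2))        # batch, channels, height, width
print(protos_nchw.shape)  # (1, 32, 160, 160)
```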
@@ -1,5 +1,6 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 
+import ast
 import contextlib
 from copy import deepcopy
 from pathlib import Path
@@ -427,6 +428,8 @@ def parse_model(d, ch, verbose=True):  # model_dict, input_channels(3)
     for i, (f, n, m, args) in enumerate(d['backbone'] + d['head']):  # from, number, module, args
         m = eval(m) if isinstance(m, str) else m  # eval strings
         for j, a in enumerate(args):
+            # TODO: re-implement with eval() removal if possible
+            # args[j] = (locals()[a] if a in locals() else ast.literal_eval(a)) if isinstance(a, str) else a
         with contextlib.suppress(NameError):
                 args[j] = eval(a) if isinstance(a, str) else a  # eval strings
 
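The TODO added above points at `ast.literal_eval` as a possible replacement for `eval()`. As a quick illustration (not part of the commit): `literal_eval` only accepts Python literals, so it can parse the numeric and list arguments found in model YAMLs but refuses arbitrary expressions.

```python
import ast

print(ast.literal_eval("[1, 2, 3]"))  # -> [1, 2, 3]
print(ast.literal_eval("None"))       # -> None
try:
    ast.literal_eval("__import__('os').getcwd()")  # not a literal, so it is rejected
except ValueError as err:
    print("rejected:", err)
```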
@@ -480,28 +483,9 @@ def guess_model_task(model):
     Raises:
         SyntaxError: If the task of the model could not be determined.
     """
-    cfg = None
-    if isinstance(model, dict):
-        cfg = model
-    elif isinstance(model, nn.Module):  # PyTorch model
-        for x in 'model.args', 'model.model.args', 'model.model.model.args':
-            with contextlib.suppress(Exception):
-                return eval(x)['task']
-        for x in 'model.yaml', 'model.model.yaml', 'model.model.model.yaml':
-            with contextlib.suppress(Exception):
-                cfg = eval(x)
-                break
-    elif isinstance(model, (str, Path)):
-        model = str(model)
-        if '-seg' in model:
-            return "segment"
-        elif '-cls' in model:
-            return "classify"
-        else:
-            return "detect"
 
-    # Guess from YAML dictionary
-    if cfg:
+    def cfg2task(cfg):
+        # Guess from YAML dictionary
         m = cfg["head"][-1][-2].lower()  # output module name
         if m in ["classify", "classifier", "cls", "fc"]:
             return "classify"
@@ -510,8 +494,20 @@ def guess_model_task(model):
         if m in ["segment"]:
             return "segment"
 
+    # Guess from model cfg
+    if isinstance(model, dict):
+        with contextlib.suppress(Exception):
+            return cfg2task(model)
+
     # Guess from PyTorch model
-    if isinstance(model, nn.Module):
+    if isinstance(model, nn.Module):  # PyTorch model
+        for x in 'model.args', 'model.model.args', 'model.model.model.args':
+            with contextlib.suppress(Exception):
+                return eval(x)['task']
+        for x in 'model.yaml', 'model.model.yaml', 'model.model.model.yaml':
+            with contextlib.suppress(Exception):
+                return cfg2task(eval(x))
+
         for m in model.modules():
             if isinstance(m, Detect):
                 return "detect"
@@ -520,6 +516,16 @@ def guess_model_task(model):
         elif isinstance(m, Classify):
             return "classify"
 
+    # Guess from model filename
+    if isinstance(model, (str, Path)):
+        model = Path(model).stem
+        if '-seg' in model:
+            return "segment"
+        elif '-cls' in model:
+            return "classify"
+        else:
+            return "detect"
+
     # Unable to determine task from model
     raise SyntaxError("YOLO is unable to automatically guess model task. Explicitly define task for your model, "
                       "i.e. 'task=detect', 'task=segment' or 'task=classify'.")
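To make the filename fallback above concrete, here is a standalone sketch (not part of the commit) of the same logic; `Path(...).stem` strips the directory and extension before the suffix check. The file names are examples only.

```python
from pathlib import Path

def task_from_filename(model: str) -> str:
    stem = Path(model).stem
    if "-seg" in stem:
        return "segment"
    elif "-cls" in stem:
        return "classify"
    else:
        return "detect"

print(task_from_filename("weights/yolov8n-seg.pt"))  # segment
print(task_from_filename("yolov8n-cls.onnx"))        # classify
print(task_from_filename("yolov8n.pt"))              # detect
```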
@@ -1,3 +1,5 @@
 # Ultralytics YOLO 🚀, GPL-3.0 license
 
 from . import v8
+
+__all__ = ["v8"]
@@ -142,7 +142,7 @@ def check_cfg_mismatch(base: Dict, custom: Dict, e=None):
         string = ''
         for x in mismatched:
            matches = get_close_matches(x, base)  # key list
-            matches = [f"{k}={DEFAULT_CFG_DICT[k]}" if DEFAULT_CFG_DICT[k] is not None else k for k in matches]  # k=v
+            matches = [f"{k}={DEFAULT_CFG_DICT[k]}" if DEFAULT_CFG_DICT.get(k) is not None else k for k in matches]
            match_str = f"Similar arguments are i.e. {matches}." if matches else ''
            string += f"'{colorstr('red', 'bold', x)}' is not a valid YOLO argument. {match_str}\n"
        raise SyntaxError(string + CLI_HELP_MSG) from e
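A note on the one-character change above (not part of the commit): `DEFAULT_CFG_DICT.get(k)` returns `None` instead of raising `KeyError` when a matched key has no entry in the defaults, so the comprehension falls back to printing the bare key. Illustrative values only:

```python
defaults = {"imgsz": 640, "device": None}      # illustrative stand-in for DEFAULT_CFG_DICT
matches = ["imgsz", "device", "overlap_mask"]  # 'overlap_mask' is missing from the defaults here

print([f"{k}={defaults[k]}" if defaults.get(k) is not None else k for k in matches])
# ['imgsz=640', 'device', 'overlap_mask']
# with defaults[k] in the condition (the pre-fix form), the last key would raise KeyError
```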
@@ -4,3 +4,13 @@ from .base import BaseDataset
 from .build import build_classification_dataloader, build_dataloader, load_inference_source
 from .dataset import ClassificationDataset, SemanticDataset, YOLODataset
 from .dataset_wrappers import MixAndRectDataset
+
+__all__ = [
+    "BaseDataset",
+    "ClassificationDataset",
+    "MixAndRectDataset",
+    "SemanticDataset",
+    "YOLODataset",
+    "build_classification_dataloader",
+    "build_dataloader",
+    "load_inference_source",]
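Several hunks in this commit only add an `__all__` declaration. As a self-contained illustration (not part of the commit), `__all__` pins down which names a wildcard import exposes; the module here is synthetic.

```python
import types

demo = types.ModuleType("demo")
exec("public = 1\n_private = 2\n__all__ = ['public']", demo.__dict__)

# names that `from demo import *` would bind in the importer's namespace
star_names = getattr(demo, "__all__", [n for n in vars(demo) if not n.startswith("_")])
print(star_names)  # ['public']
```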
@@ -73,7 +73,7 @@ from ultralytics.yolo.utils import DEFAULT_CFG, LOGGER, __version__, callbacks,
 from ultralytics.yolo.utils.checks import check_imgsz, check_requirements, check_version, check_yaml
 from ultralytics.yolo.utils.files import file_size
 from ultralytics.yolo.utils.ops import Profile
-from ultralytics.yolo.utils.torch_utils import select_device, smart_inference_mode, get_latest_opset
+from ultralytics.yolo.utils.torch_utils import get_latest_opset, select_device, smart_inference_mode
 
 MACOS = platform.system() == 'Darwin'  # macOS environment
 
|
|||||||
onnx = self.file.with_suffix('.onnx')
|
onnx = self.file.with_suffix('.onnx')
|
||||||
|
|
||||||
# Export to TF SavedModel
|
# Export to TF SavedModel
|
||||||
subprocess.run(f'onnx2tf -i {onnx} --output_signaturedefs -o {f}', shell=True)
|
subprocess.run(f'onnx2tf -i {onnx} -o {f} --non_verbose', shell=True)
|
||||||
|
|
||||||
# Add TFLite metadata
|
# Add TFLite metadata
|
||||||
for tflite_file in Path(f).rglob('*.tflite'):
|
for tflite_file in Path(f).rglob('*.tflite'):
|
||||||
|
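The `# Add TFLite metadata` loop above visits every exported `*.tflite` and attaches the metadata that AutoBackend reads back (see the earlier hunk). The exporter's actual metadata writer is not shown in this diff; purely as a sketch of why the zipfile-based read works, a dict can be appended to a `.tflite` file as a zip member with the standard library. File name and values are hypothetical.

```python
import zipfile

meta = {"stride": 32, "names": {0: "person", 1: "bicycle"}}  # hypothetical values
with zipfile.ZipFile("yolov8n_float32.tflite", "a") as f:    # 'a' appends a zip archive to the existing file
    f.writestr("metadata.txt", str(meta))                    # literal text that ast.literal_eval can parse back
```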
@@ -108,8 +108,8 @@ class YOLO:
         Raises TypeError is model is not a PyTorch model
         """
         if not isinstance(self.model, nn.Module):
-            raise TypeError(f"model='{self.model}' must be a PyTorch model, but is a different type. PyTorch models "
-                            f"can be used to train, val, predict and export, i.e. "
+            raise TypeError(f"model='{self.model}' must be a *.pt PyTorch model, but is a different type. "
+                            f"PyTorch models can be used to train, val, predict and export, i.e. "
                             f"'yolo export model=yolov8n.pt', but exported formats like ONNX, TensorRT etc. only "
                             f"support 'predict' and 'val' modes, i.e. 'yolo predict model=yolov8n.onnx'.")
 
|
|||||||
if RANK in {0, -1}:
|
if RANK in {0, -1}:
|
||||||
self.model, _ = attempt_load_one_weight(str(self.trainer.best))
|
self.model, _ = attempt_load_one_weight(str(self.trainer.best))
|
||||||
self.overrides = self.model.args
|
self.overrides = self.model.args
|
||||||
self.metrics_data = self.trainer.validator.metrics
|
self.metrics_data = self.trainer.validator.metrics
|
||||||
|
|
||||||
def to(self, device):
|
def to(self, device):
|
||||||
"""
|
"""
|
||||||
|
@@ -221,11 +221,10 @@ def is_jupyter():
     Returns:
         bool: True if running inside a Jupyter Notebook, False otherwise.
     """
-    try:
+    with contextlib.suppress(Exception):
         from IPython import get_ipython
         return get_ipython() is not None
-    except ImportError:
-        return False
+    return False
 
 
 def is_docker() -> bool:
|
|||||||
Returns:
|
Returns:
|
||||||
(bool): True if pytest is running, False otherwise.
|
(bool): True if pytest is running, False otherwise.
|
||||||
"""
|
"""
|
||||||
try:
|
with contextlib.suppress(Exception):
|
||||||
import sys
|
|
||||||
return "pytest" in sys.modules
|
return "pytest" in sys.modules
|
||||||
except ImportError:
|
return False
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_github_actions_ci() -> bool:
|
def is_github_actions_ci() -> bool:
|
||||||
|
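The two hunks above swap `try/except ImportError` for `contextlib.suppress(Exception)`. A minimal standalone version (not part of the commit): when the suppressed block raises, execution resumes after the `with` statement and the function falls through to the final `return False`.

```python
import contextlib

def ipython_available() -> bool:  # hypothetical stand-in for is_jupyter()
    with contextlib.suppress(Exception):
        from IPython import get_ipython  # raises ImportError if IPython is not installed
        return get_ipython() is not None
    return False

print(ipython_available())
```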
@@ -1 +1,5 @@
 from .base import add_integration_callbacks, default_callbacks
+
+__all__ = [
+    'add_integration_callbacks',
+    'default_callbacks',]
|
|||||||
import pkg_resources as pkg
|
import pkg_resources as pkg
|
||||||
import psutil
|
import psutil
|
||||||
import torch
|
import torch
|
||||||
from IPython import display
|
|
||||||
from matplotlib import font_manager
|
from matplotlib import font_manager
|
||||||
|
|
||||||
from ultralytics.yolo.utils import (AUTOINSTALL, LOGGER, ROOT, USER_CONFIG_DIR, TryExcept, colorstr, downloads, emojis,
|
from ultralytics.yolo.utils import (AUTOINSTALL, LOGGER, ROOT, USER_CONFIG_DIR, TryExcept, colorstr, downloads, emojis,
|
||||||
@@ -292,8 +291,10 @@ def check_yolo(verbose=True):
         gib = 1 << 30  # bytes per GiB
         ram = psutil.virtual_memory().total
         total, used, free = shutil.disk_usage("/")
-        display.clear_output()
         s = f'({os.cpu_count()} CPUs, {ram / gib:.1f} GB RAM, {(total - free) / gib:.1f}/{total / gib:.1f} GB disk)'
+        with contextlib.suppress(Exception):  # clear display if ipython is installed
+            from IPython import display
+            display.clear_output()
     else:
         s = ''
 
@@ -3,3 +3,5 @@
 from ultralytics.yolo.v8.classify.predict import ClassificationPredictor, predict
 from ultralytics.yolo.v8.classify.train import ClassificationTrainer, train
 from ultralytics.yolo.v8.classify.val import ClassificationValidator, val
+
+__all__ = ["ClassificationPredictor", "predict", "ClassificationTrainer", "train", "ClassificationValidator", "val"]
@@ -3,3 +3,5 @@
 from .predict import DetectionPredictor, predict
 from .train import DetectionTrainer, train
 from .val import DetectionValidator, val
+
+__all__ = ["DetectionPredictor", "predict", "DetectionTrainer", "train", "DetectionValidator", "val"]
@@ -3,3 +3,5 @@
 from .predict import SegmentationPredictor, predict
 from .train import SegmentationTrainer, train
 from .val import SegmentationValidator, val
+
+__all__ = ["SegmentationPredictor", "predict", "SegmentationTrainer", "train", "SegmentationValidator", "val"]