ultralytics 8.0.157 fix unzip_file() bug (#4407)
Co-authored-by: Nadim Bou Alwan <64587372+nadinator@users.noreply.github.com>
parent 4885538693
commit aaba14e6b2
@@ -198,7 +198,7 @@ The ideal scenario is for all class ratios to be reasonably similar for each spl
     with open(dataset_yaml, 'w') as ds_y:
         yaml.safe_dump({
-            'path': save_path.as_posix(),
+            'path': split_dir.as_posix(),
             'train': 'train',
             'val': 'val',
             'names': classes
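This hunk is from a dataset-splitting walkthrough in the docs (the hunk context mentions keeping class ratios similar across splits), where a data YAML is written for each split; the fix points 'path' at the split's own directory instead of the shared save location. A minimal standalone sketch of that step, with split_dir, dataset_yaml and classes as hypothetical placeholders:

```python
import yaml
from pathlib import Path

split_dir = Path('datasets/kfold/split_1')          # hypothetical per-split folder
dataset_yaml = split_dir / 'split_1_dataset.yaml'   # hypothetical YAML filename
classes = {0: 'person', 1: 'car'}                   # hypothetical class-index -> name map

split_dir.mkdir(parents=True, exist_ok=True)
with open(dataset_yaml, 'w') as ds_y:
    yaml.safe_dump({
        'path': split_dir.as_posix(),  # the fixed line: each split references its own directory
        'train': 'train',
        'val': 'val',
        'names': classes}, ds_y)
```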
@@ -53,14 +53,6 @@ keywords: Ultralytics, data utils, YOLO, img2label_paths, exif_size, polygon2mas
 ## ::: ultralytics.data.utils.compress_one_image
 <br><br>
 
----
-## ::: ultralytics.data.utils.delete_dsstore
-<br><br>
-
----
-## ::: ultralytics.data.utils.zip_directory
-<br><br>
-
 ---
 ## ::: ultralytics.data.utils.autosplit
 <br><br>
@@ -13,6 +13,10 @@ keywords: Ultralytics, YOLO, download utilities, is_url, check_disk_space, get_g
 ## ::: ultralytics.utils.downloads.is_url
 <br><br>
 
+---
+## ::: ultralytics.utils.downloads.delete_dsstore
+<br><br>
+
 ---
 ## ::: ultralytics.utils.downloads.zip_directory
 <br><br>
@@ -7,7 +7,7 @@ import pytest
 
 from ultralytics.utils import ROOT, SETTINGS
 
-WEIGHT_DIR = Path(SETTINGS['weights_dir'])
+WEIGHTS_DIR = Path(SETTINGS['weights_dir'])
 TASK_ARGS = [
     ('detect', 'yolov8n', 'coco8.yaml'),
     ('segment', 'yolov8n-seg', 'coco8-seg.yaml'),
@@ -40,17 +40,17 @@ def test_train(task, model, data):
 
 @pytest.mark.parametrize('task,model,data', TASK_ARGS)
 def test_val(task, model, data):
-    run(f'yolo val {task} model={WEIGHT_DIR / model}.pt data={data} imgsz=32')
+    run(f'yolo val {task} model={WEIGHTS_DIR / model}.pt data={data} imgsz=32')
 
 
 @pytest.mark.parametrize('task,model,data', TASK_ARGS)
 def test_predict(task, model, data):
-    run(f"yolo predict model={WEIGHT_DIR / model}.pt source={ROOT / 'assets'} imgsz=32 save save_crop save_txt")
+    run(f"yolo predict model={WEIGHTS_DIR / model}.pt source={ROOT / 'assets'} imgsz=32 save save_crop save_txt")
 
 
 @pytest.mark.parametrize('model,format', EXPORT_ARGS)
 def test_export(model, format):
-    run(f'yolo export model={WEIGHT_DIR / model}.pt format={format} imgsz=32')
+    run(f'yolo export model={WEIGHTS_DIR / model}.pt format={format} imgsz=32')
 
 
 def test_rtdetr(task='detect', model='yolov8n-rtdetr.yaml', data='coco8.yaml'):
@@ -59,7 +59,7 @@ def test_rtdetr(task='detect', model='yolov8n-rtdetr.yaml', data='coco8.yaml'):
     run(f"yolo predict {task} model={model} source={ROOT / 'assets/bus.jpg'} imgsz=640 save save_crop save_txt")
 
 
-def test_fastsam(task='segment', model=WEIGHT_DIR / 'FastSAM-s.pt', data='coco8-seg.yaml'):
+def test_fastsam(task='segment', model=WEIGHTS_DIR / 'FastSAM-s.pt', data='coco8-seg.yaml'):
     source = ROOT / 'assets/bus.jpg'
 
     run(f'yolo segment val {task} model={model} data={data} imgsz=32')
@@ -95,7 +95,7 @@ def test_mobilesam():
     from ultralytics import SAM
 
     # Load the model
-    model = SAM(WEIGHT_DIR / 'mobile_sam.pt')
+    model = SAM(WEIGHTS_DIR / 'mobile_sam.pt')
 
     # Source
     source = ROOT / 'assets/zidane.jpg'
@@ -26,7 +26,7 @@ TMP = (ROOT / '../tests/tmp').resolve() # temp directory for test files
 
 def test_model_forward():
     model = YOLO(CFG)
-    model(SOURCE, imgsz=32)
+    model(SOURCE, imgsz=32, augment=True)
 
 
 def test_model_info():
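The only change here is the added augment=True flag, so the forward-pass test now exercises test-time augmentation. A minimal sketch of the same call outside the test suite, assuming CFG resolves to a model config such as yolov8n.yaml and with the image path as a placeholder:

```python
from ultralytics import YOLO

model = YOLO('yolov8n.yaml')  # build an untrained model from its config, as the test does with CFG
results = model('path/to/image.jpg', imgsz=32, augment=True)  # forward pass with test-time augmentation enabled
```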
@@ -120,8 +120,8 @@ def test_train_scratch():
 
 
 def test_train_pretrained():
-    model = YOLO(MODEL)
-    model.train(data='coco8.yaml', epochs=1, imgsz=32, cache='ram') # test RAM caching
+    model = YOLO(WEIGHTS_DIR / 'yolov8n-seg.pt')
+    model.train(data='coco8-seg.yaml', epochs=1, imgsz=32, cache='ram', copy_paste=0.5, mixup=0.5) # test RAM caching
     model(SOURCE)
 
 
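Outside pytest, the updated pretrained-training test corresponds roughly to the call below, assuming the yolov8n-seg.pt checkpoint is available locally (WEIGHTS_DIR in the tests points at the configured weights directory):

```python
from ultralytics import YOLO

model = YOLO('yolov8n-seg.pt')  # segmentation checkpoint, assumed to be downloaded beforehand
model.train(data='coco8-seg.yaml', epochs=1, imgsz=32, cache='ram',  # RAM-cache the tiny coco8-seg dataset
            copy_paste=0.5, mixup=0.5)                               # enable copy-paste and mixup augmentation
model('path/to/image.jpg')  # placeholder for the test's SOURCE image
```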
@@ -232,7 +232,8 @@ def test_results():
 @pytest.mark.skipif(not ONLINE, reason='environment is offline')
 def test_data_utils():
     # Test functions in ultralytics/data/utils.py
-    from ultralytics.data.utils import HUBDatasetStats, autosplit, zip_directory
+    from ultralytics.data.utils import HUBDatasetStats, autosplit
+    from ultralytics.utils.downloads import zip_directory
 
     # from ultralytics.utils.files import WorkingDirectory
     # with WorkingDirectory(ROOT.parent / 'tests'):
@@ -240,7 +241,7 @@ def test_data_utils():
     download('https://github.com/ultralytics/hub/raw/master/example_datasets/coco8.zip', unzip=False)
     shutil.move('coco8.zip', TMP)
     stats = HUBDatasetStats(TMP / 'coco8.zip', task='detect')
-    stats.get_json(save=False)
+    stats.get_json(save=True)
     stats.process_images()
 
     autosplit(TMP / 'coco8')
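Taken together, the two test_data_utils hunks reflect the module move: zip_directory now comes from ultralytics.utils.downloads, while HUBDatasetStats and autosplit stay in ultralytics.data.utils. A condensed sketch of the same workflow, assuming coco8.zip has already been downloaded into the working directory:

```python
from ultralytics.data.utils import HUBDatasetStats, autosplit
from ultralytics.utils.downloads import zip_directory  # new import location after this commit

stats = HUBDatasetStats('coco8.zip', task='detect')  # unpacks the zip as a side effect
stats.get_json(save=True)    # write the stats JSON to disk, matching the updated test
stats.process_images()       # compress images for upload

autosplit('coco8')           # writes autosplit_*.txt split lists for the unpacked dataset
zip_directory('coco8')       # re-zip the dataset directory (the test imports zip_directory for this)
```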
@@ -267,3 +268,11 @@ def test_events():
     cfg = copy(DEFAULT_CFG) # does not require deepcopy
     cfg.mode = 'test'
     events(cfg)
+
+
+def test_utils_checks():
+    from ultralytics.utils.checks import check_yolov5u_filename, git_describe
+
+    check_yolov5u_filename('yolov5.pt')
+    # check_imshow(warn=True)
+    git_describe(ROOT)
@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
-__version__ = '8.0.156'
+__version__ = '8.0.157'
 
 from ultralytics.hub import start
 from ultralytics.models import RTDETR, SAM, YOLO
@@ -499,58 +499,6 @@ def compress_one_image(f, f_new=None, max_dim=1920, quality=50):
         cv2.imwrite(str(f_new or f), im)
 
 
-def delete_dsstore(path):
-    """
-    Deletes all ".DS_store" files under a specified directory.
-
-    Args:
-        path (str, optional): The directory path where the ".DS_store" files should be deleted.
-
-    Example:
-        ```python
-        from ultralytics.data.utils import delete_dsstore
-
-        delete_dsstore('path/to/dir')
-        ```
-
-    Note:
-        ".DS_store" files are created by the Apple operating system and contain metadata about folders and files. They
-        are hidden system files and can cause issues when transferring files between different operating systems.
-    """
-    # Delete Apple .DS_store files
-    files = list(Path(path).rglob('.DS_store'))
-    LOGGER.info(f'Deleting *.DS_store files: {files}')
-    for f in files:
-        f.unlink()
-
-
-def zip_directory(dir, use_zipfile_library=True):
-    """
-    Zips a directory and saves the archive to the specified output path. Equivalent to 'zip -r coco8.zip coco8/'
-
-    Args:
-        dir (str): The path to the directory to be zipped.
-        use_zipfile_library (bool): Whether to use zipfile library or shutil for zipping.
-
-    Example:
-        ```python
-        from ultralytics.data.utils import zip_directory
-
-        zip_directory('/path/to/dir')
-        ```
-    """
-    delete_dsstore(dir)
-    if use_zipfile_library:
-        dir = Path(dir)
-        with zipfile.ZipFile(dir.with_suffix('.zip'), 'w', zipfile.ZIP_DEFLATED) as zip_file:
-            for file_path in dir.glob('**/*'):
-                if file_path.is_file():
-                    zip_file.write(file_path, file_path.relative_to(dir))
-    else:
-        import shutil
-        shutil.make_archive(dir, 'zip', dir)
-
-
 def autosplit(path=DATASETS_DIR / 'coco8/images', weights=(0.9, 0.1, 0.0), annotated_only=False):
     """
     Automatically split a dataset into train/val/test splits and save the resulting splits into autosplit_*.txt files.
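These helpers do not disappear from the package: delete_dsstore reappears in ultralytics/utils/downloads.py in a later hunk, and a richer zip_directory already lives there. For anyone who relied on the removed use_zipfile_library=False branch, its standard-library equivalent is roughly:

```python
import shutil

shutil.make_archive('path/to/dir', 'zip', 'path/to/dir')  # produces path/to/dir.zip from the directory contents
```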
@@ -164,7 +164,6 @@ def check_latest_pypi_version(package_name='ultralytics'):
     response = requests.get(f'https://pypi.org/pypi/{package_name}/json', timeout=3)
     if response.status_code == 200:
         return response.json()['info']['version']
-    return None
 
 
 def check_pip_update_available():
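Dropping the trailing return None is purely cosmetic: a Python function that falls off the end of its body returns None implicitly, so callers of check_latest_pypi_version see the same behavior. A tiny illustration of the rule, using a hypothetical helper:

```python
def version_or_none(status_code):
    if status_code == 200:
        return '8.0.157'
    # no explicit return here: falling off the end yields None

assert version_or_none(200) == '8.0.157'
assert version_or_none(404) is None
```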
@@ -39,6 +39,31 @@ def is_url(url, check=True):
     return False
 
 
+def delete_dsstore(path):
+    """
+    Deletes all ".DS_store" files under a specified directory.
+
+    Args:
+        path (str, optional): The directory path where the ".DS_store" files should be deleted.
+
+    Example:
+        ```python
+        from ultralytics.data.utils import delete_dsstore
+
+        delete_dsstore('path/to/dir')
+        ```
+
+    Note:
+        ".DS_store" files are created by the Apple operating system and contain metadata about folders and files. They
+        are hidden system files and can cause issues when transferring files between different operating systems.
+    """
+    # Delete Apple .DS_store files
+    files = list(Path(path).rglob('.DS_store'))
+    LOGGER.info(f'Deleting *.DS_store files: {files}')
+    for f in files:
+        f.unlink()
+
+
 def zip_directory(directory, compress=True, exclude=('.DS_Store', '__MACOSX'), progress=True):
     """
     Zips the contents of a directory, excluding files containing strings in the exclude list.
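A minimal usage sketch of the relocated helper, assuming the new module path introduced by this hunk (the docstring's own example still shows the old ultralytics.data.utils path):

```python
from ultralytics.utils.downloads import delete_dsstore

delete_dsstore('path/to/dataset')  # removes any hidden .DS_store files under the directory tree
```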
@@ -62,6 +87,7 @@ def zip_directory(directory, compress=True, exclude=('.DS_Store', '__MACOSX'), p
     """
     from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile
 
+    delete_dsstore(directory)
     directory = Path(directory)
     if not directory.is_dir():
         raise FileNotFoundError(f"Directory '{directory}' does not exist.")
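With this call in place, zip_directory scrubs macOS .DS_store files before archiving. A minimal usage sketch under the signature shown above:

```python
from ultralytics.utils.downloads import zip_directory

zip_directory('path/to/coco8')                  # expected to produce path/to/coco8.zip
zip_directory('path/to/coco8', compress=False)  # store without compression, per the compress flag
```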
@@ -117,20 +143,22 @@ def unzip_file(file, path=None, exclude=('.DS_Store', '__MACOSX'), exist_ok=Fals
         files = [f for f in zipObj.namelist() if all(x not in f for x in exclude)]
         top_level_dirs = {Path(f).parts[0] for f in files}
 
-        if len(top_level_dirs) > 1 or not files[0].endswith('/'):
-            path = Path(path) / Path(file).stem  # define new unzip directory
+        if len(top_level_dirs) > 1 or not files[0].endswith('/'):  # zip has multiple files at top level
+            path = extract_path = Path(path) / Path(file).stem  # i.e. ../datasets/coco8
+        else:  # zip has 1 top-level directory
+            extract_path = path  # i.e. ../datasets
+            path = Path(path) / list(top_level_dirs)[0]  # i.e. ../datasets/coco8
 
         # Check if destination directory already exists and contains files
-        extract_path = Path(path) / list(top_level_dirs)[0]
-        if extract_path.exists() and any(extract_path.iterdir()) and not exist_ok:
+        if path.exists() and any(path.iterdir()) and not exist_ok:
             # If it exists and is not empty, return the path without unzipping
             LOGGER.info(f'Skipping {file} unzip (already unzipped)')
-            return extract_path
+            return path
 
         for f in tqdm(files, desc=f'Unzipping {file} to {Path(path).resolve()}...', unit='file', disable=not progress):
-            zipObj.extract(f, path=path)
+            zipObj.extract(f, path=extract_path)
 
-    return extract_path  # return unzip dir
+    return path  # return unzip dir
 
 
 def check_disk_space(url='https://ultralytics.com/assets/coco128.zip', sf=1.5, hard=True):
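This hunk is the core of the commit: the old code always derived the returned directory from one of the archive's top-level members, which is wrong for zips whose files sit loose at the top level. The fix keeps two paths, extract_path for where members are written and path for the directory that ends up holding the data, and returns the latter. A standalone sketch of the same decision using only the standard library (unzip_to is a hypothetical name, not the ultralytics API):

```python
from pathlib import Path
from zipfile import ZipFile

def unzip_to(file, dest='.', exclude=('.DS_Store', '__MACOSX')):
    """Sketch of the fixed path logic: returns the directory that holds the unzipped data."""
    with ZipFile(file) as zf:
        files = [f for f in zf.namelist() if all(x not in f for x in exclude)]
        top_level_dirs = {Path(f).parts[0] for f in files}

        if len(top_level_dirs) > 1 or not files[0].endswith('/'):  # loose files at the top level
            path = extract_path = Path(dest) / Path(file).stem     # e.g. datasets/coco8
        else:                                                      # a single wrapping directory inside the zip
            extract_path = Path(dest)                              # e.g. datasets
            path = Path(dest) / next(iter(top_level_dirs))         # e.g. datasets/coco8

        for f in files:
            zf.extract(f, path=extract_path)
    return path
```

Either way the caller gets back the folder that actually contains the data (e.g. datasets/coco8) rather than an arbitrary member of the archive.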