Update .pre-commit-config.yaml (#2019)
* Update .pre-commit-config.yaml
* Update __init__.py
* Update .pre-commit-config.yaml
* Precommit updates
parent a0a4012739
commit 527ce02916
@@ -1,8 +1,5 @@
-# Define hooks for code formations
+# Ultralytics YOLO 🚀, GPL-3.0 license
-# Will be applied on any updated commit files if a user has installed and linked commit hook
+# Pre-commit hooks. For more information see https://github.com/pre-commit/pre-commit-hooks/blob/main/README.md

-default_language_version:
-python: python3.8
-
 exclude: 'docs/'
 # Define bot property if installed via https://github.com/marketplace/pre-commit-ci
@@ -16,13 +13,13 @@ repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
 rev: v4.4.0
 hooks:
-# - id: end-of-file-fixer
+- id: end-of-file-fixer
 - id: trailing-whitespace
 - id: check-case-conflict
 - id: check-yaml
-- id: check-toml
-- id: pretty-format-json
 - id: check-docstring-first
+- id: double-quote-string-fixer
+- id: detect-private-key

 - repo: https://github.com/asottile/pyupgrade
 rev: v3.3.1
@@ -31,11 +28,11 @@ repos:
 name: Upgrade code
 args: [--py37-plus]

-# - repo: https://github.com/PyCQA/isort
+- repo: https://github.com/PyCQA/isort
-# rev: 5.11.4
+rev: 5.12.0
-# hooks:
+hooks:
-# - id: isort
+- id: isort
-# name: Sort imports
+name: Sort imports

 - repo: https://github.com/google/yapf
 rev: v0.32.0
@@ -59,12 +56,12 @@ repos:
 - id: flake8
 name: PEP8

-#- repo: https://github.com/codespell-project/codespell
+- repo: https://github.com/codespell-project/codespell
-# rev: v2.2.2
+rev: v2.2.2
-# hooks:
+hooks:
-# - id: codespell
+- id: codespell
-# args:
+args:
-# - --ignore-words-list=crate,nd
+- --ignore-words-list=crate,nd,strack,dota

 #- repo: https://github.com/asottile/yesqa
 # rev: v1.4.0
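Most of the Python hunks that follow are the direct result of the newly enabled `double-quote-string-fixer` hook: it rewrites double-quoted string literals to single quotes wherever that does not force extra escaping. A minimal before/after sketch (illustrative only, not taken from the repository) is:

```python
# Before the hook runs:
name = "yolov5"
msg = "couldn't find weights"

# After `pre-commit run double-quote-string-fixer --all-files`:
name = 'yolov5'
msg = "couldn't find weights"  # left alone: single quotes would require escaping
```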
@@ -164,6 +164,6 @@ def main(opt):
 test(**vars(opt)) if opt.test else run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -179,7 +179,7 @@ def run(
 vid_writer[i].write(im0)

 # Print time (inference-only)
-LOGGER.info(f"{s}{dt[1].dt * 1E3:.1f}ms")
+LOGGER.info(f'{s}{dt[1].dt * 1E3:.1f}ms')

 # Print results
 t = tuple(x.t / seen * 1E3 for x in dt) # speeds per image
@@ -221,6 +221,6 @@ def main(opt):
 run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -220,11 +220,11 @@ def train(opt, device):

 # Log
 metrics = {
-"train/loss": tloss,
+'train/loss': tloss,
-f"{val}/loss": vloss,
+f'{val}/loss': vloss,
-"metrics/accuracy_top1": top1,
+'metrics/accuracy_top1': top1,
-"metrics/accuracy_top5": top5,
+'metrics/accuracy_top5': top5,
-"lr/0": optimizer.param_groups[0]['lr']} # learning rate
+'lr/0': optimizer.param_groups[0]['lr']} # learning rate
 logger.log_metrics(metrics, epoch)

 # Save model
@@ -251,11 +251,11 @@ def train(opt, device):
 if RANK in {-1, 0} and final_epoch:
 LOGGER.info(f'\nTraining complete ({(time.time() - t0) / 3600:.3f} hours)'
 f"\nResults saved to {colorstr('bold', save_dir)}"
-f"\nPredict: python classify/predict.py --weights {best} --source im.jpg"
+f'\nPredict: python classify/predict.py --weights {best} --source im.jpg'
-f"\nValidate: python classify/val.py --weights {best} --data {data_dir}"
+f'\nValidate: python classify/val.py --weights {best} --data {data_dir}'
-f"\nExport: python export.py --weights {best} --include onnx"
+f'\nExport: python export.py --weights {best} --include onnx'
 f"\nPyTorch Hub: model = torch.hub.load('ultralytics/yolov5', 'custom', '{best}')"
-f"\nVisualize: https://netron.app\n")
+f'\nVisualize: https://netron.app\n')

 # Plot examples
 images, labels = (x[:25] for x in next(iter(testloader))) # first 25 images and labels
@@ -263,7 +263,7 @@ def train(opt, device):
 file = imshow_cls(images, labels, pred, model.names, verbose=False, f=save_dir / 'test_images.jpg')

 # Log results
-meta = {"epochs": epochs, "top1_acc": best_fitness, "date": datetime.now().isoformat()}
+meta = {'epochs': epochs, 'top1_acc': best_fitness, 'date': datetime.now().isoformat()}
 logger.log_images(file, name='Test Examples (true-predicted)', epoch=epoch)
 logger.log_model(best, epochs, metadata=meta)

@@ -310,7 +310,7 @@ def main(opt):
 assert torch.cuda.device_count() > LOCAL_RANK, 'insufficient CUDA devices for DDP command'
 torch.cuda.set_device(LOCAL_RANK)
 device = torch.device('cuda', LOCAL_RANK)
-dist.init_process_group(backend="nccl" if dist.is_nccl_available() else "gloo")
+dist.init_process_group(backend='nccl' if dist.is_nccl_available() else 'gloo')

 # Parameters
 opt.save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok) # increment run
@@ -328,6 +328,6 @@ def run(**kwargs):
 return opt


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -100,7 +100,7 @@ def run(
 pred, targets, loss, dt = [], [], 0, (Profile(), Profile(), Profile())
 n = len(dataloader) # number of batches
 action = 'validating' if dataloader.dataset.root.stem == 'val' else 'testing'
-desc = f"{pbar.desc[:-36]}{action:>36}" if pbar else f"{action}"
+desc = f'{pbar.desc[:-36]}{action:>36}' if pbar else f'{action}'
 bar = tqdm(dataloader, desc, n, not training, bar_format=TQDM_BAR_FORMAT, position=0)
 with torch.cuda.amp.autocast(enabled=device.type != 'cpu'):
 for images, labels in bar:
@@ -123,14 +123,14 @@ def run(
 top1, top5 = acc.mean(0).tolist()

 if pbar:
-pbar.desc = f"{pbar.desc[:-36]}{loss:>12.3g}{top1:>12.3g}{top5:>12.3g}"
+pbar.desc = f'{pbar.desc[:-36]}{loss:>12.3g}{top1:>12.3g}{top5:>12.3g}'
 if verbose: # all classes
 LOGGER.info(f"{'Class':>24}{'Images':>12}{'top1_acc':>12}{'top5_acc':>12}")
 LOGGER.info(f"{'all':>24}{targets.shape[0]:>12}{top1:>12.3g}{top5:>12.3g}")
 for i, c in model.names.items():
 acc_i = acc[targets == i]
 top1i, top5i = acc_i.mean(0).tolist()
-LOGGER.info(f"{c:>24}{acc_i.shape[0]:>12}{top1i:>12.3g}{top5i:>12.3g}")
+LOGGER.info(f'{c:>24}{acc_i.shape[0]:>12}{top1i:>12.3g}{top5i:>12.3g}')

 # Print results
 t = tuple(x.t / len(dataloader.dataset.samples) * 1E3 for x in dt) # speeds per image
@@ -165,6 +165,6 @@ def main(opt):
 run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -260,6 +260,6 @@ def main(opt):
 run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
export.py (18 changed lines)

@@ -120,7 +120,7 @@ def export_torchscript(model, im, file, optimize, prefix=colorstr('TorchScript:'
 f = file.with_suffix('.torchscript')

 ts = torch.jit.trace(model, im, strict=False)
-d = {"shape": im.shape, "stride": int(max(model.stride)), "names": model.names}
+d = {'shape': im.shape, 'stride': int(max(model.stride)), 'names': model.names}
 extra_files = {'config.txt': json.dumps(d)} # torch._C.ExtraFilesMap()
 if optimize: # https://pytorch.org/tutorials/recipes/mobile_interpreter.html
 optimize_for_mobile(ts)._save_for_lite_interpreter(str(f), _extra_files=extra_files)
@@ -230,7 +230,7 @@ def export_coreml(model, im, file, int8, half, prefix=colorstr('CoreML:')):
 if bits < 32:
 if MACOS: # quantization only supported on macOS
 with warnings.catch_warnings():
-warnings.filterwarnings("ignore", category=DeprecationWarning) # suppress numpy==1.20 float warning
+warnings.filterwarnings('ignore', category=DeprecationWarning) # suppress numpy==1.20 float warning
 ct_model = ct.models.neural_network.quantization_utils.quantize_weights(ct_model, bits, mode)
 else:
 print(f'{prefix} quantization only supported on macOS, skipping...')
@@ -286,7 +286,7 @@ def export_engine(model, im, file, half, dynamic, simplify, workspace=4, verbose

 if dynamic:
 if im.shape[0] <= 1:
-LOGGER.warning(f"{prefix} WARNING ⚠️ --dynamic model requires maximum --batch-size argument")
+LOGGER.warning(f'{prefix} WARNING ⚠️ --dynamic model requires maximum --batch-size argument')
 profile = builder.create_optimization_profile()
 for inp in inputs:
 profile.set_shape(inp.name, (1, *im.shape[1:]), (max(1, im.shape[0] // 2), *im.shape[1:]), im.shape)
@@ -396,7 +396,7 @@ def export_tflite(keras_model, im, file, int8, data, nms, agnostic_nms, prefix=c
 converter.target_spec.supported_ops.append(tf.lite.OpsSet.SELECT_TF_OPS)

 tflite_model = converter.convert()
-open(f, "wb").write(tflite_model)
+open(f, 'wb').write(tflite_model)
 return f, None


@@ -420,7 +420,7 @@ def export_edgetpu(file, prefix=colorstr('Edge TPU:')):
 f = str(file).replace('.pt', '-int8_edgetpu.tflite') # Edge TPU model
 f_tfl = str(file).replace('.pt', '-int8.tflite') # TFLite model

-cmd = f"edgetpu_compiler -s -d -k 10 --out_dir {file.parent} {f_tfl}"
+cmd = f'edgetpu_compiler -s -d -k 10 --out_dir {file.parent} {f_tfl}'
 subprocess.run(cmd.split(), check=True)
 return f, None

@@ -601,14 +601,14 @@ def run(
 det &= not seg # segmentation models inherit from SegmentationModel(DetectionModel)
 dir = Path('segment' if seg else 'classify' if cls else '')
 h = '--half' if half else '' # --half FP16 inference arg
-s = "# WARNING ⚠️ ClassificationModel not yet supported for PyTorch Hub AutoShape inference" if cls else \
+s = '# WARNING ⚠️ ClassificationModel not yet supported for PyTorch Hub AutoShape inference' if cls else \
-"# WARNING ⚠️ SegmentationModel not yet supported for PyTorch Hub AutoShape inference" if seg else ''
+'# WARNING ⚠️ SegmentationModel not yet supported for PyTorch Hub AutoShape inference' if seg else ''
 LOGGER.info(f'\nExport complete ({time.time() - t:.1f}s)'
 f"\nResults saved to {colorstr('bold', file.parent.resolve())}"
 f"\nDetect: python {dir / ('detect.py' if det else 'predict.py')} --weights {f[-1]} {h}"
 f"\nValidate: python {dir / 'val.py'} --weights {f[-1]} {h}"
 f"\nPyTorch Hub: model = torch.hub.load('ultralytics/yolov5', 'custom', '{f[-1]}') {s}"
-f"\nVisualize: https://netron.app")
+f'\nVisualize: https://netron.app')
 return f # return list of exported files/dirs


@@ -650,6 +650,6 @@ def main(opt):
 run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
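For context on the `extra_files` metadata that `export_torchscript` writes above: the `config.txt` payload can be read back at load time through the same `_extra_files` mechanism in PyTorch. The snippet below is a minimal sketch and not part of the commit; the path `yolov5s.torchscript` is an assumed example file.

```python
import json
import torch

# Minimal sketch: recover the {'shape', 'stride', 'names'} metadata that
# export_torchscript() stored alongside the TorchScript model.
extra_files = {'config.txt': ''}  # values are filled in by torch.jit.load
model = torch.jit.load('yolov5s.torchscript', _extra_files=extra_files, map_location='cpu')
meta = json.loads(extra_files['config.txt'])
stride, names = int(meta['stride']), meta['names']
print(stride, names)
```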
@@ -380,11 +380,11 @@ class DetectMultiBackend(nn.Module):
 w = next(Path(w).glob('*.xml')) # get *.xml file from *_openvino_model dir
 network = ie.read_model(model=w, weights=Path(w).with_suffix('.bin'))
 if network.get_parameters()[0].get_layout().empty:
-network.get_parameters()[0].set_layout(Layout("NCHW"))
+network.get_parameters()[0].set_layout(Layout('NCHW'))
 batch_dim = get_batch(network)
 if batch_dim.is_static:
 batch_size = batch_dim.get_length()
-executable_network = ie.compile_model(network, device_name="CPU") # device_name="MYRIAD" for Intel NCS2
+executable_network = ie.compile_model(network, device_name='CPU') # device_name="MYRIAD" for Intel NCS2
 stride, names = self._load_metadata(Path(w).with_suffix('.yaml')) # load metadata
 elif engine: # TensorRT
 LOGGER.info(f'Loading {w} for TensorRT inference...')
@@ -431,7 +431,7 @@ class DetectMultiBackend(nn.Module):
 import tensorflow as tf

 def wrap_frozen_graph(gd, inputs, outputs):
-x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""), []) # wrapped
+x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=''), []) # wrapped
 ge = x.graph.as_graph_element
 return x.prune(tf.nest.map_structure(ge, inputs), tf.nest.map_structure(ge, outputs))

@@ -445,7 +445,7 @@ class DetectMultiBackend(nn.Module):
 gd = tf.Graph().as_graph_def() # TF GraphDef
 with open(w, 'rb') as f:
 gd.ParseFromString(f.read())
-frozen_func = wrap_frozen_graph(gd, inputs="x:0", outputs=gd_outputs(gd))
+frozen_func = wrap_frozen_graph(gd, inputs='x:0', outputs=gd_outputs(gd))
 elif tflite or edgetpu: # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python
 try: # https://coral.ai/docs/edgetpu/tflite-python/#update-existing-tf-lite-code-for-the-edge-tpu
 from tflite_runtime.interpreter import Interpreter, load_delegate
@@ -467,9 +467,9 @@ class DetectMultiBackend(nn.Module):
 output_details = interpreter.get_output_details() # outputs
 # load metadata
 with contextlib.suppress(zipfile.BadZipFile):
-with zipfile.ZipFile(w, "r") as model:
+with zipfile.ZipFile(w, 'r') as model:
 meta_file = model.namelist()[0]
-meta = ast.literal_eval(model.read(meta_file).decode("utf-8"))
+meta = ast.literal_eval(model.read(meta_file).decode('utf-8'))
 stride, names = int(meta['stride']), meta['names']
 elif tfjs: # TF.js
 raise NotImplementedError('ERROR: YOLOv3 TF.js inference is not supported')
@@ -491,7 +491,7 @@ class DetectMultiBackend(nn.Module):
 check_requirements('tritonclient[all]')
 from utils.triton import TritonRemoteModel
 model = TritonRemoteModel(url=w)
-nhwc = model.runtime.startswith("tensorflow")
+nhwc = model.runtime.startswith('tensorflow')
 else:
 raise NotImplementedError(f'ERROR: {w} is not a supported format')

@@ -608,7 +608,7 @@ class DetectMultiBackend(nn.Module):
 url = urlparse(p) # if url may be Triton inference server
 types = [s in Path(p).name for s in sf]
 types[8] &= not types[9] # tflite &= not edgetpu
-triton = not any(types) and all([any(s in url.scheme for s in ["http", "grpc"]), url.netloc])
+triton = not any(types) and all([any(s in url.scheme for s in ['http', 'grpc']), url.netloc])
 return types + [triton]

 @staticmethod
models/tf.py (12 changed lines)

@@ -356,7 +356,7 @@ class TFUpsample(keras.layers.Layer):
 # TF version of torch.nn.Upsample()
 def __init__(self, size, scale_factor, mode, w=None): # warning: all arguments needed including 'w'
 super().__init__()
-assert scale_factor % 2 == 0, "scale_factor must be multiple of 2"
+assert scale_factor % 2 == 0, 'scale_factor must be multiple of 2'
 self.upsample = lambda x: tf.image.resize(x, (x.shape[1] * scale_factor, x.shape[2] * scale_factor), mode)
 # self.upsample = keras.layers.UpSampling2D(size=scale_factor, interpolation=mode)
 # with default arguments: align_corners=False, half_pixel_centers=False
@@ -371,7 +371,7 @@ class TFConcat(keras.layers.Layer):
 # TF version of torch.concat()
 def __init__(self, dimension=1, w=None):
 super().__init__()
-assert dimension == 1, "convert only NCHW to NHWC concat"
+assert dimension == 1, 'convert only NCHW to NHWC concat'
 self.d = 3

 def call(self, inputs):
@@ -523,17 +523,17 @@ class AgnosticNMS(keras.layers.Layer):
 selected_boxes = tf.gather(boxes, selected_inds)
 padded_boxes = tf.pad(selected_boxes,
 paddings=[[0, topk_all - tf.shape(selected_boxes)[0]], [0, 0]],
-mode="CONSTANT",
+mode='CONSTANT',
 constant_values=0.0)
 selected_scores = tf.gather(scores_inp, selected_inds)
 padded_scores = tf.pad(selected_scores,
 paddings=[[0, topk_all - tf.shape(selected_boxes)[0]]],
-mode="CONSTANT",
+mode='CONSTANT',
 constant_values=-1.0)
 selected_classes = tf.gather(class_inds, selected_inds)
 padded_classes = tf.pad(selected_classes,
 paddings=[[0, topk_all - tf.shape(selected_boxes)[0]]],
-mode="CONSTANT",
+mode='CONSTANT',
 constant_values=-1.0)
 valid_detections = tf.shape(selected_inds)[0]
 return padded_boxes, padded_scores, padded_classes, valid_detections
@@ -603,6 +603,6 @@ def main(opt):
 run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -279,6 +279,6 @@ def main(opt):
 run(**vars(opt))


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -138,7 +138,7 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
 # Batch size
 if RANK == -1 and batch_size == -1: # single-GPU only, estimate best batch size
 batch_size = check_train_batch_size(model, imgsz, amp)
-logger.update_params({"batch_size": batch_size})
+logger.update_params({'batch_size': batch_size})
 # loggers.on_params_update({"batch_size": batch_size})

 # Optimizer
@@ -340,10 +340,10 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
 # Mosaic plots
 if plots:
 if ni < 3:
-plot_images_and_masks(imgs, targets, masks, paths, save_dir / f"train_batch{ni}.jpg")
+plot_images_and_masks(imgs, targets, masks, paths, save_dir / f'train_batch{ni}.jpg')
 if ni == 10:
 files = sorted(save_dir.glob('train*.jpg'))
-logger.log_images(files, "Mosaics", epoch)
+logger.log_images(files, 'Mosaics', epoch)
 # end batch ------------------------------------------------------------------------------------------------

 # Scheduler
@@ -453,8 +453,8 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
 files = ['results.png', 'confusion_matrix.png', *(f'{x}_curve.png' for x in ('F1', 'PR', 'P', 'R'))]
 files = [(save_dir / f) for f in files if (save_dir / f).exists()] # filter
 LOGGER.info(f"Results saved to {colorstr('bold', save_dir)}")
-logger.log_images(files, "Results", epoch + 1)
+logger.log_images(files, 'Results', epoch + 1)
-logger.log_images(sorted(save_dir.glob('val*.jpg')), "Validation", epoch + 1)
+logger.log_images(sorted(save_dir.glob('val*.jpg')), 'Validation', epoch + 1)
 torch.cuda.empty_cache()
 return results

@@ -547,7 +547,7 @@ def main(opt, callbacks=Callbacks()):
 assert torch.cuda.device_count() > LOCAL_RANK, 'insufficient CUDA devices for DDP command'
 torch.cuda.set_device(LOCAL_RANK)
 device = torch.device('cuda', LOCAL_RANK)
-dist.init_process_group(backend="nccl" if dist.is_nccl_available() else "gloo")
+dist.init_process_group(backend='nccl' if dist.is_nccl_available() else 'gloo')

 # Train
 if not opt.evolve:
@@ -654,6 +654,6 @@ def run(**kwargs):
 return opt


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
@@ -70,8 +70,8 @@ def save_one_json(predn, jdict, path, class_map, pred_masks):
 from pycocotools.mask import encode

 def single_encode(x):
-rle = encode(np.asarray(x[:, :, None], order="F", dtype="uint8"))[0]
+rle = encode(np.asarray(x[:, :, None], order='F', dtype='uint8'))[0]
-rle["counts"] = rle["counts"].decode("utf-8")
+rle['counts'] = rle['counts'].decode('utf-8')
 return rle

 image_id = int(path.stem) if path.stem.isnumeric() else path.stem
@@ -105,7 +105,7 @@ def process_batch(detections, labels, iouv, pred_masks=None, gt_masks=None, over
 gt_masks = gt_masks.repeat(nl, 1, 1) # shape(1,640,640) -> (n,640,640)
 gt_masks = torch.where(gt_masks == index, 1.0, 0.0)
 if gt_masks.shape[1:] != pred_masks.shape[1:]:
-gt_masks = F.interpolate(gt_masks[None], pred_masks.shape[1:], mode="bilinear", align_corners=False)[0]
+gt_masks = F.interpolate(gt_masks[None], pred_masks.shape[1:], mode='bilinear', align_corners=False)[0]
 gt_masks = gt_masks.gt_(0.5)
 iou = mask_iou(gt_masks.view(gt_masks.shape[0], -1), pred_masks.view(pred_masks.shape[0], -1))
 else: # boxes
@@ -231,8 +231,8 @@ def run(
 if isinstance(names, (list, tuple)): # old format
 names = dict(enumerate(names))
 class_map = coco80_to_coco91_class() if is_coco else list(range(1000))
-s = ('%22s' + '%11s' * 10) % ('Class', 'Images', 'Instances', 'Box(P', "R", "mAP50", "mAP50-95)", "Mask(P", "R",
+s = ('%22s' + '%11s' * 10) % ('Class', 'Images', 'Instances', 'Box(P', 'R', 'mAP50', 'mAP50-95)', 'Mask(P', 'R',
-"mAP50", "mAP50-95)")
+'mAP50', 'mAP50-95)')
 dt = Profile(), Profile(), Profile()
 metrics = Metrics()
 loss = torch.zeros(4, device=device)
@@ -343,7 +343,7 @@ def run(

 # Print results
 pf = '%22s' + '%11i' * 2 + '%11.3g' * 8 # print format
-LOGGER.info(pf % ("all", seen, nt.sum(), *metrics.mean_results()))
+LOGGER.info(pf % ('all', seen, nt.sum(), *metrics.mean_results()))
 if nt.sum() == 0:
 LOGGER.warning(f'WARNING ⚠️ no labels found in {task} set, can not compute metrics without labels')

@@ -369,7 +369,7 @@ def run(
 if save_json and len(jdict):
 w = Path(weights[0] if isinstance(weights, list) else weights).stem if weights is not None else '' # weights
 anno_json = str(Path('../datasets/coco/annotations/instances_val2017.json')) # annotations
-pred_json = str(save_dir / f"{w}_predictions.json") # predictions
+pred_json = str(save_dir / f'{w}_predictions.json') # predictions
 LOGGER.info(f'\nEvaluating pycocotools mAP... saving {pred_json}...')
 with open(pred_json, 'w') as f:
 json.dump(jdict, f)
@@ -468,6 +468,6 @@ def main(opt):
 raise NotImplementedError(f'--task {opt.task} not in ("train", "val", "test", "speed", "study")')


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
train.py (6 changed lines)

@@ -147,7 +147,7 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
 # Batch size
 if RANK == -1 and batch_size == -1: # single-GPU only, estimate best batch size
 batch_size = check_train_batch_size(model, imgsz, amp)
-loggers.on_params_update({"batch_size": batch_size})
+loggers.on_params_update({'batch_size': batch_size})

 # Optimizer
 nbs = 64 # nominal batch size
@@ -521,7 +521,7 @@ def main(opt, callbacks=Callbacks()):
 assert torch.cuda.device_count() > LOCAL_RANK, 'insufficient CUDA devices for DDP command'
 torch.cuda.set_device(LOCAL_RANK)
 device = torch.device('cuda', LOCAL_RANK)
-dist.init_process_group(backend="nccl" if dist.is_nccl_available() else "gloo")
+dist.init_process_group(backend='nccl' if dist.is_nccl_available() else 'gloo')

 # Train
 if not opt.evolve:
@@ -630,6 +630,6 @@ def run(**kwargs):
 return opt


-if __name__ == "__main__":
+if __name__ == '__main__':
 opt = parse_opt()
 main(opt)
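The DDP lines touched in train.py (and in the trainers above) all follow the same pattern: pick the CUDA device from LOCAL_RANK, then initialize the process group with NCCL when available and Gloo otherwise. A minimal, self-contained sketch of that pattern is below; it assumes the script is launched with torchrun so the LOCAL_RANK environment variable is set, and it is not part of the commit itself.

```python
import os

import torch
import torch.distributed as dist

# Minimal DDP bootstrap mirroring the lines changed in this commit.
# Assumes launch via `torchrun --nproc_per_node N script.py` so LOCAL_RANK is set.
LOCAL_RANK = int(os.getenv('LOCAL_RANK', -1))

if LOCAL_RANK != -1:  # multi-GPU DDP mode
    assert torch.cuda.device_count() > LOCAL_RANK, 'insufficient CUDA devices for DDP command'
    torch.cuda.set_device(LOCAL_RANK)
    device = torch.device('cuda', LOCAL_RANK)
    # NCCL is the fast GPU backend; fall back to Gloo where NCCL is unavailable
    dist.init_process_group(backend='nccl' if dist.is_nccl_available() else 'gloo')
else:
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
```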
@@ -69,7 +69,7 @@ def notebook_init(verbose=True):
 if verbose:
 gb = 1 << 30 # bytes to GiB (1024 ** 3)
 ram = psutil.virtual_memory().total
-total, used, free = shutil.disk_usage("/")
+total, used, free = shutil.disk_usage('/')
 display.clear_output()
 s = f'({os.cpu_count()} CPUs, {ram / gb:.1f} GB RAM, {(total - free) / gb:.1f}/{total / gb:.1f} GB disk)'
 else:
@@ -89,7 +89,7 @@ def exif_transpose(image):
 if method is not None:
 image = image.transpose(method)
 del exif[0x0112]
-image.info["exif"] = exif.tobytes()
+image.info['exif'] = exif.tobytes()
 return image


@@ -212,11 +212,11 @@ class LoadScreenshots:

 # Parse monitor shape
 monitor = self.sct.monitors[self.screen]
-self.top = monitor["top"] if top is None else (monitor["top"] + top)
+self.top = monitor['top'] if top is None else (monitor['top'] + top)
-self.left = monitor["left"] if left is None else (monitor["left"] + left)
+self.left = monitor['left'] if left is None else (monitor['left'] + left)
-self.width = width or monitor["width"]
+self.width = width or monitor['width']
-self.height = height or monitor["height"]
+self.height = height or monitor['height']
-self.monitor = {"left": self.left, "top": self.top, "width": self.width, "height": self.height}
+self.monitor = {'left': self.left, 'top': self.top, 'width': self.width, 'height': self.height}

 def __iter__(self):
 return self
@@ -224,7 +224,7 @@ class LoadScreenshots:
 def __next__(self):
 # mss screen capture: get raw pixels from the screen as np array
 im0 = np.array(self.sct.grab(self.monitor))[:, :, :3] # [:, :, :3] BGRA to BGR
-s = f"screen {self.screen} (LTWH): {self.left},{self.top},{self.width},{self.height}: "
+s = f'screen {self.screen} (LTWH): {self.left},{self.top},{self.width},{self.height}: '

 if self.transforms:
 im = self.transforms(im0) # transforms
@@ -239,7 +239,7 @@ class LoadScreenshots:
 class LoadImages:
 # YOLOv5 image/video dataloader, i.e. `python detect.py --source image.jpg/vid.mp4`
 def __init__(self, path, img_size=640, stride=32, auto=True, transforms=None, vid_stride=1):
-if isinstance(path, str) and Path(path).suffix == ".txt": # *.txt file with img/vid/dir on each line
+if isinstance(path, str) and Path(path).suffix == '.txt': # *.txt file with img/vid/dir on each line
 path = Path(path).read_text().rsplit()
 files = []
 for p in sorted(path) if isinstance(path, (list, tuple)) else [path]:
@@ -358,7 +358,7 @@ class LoadStreams:
 # YouTube format i.e. 'https://www.youtube.com/watch?v=Zgi9g1ksQHc' or 'https://youtu.be/Zgi9g1ksQHc'
 check_requirements(('pafy', 'youtube_dl==2020.12.2'))
 import pafy
-s = pafy.new(s).getbest(preftype="mp4").url # YouTube URL
+s = pafy.new(s).getbest(preftype='mp4').url # YouTube URL
 s = eval(s) if s.isnumeric() else s # i.e. s = '0' local webcam
 if s == 0:
 assert not is_colab(), '--source 0 webcam unsupported on Colab. Rerun command in a local environment.'
@@ -373,7 +373,7 @@ class LoadStreams:

 _, self.imgs[i] = cap.read() # guarantee first frame
 self.threads[i] = Thread(target=self.update, args=([i, cap, s]), daemon=True)
-LOGGER.info(f"{st} Success ({self.frames[i]} frames {w}x{h} at {self.fps[i]:.2f} FPS)")
+LOGGER.info(f'{st} Success ({self.frames[i]} frames {w}x{h} at {self.fps[i]:.2f} FPS)')
 self.threads[i].start()
 LOGGER.info('') # newline

@@ -495,7 +495,7 @@ class LoadImagesAndLabels(Dataset):
 # Display cache
 nf, nm, ne, nc, n = cache.pop('results') # found, missing, empty, corrupt, total
 if exists and LOCAL_RANK in {-1, 0}:
-d = f"Scanning {cache_path}... {nf} images, {nm + ne} backgrounds, {nc} corrupt"
+d = f'Scanning {cache_path}... {nf} images, {nm + ne} backgrounds, {nc} corrupt'
 tqdm(None, desc=prefix + d, total=n, initial=n, bar_format=TQDM_BAR_FORMAT) # display cache results
 if cache['msgs']:
 LOGGER.info('\n'.join(cache['msgs'])) # display warnings
@@ -598,8 +598,8 @@ class LoadImagesAndLabels(Dataset):
 mem = psutil.virtual_memory()
 cache = mem_required * (1 + safety_margin) < mem.available # to cache or not to cache, that is the question
 if not cache:
-LOGGER.info(f"{prefix}{mem_required / gb:.1f}GB RAM required, "
+LOGGER.info(f'{prefix}{mem_required / gb:.1f}GB RAM required, '
-f"{mem.available / gb:.1f}/{mem.total / gb:.1f}GB available, "
+f'{mem.available / gb:.1f}/{mem.total / gb:.1f}GB available, '
 f"{'caching images ✅' if cache else 'not caching images ⚠️'}")
 return cache

@@ -607,7 +607,7 @@ class LoadImagesAndLabels(Dataset):
 # Cache dataset labels, check images and read shapes
 x = {} # dict
 nm, nf, ne, nc, msgs = 0, 0, 0, 0, [] # number missing, found, empty, corrupt, messages
-desc = f"{prefix}Scanning {path.parent / path.stem}..."
+desc = f'{prefix}Scanning {path.parent / path.stem}...'
 with Pool(NUM_THREADS) as pool:
 pbar = tqdm(pool.imap(verify_image_label, zip(self.im_files, self.label_files, repeat(prefix))),
 desc=desc,
@@ -622,7 +622,7 @@ class LoadImagesAndLabels(Dataset):
 x[im_file] = [lb, shape, segments]
 if msg:
 msgs.append(msg)
-pbar.desc = f"{desc} {nf} images, {nm + ne} backgrounds, {nc} corrupt"
+pbar.desc = f'{desc} {nf} images, {nm + ne} backgrounds, {nc} corrupt'

 pbar.close()
 if msgs:
@@ -1063,7 +1063,7 @@ class HUBDatasetStats():
 if zipped:
 data['path'] = data_dir
 except Exception as e:
-raise Exception("error/HUB/dataset_stats/yaml_load") from e
+raise Exception('error/HUB/dataset_stats/yaml_load') from e

 check_dataset(data, autodownload) # download dataset if missing
 self.hub_dir = Path(data['path'] + '-hub')
@@ -1188,7 +1188,7 @@ class ClassificationDataset(torchvision.datasets.ImageFolder):
 else: # read image
 im = cv2.imread(f) # BGR
 if self.album_transforms:
-sample = self.album_transforms(image=cv2.cvtColor(im, cv2.COLOR_BGR2RGB))["image"]
+sample = self.album_transforms(image=cv2.cvtColor(im, cv2.COLOR_BGR2RGB))['image']
 else:
 sample = self.torch_transforms(im)
 return sample, j
@@ -55,7 +55,7 @@ def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''):
 if not file.exists() or file.stat().st_size < min_bytes: # check
 if file.exists():
 file.unlink() # remove partial downloads
-LOGGER.info(f"ERROR: {assert_msg}\n{error_msg}")
+LOGGER.info(f'ERROR: {assert_msg}\n{error_msg}')
 LOGGER.info('')


@@ -7,13 +7,13 @@ import pprint

 import requests

-DETECTION_URL = "http://localhost:5000/v1/object-detection/yolov5s"
+DETECTION_URL = 'http://localhost:5000/v1/object-detection/yolov5s'
-IMAGE = "zidane.jpg"
+IMAGE = 'zidane.jpg'

 # Read image
-with open(IMAGE, "rb") as f:
+with open(IMAGE, 'rb') as f:
 image_data = f.read()

-response = requests.post(DETECTION_URL, files={"image": image_data}).json()
+response = requests.post(DETECTION_URL, files={'image': image_data}).json()

 pprint.pprint(response)
@@ -13,36 +13,36 @@ from PIL import Image
 app = Flask(__name__)
 models = {}

-DETECTION_URL = "/v1/object-detection/<model>"
+DETECTION_URL = '/v1/object-detection/<model>'


-@app.route(DETECTION_URL, methods=["POST"])
+@app.route(DETECTION_URL, methods=['POST'])
 def predict(model):
-if request.method != "POST":
+if request.method != 'POST':
 return

-if request.files.get("image"):
+if request.files.get('image'):
 # Method 1
 # with request.files["image"] as f:
 # im = Image.open(io.BytesIO(f.read()))

 # Method 2
-im_file = request.files["image"]
+im_file = request.files['image']
 im_bytes = im_file.read()
 im = Image.open(io.BytesIO(im_bytes))

 if model in models:
 results = models[model](im, size=640) # reduce size=320 for faster inference
-return results.pandas().xyxy[0].to_json(orient="records")
+return results.pandas().xyxy[0].to_json(orient='records')


-if __name__ == "__main__":
+if __name__ == '__main__':
-parser = argparse.ArgumentParser(description="Flask API exposing YOLOv5 model")
+parser = argparse.ArgumentParser(description='Flask API exposing YOLOv5 model')
-parser.add_argument("--port", default=5000, type=int, help="port number")
+parser.add_argument('--port', default=5000, type=int, help='port number')
 parser.add_argument('--model', nargs='+', default=['yolov5s'], help='model(s) to run, i.e. --model yolov5n yolov5s')
 opt = parser.parse_args()

 for m in opt.model:
-models[m] = torch.hub.load("ultralytics/yolov5", m, force_reload=True, skip_validation=True)
+models[m] = torch.hub.load('ultralytics/yolov5', m, force_reload=True, skip_validation=True)

-app.run(host="0.0.0.0", port=opt.port) # debug=True causes Restarting with stat
+app.run(host='0.0.0.0', port=opt.port) # debug=True causes Restarting with stat
@@ -90,11 +90,11 @@ def is_kaggle():

 def is_docker() -> bool:
 """Check if the process runs inside a docker container."""
-if Path("/.dockerenv").exists():
+if Path('/.dockerenv').exists():
 return True
 try: # check if docker is in control groups
-with open("/proc/self/cgroup") as file:
+with open('/proc/self/cgroup') as file:
-return any("docker" in line for line in file)
+return any('docker' in line for line in file)
 except OSError:
 return False

@@ -113,7 +113,7 @@ def is_writeable(dir, test=False):
 return False


-LOGGING_NAME = "yolov5"
+LOGGING_NAME = 'yolov5'


 def set_logging(name=LOGGING_NAME, verbose=True):
@@ -121,21 +121,21 @@ def set_logging(name=LOGGING_NAME, verbose=True):
 rank = int(os.getenv('RANK', -1)) # rank in world for Multi-GPU trainings
 level = logging.INFO if verbose and rank in {-1, 0} else logging.ERROR
 logging.config.dictConfig({
-"version": 1,
+'version': 1,
-"disable_existing_loggers": False,
+'disable_existing_loggers': False,
-"formatters": {
+'formatters': {
 name: {
-"format": "%(message)s"}},
+'format': '%(message)s'}},
-"handlers": {
+'handlers': {
 name: {
-"class": "logging.StreamHandler",
+'class': 'logging.StreamHandler',
-"formatter": name,
+'formatter': name,
-"level": level,}},
+'level': level,}},
-"loggers": {
+'loggers': {
 name: {
-"level": level,
+'level': level,
-"handlers": [name],
+'handlers': [name],
-"propagate": False,}}})
+'propagate': False,}}})


 set_logging(LOGGING_NAME) # run before defining LOGGER
@@ -218,7 +218,7 @@ class WorkingDirectory(contextlib.ContextDecorator):

 def methods(instance):
 # Get class/instance methods
-return [f for f in dir(instance) if callable(getattr(instance, f)) and not f.startswith("__")]
+return [f for f in dir(instance) if callable(getattr(instance, f)) and not f.startswith('__')]


 def print_args(args: Optional[dict] = None, show_file=True, show_func=False):
@@ -299,7 +299,7 @@ def check_online():
 def run_once():
 # Check once
 try:
-socket.create_connection(("1.1.1.1", 443), 5) # check host accessibility
+socket.create_connection(('1.1.1.1', 443), 5) # check host accessibility
 return True
 except OSError:
 return False
@@ -386,7 +386,7 @@ def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), insta
 check_python() # check python version
 if isinstance(requirements, Path): # requirements.txt file
 file = requirements.resolve()
-assert file.exists(), f"{prefix} {file} not found, check failed."
+assert file.exists(), f'{prefix} {file} not found, check failed.'
 with file.open() as f:
 requirements = [f'{x.name}{x.specifier}' for x in pkg.parse_requirements(f) if x.name not in exclude]
 elif isinstance(requirements, str):
@@ -450,7 +450,7 @@ def check_suffix(file='yolov5s.pt', suffix=('.pt',), msg=''):
 for f in file if isinstance(file, (list, tuple)) else [file]:
 s = Path(f).suffix.lower() # file suffix
 if len(s):
-assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}"
+assert s in suffix, f'{msg}{f} acceptable suffix is {suffix}'


 def check_yaml(file, suffix=('.yaml', '.yml')):
@@ -556,8 +556,8 @@ def check_dataset(data, autodownload=True):
 else: # python script
 r = exec(s, {'yaml': data}) # return None
 dt = f'({round(time.time() - t, 1)}s)'
-s = f"success ✅ {dt}, saved to {colorstr('bold', DATASETS_DIR)}" if r in (0, None) else f"failure {dt} ❌"
+s = f"success ✅ {dt}, saved to {colorstr('bold', DATASETS_DIR)}" if r in (0, None) else f'failure {dt} ❌'
-LOGGER.info(f"Dataset download {s}")
+LOGGER.info(f'Dataset download {s}')
 check_font('Arial.ttf' if is_ascii(data['names']) else 'Arial.Unicode.ttf', progress=True) # download fonts
 return data # dictionary

@@ -675,7 +675,7 @@ def make_divisible(x, divisor):

 def clean_str(s):
 # Cleans a string by replacing special characters with underscore _
-return re.sub(pattern="[|@#!¡·$€%&()=?¿^*;:,¨´><+]", repl="_", string=s)
+return re.sub(pattern='[|@#!¡·$€%&()=?¿^*;:,¨´><+]', repl='_', string=s)


 def one_cycle(y1=0.0, y2=1.0, steps=100):
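The `set_logging` hunk above only changes quote style, but the dictConfig it builds is worth spelling out: it registers one stream handler and one named logger so that a later `logging.getLogger(LOGGING_NAME)` call emits plain `%(message)s` output. The sketch below reproduces that pattern in isolation; the name 'yolov5' is taken from the diff, everything else is a generic illustration rather than the repository's exact code.

```python
import logging
import logging.config

LOGGING_NAME = 'yolov5'
level = logging.INFO

# Same shape as the dictConfig in the diff: one formatter, one StreamHandler,
# one named logger that does not propagate to the root logger.
logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {LOGGING_NAME: {'format': '%(message)s'}},
    'handlers': {LOGGING_NAME: {'class': 'logging.StreamHandler', 'formatter': LOGGING_NAME, 'level': level}},
    'loggers': {LOGGING_NAME: {'level': level, 'handlers': [LOGGING_NAME], 'propagate': False}}})

LOGGER = logging.getLogger(LOGGING_NAME)
LOGGER.info('logger configured')  # prints only the message, no level prefix
```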
@ -121,8 +121,8 @@ class Loggers():
|
|||||||
|
|
||||||
# Comet
|
# Comet
|
||||||
if comet_ml and 'comet' in self.include:
|
if comet_ml and 'comet' in self.include:
|
||||||
if isinstance(self.opt.resume, str) and self.opt.resume.startswith("comet://"):
|
if isinstance(self.opt.resume, str) and self.opt.resume.startswith('comet://'):
|
||||||
run_id = self.opt.resume.split("/")[-1]
|
run_id = self.opt.resume.split('/')[-1]
|
||||||
self.comet_logger = CometLogger(self.opt, self.hyp, run_id=run_id)
|
self.comet_logger = CometLogger(self.opt, self.hyp, run_id=run_id)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@ -158,7 +158,7 @@ class Loggers():
|
|||||||
plot_labels(labels, names, self.save_dir)
|
plot_labels(labels, names, self.save_dir)
|
||||||
paths = self.save_dir.glob('*labels*.jpg') # training labels
|
paths = self.save_dir.glob('*labels*.jpg') # training labels
|
||||||
if self.wandb:
|
if self.wandb:
|
||||||
self.wandb.log({"Labels": [wandb.Image(str(x), caption=x.name) for x in paths]})
|
self.wandb.log({'Labels': [wandb.Image(str(x), caption=x.name) for x in paths]})
|
||||||
# if self.clearml:
|
# if self.clearml:
|
||||||
# pass # ClearML saves these images automatically using hooks
|
# pass # ClearML saves these images automatically using hooks
|
||||||
if self.comet_logger:
|
if self.comet_logger:
|
||||||
@ -212,7 +212,7 @@ class Loggers():
|
|||||||
if self.wandb or self.clearml:
|
if self.wandb or self.clearml:
|
||||||
files = sorted(self.save_dir.glob('val*.jpg'))
|
files = sorted(self.save_dir.glob('val*.jpg'))
|
||||||
if self.wandb:
|
if self.wandb:
|
||||||
self.wandb.log({"Validation": [wandb.Image(str(f), caption=f.name) for f in files]})
|
self.wandb.log({'Validation': [wandb.Image(str(f), caption=f.name) for f in files]})
|
||||||
if self.clearml:
|
if self.clearml:
|
||||||
self.clearml.log_debug_samples(files, title='Validation')
|
self.clearml.log_debug_samples(files, title='Validation')
|
||||||
|
|
||||||
@ -279,7 +279,7 @@ class Loggers():
|
|||||||
|
|
||||||
if self.wandb:
|
if self.wandb:
|
||||||
self.wandb.log(dict(zip(self.keys[3:10], results)))
|
self.wandb.log(dict(zip(self.keys[3:10], results)))
|
||||||
self.wandb.log({"Results": [wandb.Image(str(f), caption=f.name) for f in files]})
|
self.wandb.log({'Results': [wandb.Image(str(f), caption=f.name) for f in files]})
|
||||||
# Calling wandb.log. TODO: Refactor this into WandbLogger.log_model
|
# Calling wandb.log. TODO: Refactor this into WandbLogger.log_model
|
||||||
if not self.opt.evolve:
|
if not self.opt.evolve:
|
||||||
wandb.log_artifact(str(best if best.exists() else last),
|
wandb.log_artifact(str(best if best.exists() else last),
|
||||||
@ -329,7 +329,7 @@ class GenericLogger:
|
|||||||
|
|
||||||
if wandb and 'wandb' in self.include:
|
if wandb and 'wandb' in self.include:
|
||||||
self.wandb = wandb.init(project=web_project_name(str(opt.project)),
|
self.wandb = wandb.init(project=web_project_name(str(opt.project)),
|
||||||
name=None if opt.name == "exp" else opt.name,
|
name=None if opt.name == 'exp' else opt.name,
|
||||||
config=opt)
|
config=opt)
|
||||||
else:
|
else:
|
||||||
self.wandb = None
|
self.wandb = None
|
||||||
@ -370,12 +370,12 @@ class GenericLogger:
|
|||||||
def log_model(self, model_path, epoch=0, metadata={}):
|
def log_model(self, model_path, epoch=0, metadata={}):
|
||||||
# Log model to all loggers
|
# Log model to all loggers
|
||||||
if self.wandb:
|
if self.wandb:
|
||||||
art = wandb.Artifact(name=f"run_{wandb.run.id}_model", type="model", metadata=metadata)
|
art = wandb.Artifact(name=f'run_{wandb.run.id}_model', type='model', metadata=metadata)
|
||||||
art.add_file(str(model_path))
|
art.add_file(str(model_path))
|
||||||
wandb.log_artifact(art)
|
wandb.log_artifact(art)
|
||||||
|
|
||||||
def update_params(self, params):
|
def update_params(self, params):
|
||||||
# Update the paramters logged
|
# Update the parameters logged
|
||||||
if self.wandb:
|
if self.wandb:
|
||||||
wandb.run.config.update(params, allow_val_change=True)
|
wandb.run.config.update(params, allow_val_change=True)
|
||||||
|
|
||||||
|
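For reference, the GenericLogger hunk above uses the standard W&B artifact API for checkpoint logging. A hedged sketch of the same flow; the project name and checkpoint path are illustrative, not taken from this repo:

    import wandb

    run = wandb.init(project='YOLOv5', name=None)  # name=None lets wandb pick a run name, as above
    art = wandb.Artifact(name=f'run_{wandb.run.id}_model', type='model', metadata={'epoch': 0})
    art.add_file('runs/train/exp/weights/best.pt')  # hypothetical checkpoint path
    wandb.log_artifact(art)
    run.finish()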
|||||||
@ -25,7 +25,7 @@ def construct_dataset(clearml_info_string):
|
|||||||
dataset_root_path = Path(dataset.get_local_copy())
|
dataset_root_path = Path(dataset.get_local_copy())
|
||||||
|
|
||||||
# We'll search for the yaml file definition in the dataset
|
# We'll search for the yaml file definition in the dataset
|
||||||
yaml_filenames = list(glob.glob(str(dataset_root_path / "*.yaml")) + glob.glob(str(dataset_root_path / "*.yml")))
|
yaml_filenames = list(glob.glob(str(dataset_root_path / '*.yaml')) + glob.glob(str(dataset_root_path / '*.yml')))
|
||||||
if len(yaml_filenames) > 1:
|
if len(yaml_filenames) > 1:
|
||||||
raise ValueError('More than one yaml file was found in the dataset root, cannot determine which one contains '
|
raise ValueError('More than one yaml file was found in the dataset root, cannot determine which one contains '
|
||||||
'the dataset definition this way.')
|
'the dataset definition this way.')
|
||||||
@ -100,7 +100,7 @@ class ClearmlLogger:
|
|||||||
self.task.connect(opt, name='Args')
|
self.task.connect(opt, name='Args')
|
||||||
|
|
||||||
# Make sure the code is easily remotely runnable by setting the docker image to use by the remote agent
|
# Make sure the code is easily remotely runnable by setting the docker image to use by the remote agent
|
||||||
self.task.set_base_docker("ultralytics/yolov5:latest",
|
self.task.set_base_docker('ultralytics/yolov5:latest',
|
||||||
docker_arguments='--ipc=host -e="CLEARML_AGENT_SKIP_PYTHON_ENV_INSTALL=1"',
|
docker_arguments='--ipc=host -e="CLEARML_AGENT_SKIP_PYTHON_ENV_INSTALL=1"',
|
||||||
docker_setup_bash_script='pip install clearml')
|
docker_setup_bash_script='pip install clearml')
|
||||||
|
|
||||||
@ -150,7 +150,7 @@ class ClearmlLogger:
|
|||||||
|
|
||||||
class_name = class_names[int(class_nr)]
|
class_name = class_names[int(class_nr)]
|
||||||
confidence_percentage = round(float(conf) * 100, 2)
|
confidence_percentage = round(float(conf) * 100, 2)
|
||||||
label = f"{class_name}: {confidence_percentage}%"
|
label = f'{class_name}: {confidence_percentage}%'
|
||||||
|
|
||||||
if conf > conf_threshold:
|
if conf > conf_threshold:
|
||||||
annotator.rectangle(box.cpu().numpy(), outline=color)
|
annotator.rectangle(box.cpu().numpy(), outline=color)
|
||||||
|
|||||||
@ -17,7 +17,7 @@ try:
|
|||||||
|
|
||||||
# Project Configuration
|
# Project Configuration
|
||||||
config = comet_ml.config.get_config()
|
config = comet_ml.config.get_config()
|
||||||
COMET_PROJECT_NAME = config.get_string(os.getenv("COMET_PROJECT_NAME"), "comet.project_name", default="yolov5")
|
COMET_PROJECT_NAME = config.get_string(os.getenv('COMET_PROJECT_NAME'), 'comet.project_name', default='yolov5')
|
||||||
except (ModuleNotFoundError, ImportError):
|
except (ModuleNotFoundError, ImportError):
|
||||||
comet_ml = None
|
comet_ml = None
|
||||||
COMET_PROJECT_NAME = None
|
COMET_PROJECT_NAME = None
|
||||||
@ -31,32 +31,32 @@ from utils.dataloaders import img2label_paths
|
|||||||
from utils.general import check_dataset, scale_boxes, xywh2xyxy
|
from utils.general import check_dataset, scale_boxes, xywh2xyxy
|
||||||
from utils.metrics import box_iou
|
from utils.metrics import box_iou
|
||||||
|
|
||||||
COMET_PREFIX = "comet://"
|
COMET_PREFIX = 'comet://'
|
||||||
|
|
||||||
COMET_MODE = os.getenv("COMET_MODE", "online")
|
COMET_MODE = os.getenv('COMET_MODE', 'online')
|
||||||
|
|
||||||
# Model Saving Settings
|
# Model Saving Settings
|
||||||
COMET_MODEL_NAME = os.getenv("COMET_MODEL_NAME", "yolov5")
|
COMET_MODEL_NAME = os.getenv('COMET_MODEL_NAME', 'yolov5')
|
||||||
|
|
||||||
# Dataset Artifact Settings
|
# Dataset Artifact Settings
|
||||||
COMET_UPLOAD_DATASET = os.getenv("COMET_UPLOAD_DATASET", "false").lower() == "true"
|
COMET_UPLOAD_DATASET = os.getenv('COMET_UPLOAD_DATASET', 'false').lower() == 'true'
|
||||||
|
|
||||||
# Evaluation Settings
|
# Evaluation Settings
|
||||||
COMET_LOG_CONFUSION_MATRIX = os.getenv("COMET_LOG_CONFUSION_MATRIX", "true").lower() == "true"
|
COMET_LOG_CONFUSION_MATRIX = os.getenv('COMET_LOG_CONFUSION_MATRIX', 'true').lower() == 'true'
|
||||||
COMET_LOG_PREDICTIONS = os.getenv("COMET_LOG_PREDICTIONS", "true").lower() == "true"
|
COMET_LOG_PREDICTIONS = os.getenv('COMET_LOG_PREDICTIONS', 'true').lower() == 'true'
|
||||||
COMET_MAX_IMAGE_UPLOADS = int(os.getenv("COMET_MAX_IMAGE_UPLOADS", 100))
|
COMET_MAX_IMAGE_UPLOADS = int(os.getenv('COMET_MAX_IMAGE_UPLOADS', 100))
|
||||||
|
|
||||||
# Confusion Matrix Settings
|
# Confusion Matrix Settings
|
||||||
CONF_THRES = float(os.getenv("CONF_THRES", 0.001))
|
CONF_THRES = float(os.getenv('CONF_THRES', 0.001))
|
||||||
IOU_THRES = float(os.getenv("IOU_THRES", 0.6))
|
IOU_THRES = float(os.getenv('IOU_THRES', 0.6))
|
||||||
|
|
||||||
# Batch Logging Settings
|
# Batch Logging Settings
|
||||||
COMET_LOG_BATCH_METRICS = os.getenv("COMET_LOG_BATCH_METRICS", "false").lower() == "true"
|
COMET_LOG_BATCH_METRICS = os.getenv('COMET_LOG_BATCH_METRICS', 'false').lower() == 'true'
|
||||||
COMET_BATCH_LOGGING_INTERVAL = os.getenv("COMET_BATCH_LOGGING_INTERVAL", 1)
|
COMET_BATCH_LOGGING_INTERVAL = os.getenv('COMET_BATCH_LOGGING_INTERVAL', 1)
|
||||||
COMET_PREDICTION_LOGGING_INTERVAL = os.getenv("COMET_PREDICTION_LOGGING_INTERVAL", 1)
|
COMET_PREDICTION_LOGGING_INTERVAL = os.getenv('COMET_PREDICTION_LOGGING_INTERVAL', 1)
|
||||||
COMET_LOG_PER_CLASS_METRICS = os.getenv("COMET_LOG_PER_CLASS_METRICS", "false").lower() == "true"
|
COMET_LOG_PER_CLASS_METRICS = os.getenv('COMET_LOG_PER_CLASS_METRICS', 'false').lower() == 'true'
|
||||||
|
|
||||||
RANK = int(os.getenv("RANK", -1))
|
RANK = int(os.getenv('RANK', -1))
|
||||||
|
|
||||||
to_pil = T.ToPILImage()
|
to_pil = T.ToPILImage()
|
||||||
|
|
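The constants above are all read from the environment at import time, so the Comet integration can be tuned per run without touching code, as long as the variables are set before this module is imported. An illustrative configuration from Python (values are examples):

    import os

    os.environ['COMET_MODE'] = 'offline'             # default above is 'online'
    os.environ['COMET_MODEL_NAME'] = 'yolov5'        # checkpoint artifact name
    os.environ['COMET_UPLOAD_DATASET'] = 'true'      # parsed with .lower() == 'true'
    os.environ['COMET_MAX_IMAGE_UPLOADS'] = '50'     # parsed with int()
    os.environ['COMET_LOG_PER_CLASS_METRICS'] = 'true'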
||||||
@ -66,7 +66,7 @@ class CometLogger:
|
|||||||
with Comet
|
with Comet
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, opt, hyp, run_id=None, job_type="Training", **experiment_kwargs) -> None:
|
def __init__(self, opt, hyp, run_id=None, job_type='Training', **experiment_kwargs) -> None:
|
||||||
self.job_type = job_type
|
self.job_type = job_type
|
||||||
self.opt = opt
|
self.opt = opt
|
||||||
self.hyp = hyp
|
self.hyp = hyp
|
||||||
@ -87,52 +87,52 @@ class CometLogger:
|
|||||||
|
|
||||||
# Default parameters to pass to Experiment objects
|
# Default parameters to pass to Experiment objects
|
||||||
self.default_experiment_kwargs = {
|
self.default_experiment_kwargs = {
|
||||||
"log_code": False,
|
'log_code': False,
|
||||||
"log_env_gpu": True,
|
'log_env_gpu': True,
|
||||||
"log_env_cpu": True,
|
'log_env_cpu': True,
|
||||||
"project_name": COMET_PROJECT_NAME,}
|
'project_name': COMET_PROJECT_NAME,}
|
||||||
self.default_experiment_kwargs.update(experiment_kwargs)
|
self.default_experiment_kwargs.update(experiment_kwargs)
|
||||||
self.experiment = self._get_experiment(self.comet_mode, run_id)
|
self.experiment = self._get_experiment(self.comet_mode, run_id)
|
||||||
|
|
||||||
self.data_dict = self.check_dataset(self.opt.data)
|
self.data_dict = self.check_dataset(self.opt.data)
|
||||||
self.class_names = self.data_dict["names"]
|
self.class_names = self.data_dict['names']
|
||||||
self.num_classes = self.data_dict["nc"]
|
self.num_classes = self.data_dict['nc']
|
||||||
|
|
||||||
self.logged_images_count = 0
|
self.logged_images_count = 0
|
||||||
self.max_images = COMET_MAX_IMAGE_UPLOADS
|
self.max_images = COMET_MAX_IMAGE_UPLOADS
|
||||||
|
|
||||||
if run_id is None:
|
if run_id is None:
|
||||||
self.experiment.log_other("Created from", "YOLOv5")
|
self.experiment.log_other('Created from', 'YOLOv5')
|
||||||
if not isinstance(self.experiment, comet_ml.OfflineExperiment):
|
if not isinstance(self.experiment, comet_ml.OfflineExperiment):
|
||||||
workspace, project_name, experiment_id = self.experiment.url.split("/")[-3:]
|
workspace, project_name, experiment_id = self.experiment.url.split('/')[-3:]
|
||||||
self.experiment.log_other(
|
self.experiment.log_other(
|
||||||
"Run Path",
|
'Run Path',
|
||||||
f"{workspace}/{project_name}/{experiment_id}",
|
f'{workspace}/{project_name}/{experiment_id}',
|
||||||
)
|
)
|
||||||
self.log_parameters(vars(opt))
|
self.log_parameters(vars(opt))
|
||||||
self.log_parameters(self.opt.hyp)
|
self.log_parameters(self.opt.hyp)
|
||||||
self.log_asset_data(
|
self.log_asset_data(
|
||||||
self.opt.hyp,
|
self.opt.hyp,
|
||||||
name="hyperparameters.json",
|
name='hyperparameters.json',
|
||||||
metadata={"type": "hyp-config-file"},
|
metadata={'type': 'hyp-config-file'},
|
||||||
)
|
)
|
||||||
self.log_asset(
|
self.log_asset(
|
||||||
f"{self.opt.save_dir}/opt.yaml",
|
f'{self.opt.save_dir}/opt.yaml',
|
||||||
metadata={"type": "opt-config-file"},
|
metadata={'type': 'opt-config-file'},
|
||||||
)
|
)
|
||||||
|
|
||||||
self.comet_log_confusion_matrix = COMET_LOG_CONFUSION_MATRIX
|
self.comet_log_confusion_matrix = COMET_LOG_CONFUSION_MATRIX
|
||||||
|
|
||||||
if hasattr(self.opt, "conf_thres"):
|
if hasattr(self.opt, 'conf_thres'):
|
||||||
self.conf_thres = self.opt.conf_thres
|
self.conf_thres = self.opt.conf_thres
|
||||||
else:
|
else:
|
||||||
self.conf_thres = CONF_THRES
|
self.conf_thres = CONF_THRES
|
||||||
if hasattr(self.opt, "iou_thres"):
|
if hasattr(self.opt, 'iou_thres'):
|
||||||
self.iou_thres = self.opt.iou_thres
|
self.iou_thres = self.opt.iou_thres
|
||||||
else:
|
else:
|
||||||
self.iou_thres = IOU_THRES
|
self.iou_thres = IOU_THRES
|
||||||
|
|
||||||
self.log_parameters({"val_iou_threshold": self.iou_thres, "val_conf_threshold": self.conf_thres})
|
self.log_parameters({'val_iou_threshold': self.iou_thres, 'val_conf_threshold': self.conf_thres})
|
||||||
|
|
||||||
self.comet_log_predictions = COMET_LOG_PREDICTIONS
|
self.comet_log_predictions = COMET_LOG_PREDICTIONS
|
||||||
if self.opt.bbox_interval == -1:
|
if self.opt.bbox_interval == -1:
|
||||||
@ -147,22 +147,22 @@ class CometLogger:
|
|||||||
self.comet_log_per_class_metrics = COMET_LOG_PER_CLASS_METRICS
|
self.comet_log_per_class_metrics = COMET_LOG_PER_CLASS_METRICS
|
||||||
|
|
||||||
self.experiment.log_others({
|
self.experiment.log_others({
|
||||||
"comet_mode": COMET_MODE,
|
'comet_mode': COMET_MODE,
|
||||||
"comet_max_image_uploads": COMET_MAX_IMAGE_UPLOADS,
|
'comet_max_image_uploads': COMET_MAX_IMAGE_UPLOADS,
|
||||||
"comet_log_per_class_metrics": COMET_LOG_PER_CLASS_METRICS,
|
'comet_log_per_class_metrics': COMET_LOG_PER_CLASS_METRICS,
|
||||||
"comet_log_batch_metrics": COMET_LOG_BATCH_METRICS,
|
'comet_log_batch_metrics': COMET_LOG_BATCH_METRICS,
|
||||||
"comet_log_confusion_matrix": COMET_LOG_CONFUSION_MATRIX,
|
'comet_log_confusion_matrix': COMET_LOG_CONFUSION_MATRIX,
|
||||||
"comet_model_name": COMET_MODEL_NAME,})
|
'comet_model_name': COMET_MODEL_NAME,})
|
||||||
|
|
||||||
# Check if running the Experiment with the Comet Optimizer
|
# Check if running the Experiment with the Comet Optimizer
|
||||||
if hasattr(self.opt, "comet_optimizer_id"):
|
if hasattr(self.opt, 'comet_optimizer_id'):
|
||||||
self.experiment.log_other("optimizer_id", self.opt.comet_optimizer_id)
|
self.experiment.log_other('optimizer_id', self.opt.comet_optimizer_id)
|
||||||
self.experiment.log_other("optimizer_objective", self.opt.comet_optimizer_objective)
|
self.experiment.log_other('optimizer_objective', self.opt.comet_optimizer_objective)
|
||||||
self.experiment.log_other("optimizer_metric", self.opt.comet_optimizer_metric)
|
self.experiment.log_other('optimizer_metric', self.opt.comet_optimizer_metric)
|
||||||
self.experiment.log_other("optimizer_parameters", json.dumps(self.hyp))
|
self.experiment.log_other('optimizer_parameters', json.dumps(self.hyp))
|
||||||
|
|
||||||
def _get_experiment(self, mode, experiment_id=None):
|
def _get_experiment(self, mode, experiment_id=None):
|
||||||
if mode == "offline":
|
if mode == 'offline':
|
||||||
if experiment_id is not None:
|
if experiment_id is not None:
|
||||||
return comet_ml.ExistingOfflineExperiment(
|
return comet_ml.ExistingOfflineExperiment(
|
||||||
previous_experiment=experiment_id,
|
previous_experiment=experiment_id,
|
||||||
@ -182,11 +182,11 @@ class CometLogger:
|
|||||||
return comet_ml.Experiment(**self.default_experiment_kwargs)
|
return comet_ml.Experiment(**self.default_experiment_kwargs)
|
||||||
|
|
||||||
except ValueError:
|
except ValueError:
|
||||||
logger.warning("COMET WARNING: "
|
logger.warning('COMET WARNING: '
|
||||||
"Comet credentials have not been set. "
|
'Comet credentials have not been set. '
|
||||||
"Comet will default to offline logging. "
|
'Comet will default to offline logging. '
|
||||||
"Please set your credentials to enable online logging.")
|
'Please set your credentials to enable online logging.')
|
||||||
return self._get_experiment("offline", experiment_id)
|
return self._get_experiment('offline', experiment_id)
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
@ -210,12 +210,12 @@ class CometLogger:
|
|||||||
return
|
return
|
||||||
|
|
||||||
model_metadata = {
|
model_metadata = {
|
||||||
"fitness_score": fitness_score[-1],
|
'fitness_score': fitness_score[-1],
|
||||||
"epochs_trained": epoch + 1,
|
'epochs_trained': epoch + 1,
|
||||||
"save_period": opt.save_period,
|
'save_period': opt.save_period,
|
||||||
"total_epochs": opt.epochs,}
|
'total_epochs': opt.epochs,}
|
||||||
|
|
||||||
model_files = glob.glob(f"{path}/*.pt")
|
model_files = glob.glob(f'{path}/*.pt')
|
||||||
for model_path in model_files:
|
for model_path in model_files:
|
||||||
name = Path(model_path).name
|
name = Path(model_path).name
|
||||||
|
|
||||||
@ -232,12 +232,12 @@ class CometLogger:
|
|||||||
data_config = yaml.safe_load(f)
|
data_config = yaml.safe_load(f)
|
||||||
|
|
||||||
if data_config['path'].startswith(COMET_PREFIX):
|
if data_config['path'].startswith(COMET_PREFIX):
|
||||||
path = data_config['path'].replace(COMET_PREFIX, "")
|
path = data_config['path'].replace(COMET_PREFIX, '')
|
||||||
data_dict = self.download_dataset_artifact(path)
|
data_dict = self.download_dataset_artifact(path)
|
||||||
|
|
||||||
return data_dict
|
return data_dict
|
||||||
|
|
||||||
self.log_asset(self.opt.data, metadata={"type": "data-config-file"})
|
self.log_asset(self.opt.data, metadata={'type': 'data-config-file'})
|
||||||
|
|
||||||
return check_dataset(data_file)
|
return check_dataset(data_file)
|
||||||
|
|
||||||
@ -253,8 +253,8 @@ class CometLogger:
|
|||||||
filtered_detections = detections[mask]
|
filtered_detections = detections[mask]
|
||||||
filtered_labels = labelsn[mask]
|
filtered_labels = labelsn[mask]
|
||||||
|
|
||||||
image_id = path.split("/")[-1].split(".")[0]
|
image_id = path.split('/')[-1].split('.')[0]
|
||||||
image_name = f"{image_id}_curr_epoch_{self.experiment.curr_epoch}"
|
image_name = f'{image_id}_curr_epoch_{self.experiment.curr_epoch}'
|
||||||
if image_name not in self.logged_image_names:
|
if image_name not in self.logged_image_names:
|
||||||
native_scale_image = PIL.Image.open(path)
|
native_scale_image = PIL.Image.open(path)
|
||||||
self.log_image(native_scale_image, name=image_name)
|
self.log_image(native_scale_image, name=image_name)
|
||||||
@ -263,22 +263,22 @@ class CometLogger:
|
|||||||
metadata = []
|
metadata = []
|
||||||
for cls, *xyxy in filtered_labels.tolist():
|
for cls, *xyxy in filtered_labels.tolist():
|
||||||
metadata.append({
|
metadata.append({
|
||||||
"label": f"{self.class_names[int(cls)]}-gt",
|
'label': f'{self.class_names[int(cls)]}-gt',
|
||||||
"score": 100,
|
'score': 100,
|
||||||
"box": {
|
'box': {
|
||||||
"x": xyxy[0],
|
'x': xyxy[0],
|
||||||
"y": xyxy[1],
|
'y': xyxy[1],
|
||||||
"x2": xyxy[2],
|
'x2': xyxy[2],
|
||||||
"y2": xyxy[3]},})
|
'y2': xyxy[3]},})
|
||||||
for *xyxy, conf, cls in filtered_detections.tolist():
|
for *xyxy, conf, cls in filtered_detections.tolist():
|
||||||
metadata.append({
|
metadata.append({
|
||||||
"label": f"{self.class_names[int(cls)]}",
|
'label': f'{self.class_names[int(cls)]}',
|
||||||
"score": conf * 100,
|
'score': conf * 100,
|
||||||
"box": {
|
'box': {
|
||||||
"x": xyxy[0],
|
'x': xyxy[0],
|
||||||
"y": xyxy[1],
|
'y': xyxy[1],
|
||||||
"x2": xyxy[2],
|
'x2': xyxy[2],
|
||||||
"y2": xyxy[3]},})
|
'y2': xyxy[3]},})
|
||||||
|
|
||||||
self.metadata_dict[image_name] = metadata
|
self.metadata_dict[image_name] = metadata
|
||||||
self.logged_images_count += 1
|
self.logged_images_count += 1
|
||||||
@ -305,35 +305,35 @@ class CometLogger:
|
|||||||
return predn, labelsn
|
return predn, labelsn
|
||||||
|
|
||||||
def add_assets_to_artifact(self, artifact, path, asset_path, split):
|
def add_assets_to_artifact(self, artifact, path, asset_path, split):
|
||||||
img_paths = sorted(glob.glob(f"{asset_path}/*"))
|
img_paths = sorted(glob.glob(f'{asset_path}/*'))
|
||||||
label_paths = img2label_paths(img_paths)
|
label_paths = img2label_paths(img_paths)
|
||||||
|
|
||||||
for image_file, label_file in zip(img_paths, label_paths):
|
for image_file, label_file in zip(img_paths, label_paths):
|
||||||
image_logical_path, label_logical_path = map(lambda x: os.path.relpath(x, path), [image_file, label_file])
|
image_logical_path, label_logical_path = map(lambda x: os.path.relpath(x, path), [image_file, label_file])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
artifact.add(image_file, logical_path=image_logical_path, metadata={"split": split})
|
artifact.add(image_file, logical_path=image_logical_path, metadata={'split': split})
|
||||||
artifact.add(label_file, logical_path=label_logical_path, metadata={"split": split})
|
artifact.add(label_file, logical_path=label_logical_path, metadata={'split': split})
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
logger.error('COMET ERROR: Error adding file to Artifact. Skipping file.')
|
logger.error('COMET ERROR: Error adding file to Artifact. Skipping file.')
|
||||||
logger.error(f"COMET ERROR: {e}")
|
logger.error(f'COMET ERROR: {e}')
|
||||||
continue
|
continue
|
||||||
|
|
||||||
return artifact
|
return artifact
|
||||||
|
|
||||||
def upload_dataset_artifact(self):
|
def upload_dataset_artifact(self):
|
||||||
dataset_name = self.data_dict.get("dataset_name", "yolov5-dataset")
|
dataset_name = self.data_dict.get('dataset_name', 'yolov5-dataset')
|
||||||
path = str((ROOT / Path(self.data_dict["path"])).resolve())
|
path = str((ROOT / Path(self.data_dict['path'])).resolve())
|
||||||
|
|
||||||
metadata = self.data_dict.copy()
|
metadata = self.data_dict.copy()
|
||||||
for key in ["train", "val", "test"]:
|
for key in ['train', 'val', 'test']:
|
||||||
split_path = metadata.get(key)
|
split_path = metadata.get(key)
|
||||||
if split_path is not None:
|
if split_path is not None:
|
||||||
metadata[key] = split_path.replace(path, "")
|
metadata[key] = split_path.replace(path, '')
|
||||||
|
|
||||||
artifact = comet_ml.Artifact(name=dataset_name, artifact_type="dataset", metadata=metadata)
|
artifact = comet_ml.Artifact(name=dataset_name, artifact_type='dataset', metadata=metadata)
|
||||||
for key in metadata.keys():
|
for key in metadata.keys():
|
||||||
if key in ["train", "val", "test"]:
|
if key in ['train', 'val', 'test']:
|
||||||
if isinstance(self.upload_dataset, str) and (key != self.upload_dataset):
|
if isinstance(self.upload_dataset, str) and (key != self.upload_dataset):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -352,13 +352,13 @@ class CometLogger:
|
|||||||
|
|
||||||
metadata = logged_artifact.metadata
|
metadata = logged_artifact.metadata
|
||||||
data_dict = metadata.copy()
|
data_dict = metadata.copy()
|
||||||
data_dict["path"] = artifact_save_dir
|
data_dict['path'] = artifact_save_dir
|
||||||
|
|
||||||
metadata_names = metadata.get("names")
|
metadata_names = metadata.get('names')
|
||||||
if type(metadata_names) == dict:
|
if type(metadata_names) == dict:
|
||||||
data_dict["names"] = {int(k): v for k, v in metadata.get("names").items()}
|
data_dict['names'] = {int(k): v for k, v in metadata.get('names').items()}
|
||||||
elif type(metadata_names) == list:
|
elif type(metadata_names) == list:
|
||||||
data_dict["names"] = {int(k): v for k, v in zip(range(len(metadata_names)), metadata_names)}
|
data_dict['names'] = {int(k): v for k, v in zip(range(len(metadata_names)), metadata_names)}
|
||||||
else:
|
else:
|
||||||
raise "Invalid 'names' field in dataset yaml file. Please use a list or dictionary"
|
raise "Invalid 'names' field in dataset yaml file. Please use a list or dictionary"
|
||||||
|
|
||||||
@ -366,13 +366,13 @@ class CometLogger:
|
|||||||
return data_dict
|
return data_dict
|
||||||
|
|
||||||
def update_data_paths(self, data_dict):
|
def update_data_paths(self, data_dict):
|
||||||
path = data_dict.get("path", "")
|
path = data_dict.get('path', '')
|
||||||
|
|
||||||
for split in ["train", "val", "test"]:
|
for split in ['train', 'val', 'test']:
|
||||||
if data_dict.get(split):
|
if data_dict.get(split):
|
||||||
split_path = data_dict.get(split)
|
split_path = data_dict.get(split)
|
||||||
data_dict[split] = (f"{path}/{split_path}" if isinstance(split, str) else [
|
data_dict[split] = (f'{path}/{split_path}' if isinstance(split, str) else [
|
||||||
f"{path}/{x}" for x in split_path])
|
f'{path}/{x}' for x in split_path])
|
||||||
|
|
||||||
return data_dict
|
return data_dict
|
||||||
|
|
||||||
@ -413,11 +413,11 @@ class CometLogger:
|
|||||||
def on_train_end(self, files, save_dir, last, best, epoch, results):
|
def on_train_end(self, files, save_dir, last, best, epoch, results):
|
||||||
if self.comet_log_predictions:
|
if self.comet_log_predictions:
|
||||||
curr_epoch = self.experiment.curr_epoch
|
curr_epoch = self.experiment.curr_epoch
|
||||||
self.experiment.log_asset_data(self.metadata_dict, "image-metadata.json", epoch=curr_epoch)
|
self.experiment.log_asset_data(self.metadata_dict, 'image-metadata.json', epoch=curr_epoch)
|
||||||
|
|
||||||
for f in files:
|
for f in files:
|
||||||
self.log_asset(f, metadata={"epoch": epoch})
|
self.log_asset(f, metadata={'epoch': epoch})
|
||||||
self.log_asset(f"{save_dir}/results.csv", metadata={"epoch": epoch})
|
self.log_asset(f'{save_dir}/results.csv', metadata={'epoch': epoch})
|
||||||
|
|
||||||
if not self.opt.evolve:
|
if not self.opt.evolve:
|
||||||
model_path = str(best if best.exists() else last)
|
model_path = str(best if best.exists() else last)
|
||||||
@ -481,7 +481,7 @@ class CometLogger:
|
|||||||
if self.comet_log_confusion_matrix:
|
if self.comet_log_confusion_matrix:
|
||||||
epoch = self.experiment.curr_epoch
|
epoch = self.experiment.curr_epoch
|
||||||
class_names = list(self.class_names.values())
|
class_names = list(self.class_names.values())
|
||||||
class_names.append("background")
|
class_names.append('background')
|
||||||
num_classes = len(class_names)
|
num_classes = len(class_names)
|
||||||
|
|
||||||
self.experiment.log_confusion_matrix(
|
self.experiment.log_confusion_matrix(
|
||||||
@ -491,7 +491,7 @@ class CometLogger:
|
|||||||
epoch=epoch,
|
epoch=epoch,
|
||||||
column_label='Actual Category',
|
column_label='Actual Category',
|
||||||
row_label='Predicted Category',
|
row_label='Predicted Category',
|
||||||
file_name=f"confusion-matrix-epoch-{epoch}.json",
|
file_name=f'confusion-matrix-epoch-{epoch}.json',
|
||||||
)
|
)
|
||||||
|
|
||||||
def on_fit_epoch_end(self, result, epoch):
|
def on_fit_epoch_end(self, result, epoch):
|
||||||
|
|||||||
@ -11,28 +11,28 @@ import yaml
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
COMET_PREFIX = "comet://"
|
COMET_PREFIX = 'comet://'
|
||||||
COMET_MODEL_NAME = os.getenv("COMET_MODEL_NAME", "yolov5")
|
COMET_MODEL_NAME = os.getenv('COMET_MODEL_NAME', 'yolov5')
|
||||||
COMET_DEFAULT_CHECKPOINT_FILENAME = os.getenv("COMET_DEFAULT_CHECKPOINT_FILENAME", "last.pt")
|
COMET_DEFAULT_CHECKPOINT_FILENAME = os.getenv('COMET_DEFAULT_CHECKPOINT_FILENAME', 'last.pt')
|
||||||
|
|
||||||
|
|
||||||
def download_model_checkpoint(opt, experiment):
|
def download_model_checkpoint(opt, experiment):
|
||||||
model_dir = f"{opt.project}/{experiment.name}"
|
model_dir = f'{opt.project}/{experiment.name}'
|
||||||
os.makedirs(model_dir, exist_ok=True)
|
os.makedirs(model_dir, exist_ok=True)
|
||||||
|
|
||||||
model_name = COMET_MODEL_NAME
|
model_name = COMET_MODEL_NAME
|
||||||
model_asset_list = experiment.get_model_asset_list(model_name)
|
model_asset_list = experiment.get_model_asset_list(model_name)
|
||||||
|
|
||||||
if len(model_asset_list) == 0:
|
if len(model_asset_list) == 0:
|
||||||
logger.error(f"COMET ERROR: No checkpoints found for model name : {model_name}")
|
logger.error(f'COMET ERROR: No checkpoints found for model name : {model_name}')
|
||||||
return
|
return
|
||||||
|
|
||||||
model_asset_list = sorted(
|
model_asset_list = sorted(
|
||||||
model_asset_list,
|
model_asset_list,
|
||||||
key=lambda x: x["step"],
|
key=lambda x: x['step'],
|
||||||
reverse=True,
|
reverse=True,
|
||||||
)
|
)
|
||||||
logged_checkpoint_map = {asset["fileName"]: asset["assetId"] for asset in model_asset_list}
|
logged_checkpoint_map = {asset['fileName']: asset['assetId'] for asset in model_asset_list}
|
||||||
|
|
||||||
resource_url = urlparse(opt.weights)
|
resource_url = urlparse(opt.weights)
|
||||||
checkpoint_filename = resource_url.query
|
checkpoint_filename = resource_url.query
|
||||||
@ -44,22 +44,22 @@ def download_model_checkpoint(opt, experiment):
|
|||||||
checkpoint_filename = COMET_DEFAULT_CHECKPOINT_FILENAME
|
checkpoint_filename = COMET_DEFAULT_CHECKPOINT_FILENAME
|
||||||
|
|
||||||
if asset_id is None:
|
if asset_id is None:
|
||||||
logger.error(f"COMET ERROR: Checkpoint {checkpoint_filename} not found in the given Experiment")
|
logger.error(f'COMET ERROR: Checkpoint {checkpoint_filename} not found in the given Experiment')
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logger.info(f"COMET INFO: Downloading checkpoint {checkpoint_filename}")
|
logger.info(f'COMET INFO: Downloading checkpoint {checkpoint_filename}')
|
||||||
asset_filename = checkpoint_filename
|
asset_filename = checkpoint_filename
|
||||||
|
|
||||||
model_binary = experiment.get_asset(asset_id, return_type="binary", stream=False)
|
model_binary = experiment.get_asset(asset_id, return_type='binary', stream=False)
|
||||||
model_download_path = f"{model_dir}/{asset_filename}"
|
model_download_path = f'{model_dir}/{asset_filename}'
|
||||||
with open(model_download_path, "wb") as f:
|
with open(model_download_path, 'wb') as f:
|
||||||
f.write(model_binary)
|
f.write(model_binary)
|
||||||
|
|
||||||
opt.weights = model_download_path
|
opt.weights = model_download_path
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning("COMET WARNING: Unable to download checkpoint from Comet")
|
logger.warning('COMET WARNING: Unable to download checkpoint from Comet')
|
||||||
logger.exception(e)
|
logger.exception(e)
|
||||||
|
|
||||||
|
|
||||||
@ -75,9 +75,9 @@ def set_opt_parameters(opt, experiment):
|
|||||||
resume_string = opt.resume
|
resume_string = opt.resume
|
||||||
|
|
||||||
for asset in asset_list:
|
for asset in asset_list:
|
||||||
if asset["fileName"] == "opt.yaml":
|
if asset['fileName'] == 'opt.yaml':
|
||||||
asset_id = asset["assetId"]
|
asset_id = asset['assetId']
|
||||||
asset_binary = experiment.get_asset(asset_id, return_type="binary", stream=False)
|
asset_binary = experiment.get_asset(asset_id, return_type='binary', stream=False)
|
||||||
opt_dict = yaml.safe_load(asset_binary)
|
opt_dict = yaml.safe_load(asset_binary)
|
||||||
for key, value in opt_dict.items():
|
for key, value in opt_dict.items():
|
||||||
setattr(opt, key, value)
|
setattr(opt, key, value)
|
||||||
@ -85,11 +85,11 @@ def set_opt_parameters(opt, experiment):
|
|||||||
|
|
||||||
# Save hyperparameters to YAML file
|
# Save hyperparameters to YAML file
|
||||||
# Necessary to pass checks in training script
|
# Necessary to pass checks in training script
|
||||||
save_dir = f"{opt.project}/{experiment.name}"
|
save_dir = f'{opt.project}/{experiment.name}'
|
||||||
os.makedirs(save_dir, exist_ok=True)
|
os.makedirs(save_dir, exist_ok=True)
|
||||||
|
|
||||||
hyp_yaml_path = f"{save_dir}/hyp.yaml"
|
hyp_yaml_path = f'{save_dir}/hyp.yaml'
|
||||||
with open(hyp_yaml_path, "w") as f:
|
with open(hyp_yaml_path, 'w') as f:
|
||||||
yaml.dump(opt.hyp, f)
|
yaml.dump(opt.hyp, f)
|
||||||
opt.hyp = hyp_yaml_path
|
opt.hyp = hyp_yaml_path
|
||||||
|
|
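The hyperparameter handoff in set_opt_parameters above is just a YAML dump that the training script later re-reads. A minimal round trip of the same idea (file name and values are examples):

    import yaml

    hyp = {'lr0': 0.01, 'momentum': 0.937}          # example hyperparameters
    with open('hyp.yaml', 'w') as f:
        yaml.dump(hyp, f)
    with open('hyp.yaml') as f:
        assert yaml.safe_load(f) == hyp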
||||||
@ -113,7 +113,7 @@ def check_comet_weights(opt):
|
|||||||
if opt.weights.startswith(COMET_PREFIX):
|
if opt.weights.startswith(COMET_PREFIX):
|
||||||
api = comet_ml.API()
|
api = comet_ml.API()
|
||||||
resource = urlparse(opt.weights)
|
resource = urlparse(opt.weights)
|
||||||
experiment_path = f"{resource.netloc}{resource.path}"
|
experiment_path = f'{resource.netloc}{resource.path}'
|
||||||
experiment = api.get(experiment_path)
|
experiment = api.get(experiment_path)
|
||||||
download_model_checkpoint(opt, experiment)
|
download_model_checkpoint(opt, experiment)
|
||||||
return True
|
return True
|
||||||
@ -140,7 +140,7 @@ def check_comet_resume(opt):
|
|||||||
if opt.resume.startswith(COMET_PREFIX):
|
if opt.resume.startswith(COMET_PREFIX):
|
||||||
api = comet_ml.API()
|
api = comet_ml.API()
|
||||||
resource = urlparse(opt.resume)
|
resource = urlparse(opt.resume)
|
||||||
experiment_path = f"{resource.netloc}{resource.path}"
|
experiment_path = f'{resource.netloc}{resource.path}'
|
||||||
experiment = api.get(experiment_path)
|
experiment = api.get(experiment_path)
|
||||||
set_opt_parameters(opt, experiment)
|
set_opt_parameters(opt, experiment)
|
||||||
download_model_checkpoint(opt, experiment)
|
download_model_checkpoint(opt, experiment)
|
||||||
|
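check_comet_weights and check_comet_resume above lean on urlparse to split a comet:// URI into an experiment path and an optional checkpoint name. A hedged example; the workspace, project and experiment id are made up:

    from urllib.parse import urlparse

    resource = urlparse('comet://my-workspace/my-project/abc123?best.pt')
    experiment_path = f'{resource.netloc}{resource.path}'   # 'my-workspace/my-project/abc123'
    checkpoint_filename = resource.query or 'last.pt'       # fall back to the default checkpoint
    print(experiment_path, checkpoint_filename)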
|||||||
@ -21,7 +21,7 @@ from utils.torch_utils import select_device
|
|||||||
|
|
||||||
# Project Configuration
|
# Project Configuration
|
||||||
config = comet_ml.config.get_config()
|
config = comet_ml.config.get_config()
|
||||||
COMET_PROJECT_NAME = config.get_string(os.getenv("COMET_PROJECT_NAME"), "comet.project_name", default="yolov5")
|
COMET_PROJECT_NAME = config.get_string(os.getenv('COMET_PROJECT_NAME'), 'comet.project_name', default='yolov5')
|
||||||
|
|
||||||
|
|
||||||
def get_args(known=False):
|
def get_args(known=False):
|
||||||
@ -68,30 +68,30 @@ def get_args(known=False):
|
|||||||
parser.add_argument('--artifact_alias', type=str, default='latest', help='W&B: Version of dataset artifact to use')
|
parser.add_argument('--artifact_alias', type=str, default='latest', help='W&B: Version of dataset artifact to use')
|
||||||
|
|
||||||
# Comet Arguments
|
# Comet Arguments
|
||||||
parser.add_argument("--comet_optimizer_config", type=str, help="Comet: Path to a Comet Optimizer Config File.")
|
parser.add_argument('--comet_optimizer_config', type=str, help='Comet: Path to a Comet Optimizer Config File.')
|
||||||
parser.add_argument("--comet_optimizer_id", type=str, help="Comet: ID of the Comet Optimizer sweep.")
|
parser.add_argument('--comet_optimizer_id', type=str, help='Comet: ID of the Comet Optimizer sweep.')
|
||||||
parser.add_argument("--comet_optimizer_objective", type=str, help="Comet: Set to 'minimize' or 'maximize'.")
|
parser.add_argument('--comet_optimizer_objective', type=str, help="Comet: Set to 'minimize' or 'maximize'.")
|
||||||
parser.add_argument("--comet_optimizer_metric", type=str, help="Comet: Metric to Optimize.")
|
parser.add_argument('--comet_optimizer_metric', type=str, help='Comet: Metric to Optimize.')
|
||||||
parser.add_argument("--comet_optimizer_workers",
|
parser.add_argument('--comet_optimizer_workers',
|
||||||
type=int,
|
type=int,
|
||||||
default=1,
|
default=1,
|
||||||
help="Comet: Number of Parallel Workers to use with the Comet Optimizer.")
|
help='Comet: Number of Parallel Workers to use with the Comet Optimizer.')
|
||||||
|
|
||||||
return parser.parse_known_args()[0] if known else parser.parse_args()
|
return parser.parse_known_args()[0] if known else parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
def run(parameters, opt):
|
def run(parameters, opt):
|
||||||
hyp_dict = {k: v for k, v in parameters.items() if k not in ["epochs", "batch_size"]}
|
hyp_dict = {k: v for k, v in parameters.items() if k not in ['epochs', 'batch_size']}
|
||||||
|
|
||||||
opt.save_dir = str(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok or opt.evolve))
|
opt.save_dir = str(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok or opt.evolve))
|
||||||
opt.batch_size = parameters.get("batch_size")
|
opt.batch_size = parameters.get('batch_size')
|
||||||
opt.epochs = parameters.get("epochs")
|
opt.epochs = parameters.get('epochs')
|
||||||
|
|
||||||
device = select_device(opt.device, batch_size=opt.batch_size)
|
device = select_device(opt.device, batch_size=opt.batch_size)
|
||||||
train(hyp_dict, opt, device, callbacks=Callbacks())
|
train(hyp_dict, opt, device, callbacks=Callbacks())
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == '__main__':
|
||||||
opt = get_args(known=True)
|
opt = get_args(known=True)
|
||||||
|
|
||||||
opt.weights = str(opt.weights)
|
opt.weights = str(opt.weights)
|
||||||
@ -99,7 +99,7 @@ if __name__ == "__main__":
|
|||||||
opt.data = str(opt.data)
|
opt.data = str(opt.data)
|
||||||
opt.project = str(opt.project)
|
opt.project = str(opt.project)
|
||||||
|
|
||||||
optimizer_id = os.getenv("COMET_OPTIMIZER_ID")
|
optimizer_id = os.getenv('COMET_OPTIMIZER_ID')
|
||||||
if optimizer_id is None:
|
if optimizer_id is None:
|
||||||
with open(opt.comet_optimizer_config) as f:
|
with open(opt.comet_optimizer_config) as f:
|
||||||
optimizer_config = json.load(f)
|
optimizer_config = json.load(f)
|
||||||
@ -110,9 +110,9 @@ if __name__ == "__main__":
|
|||||||
opt.comet_optimizer_id = optimizer.id
|
opt.comet_optimizer_id = optimizer.id
|
||||||
status = optimizer.status()
|
status = optimizer.status()
|
||||||
|
|
||||||
opt.comet_optimizer_objective = status["spec"]["objective"]
|
opt.comet_optimizer_objective = status['spec']['objective']
|
||||||
opt.comet_optimizer_metric = status["spec"]["metric"]
|
opt.comet_optimizer_metric = status['spec']['metric']
|
||||||
|
|
||||||
logger.info("COMET INFO: Starting Hyperparameter Sweep")
|
logger.info('COMET INFO: Starting Hyperparameter Sweep')
|
||||||
for parameter in optimizer.get_parameters():
|
for parameter in optimizer.get_parameters():
|
||||||
run(parameter["parameters"], opt)
|
run(parameter['parameters'], opt)
|
||||||
|
|||||||
@ -17,7 +17,7 @@ if str(ROOT) not in sys.path:
|
|||||||
sys.path.append(str(ROOT)) # add ROOT to PATH
|
sys.path.append(str(ROOT)) # add ROOT to PATH
|
||||||
RANK = int(os.getenv('RANK', -1))
|
RANK = int(os.getenv('RANK', -1))
|
||||||
DEPRECATION_WARNING = f"{colorstr('wandb')}: WARNING ⚠️ wandb is deprecated and will be removed in a future release. " \
|
DEPRECATION_WARNING = f"{colorstr('wandb')}: WARNING ⚠️ wandb is deprecated and will be removed in a future release. " \
|
||||||
f"See supported integrations at https://github.com/ultralytics/yolov5#integrations."
|
f'See supported integrations at https://github.com/ultralytics/yolov5#integrations.'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import wandb
|
import wandb
|
||||||
@ -65,7 +65,7 @@ class WandbLogger():
|
|||||||
self.data_dict = None
|
self.data_dict = None
|
||||||
if self.wandb:
|
if self.wandb:
|
||||||
self.wandb_run = wandb.init(config=opt,
|
self.wandb_run = wandb.init(config=opt,
|
||||||
resume="allow",
|
resume='allow',
|
||||||
project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem,
|
project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem,
|
||||||
entity=opt.entity,
|
entity=opt.entity,
|
||||||
name=opt.name if opt.name != 'exp' else None,
|
name=opt.name if opt.name != 'exp' else None,
|
||||||
@ -97,7 +97,7 @@ class WandbLogger():
|
|||||||
if isinstance(opt.resume, str):
|
if isinstance(opt.resume, str):
|
||||||
model_dir, _ = self.download_model_artifact(opt)
|
model_dir, _ = self.download_model_artifact(opt)
|
||||||
if model_dir:
|
if model_dir:
|
||||||
self.weights = Path(model_dir) / "last.pt"
|
self.weights = Path(model_dir) / 'last.pt'
|
||||||
config = self.wandb_run.config
|
config = self.wandb_run.config
|
||||||
opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp, opt.imgsz = str(
|
opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp, opt.imgsz = str(
|
||||||
self.weights), config.save_period, config.batch_size, config.bbox_interval, config.epochs, \
|
self.weights), config.save_period, config.batch_size, config.bbox_interval, config.epochs, \
|
||||||
@ -131,7 +131,7 @@ class WandbLogger():
|
|||||||
model_artifact.add_file(str(path / 'last.pt'), name='last.pt')
|
model_artifact.add_file(str(path / 'last.pt'), name='last.pt')
|
||||||
wandb.log_artifact(model_artifact,
|
wandb.log_artifact(model_artifact,
|
||||||
aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else ''])
|
aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else ''])
|
||||||
LOGGER.info(f"Saving model artifact on epoch {epoch + 1}")
|
LOGGER.info(f'Saving model artifact on epoch {epoch + 1}')
|
||||||
|
|
||||||
def val_one_image(self, pred, predn, path, names, im):
|
def val_one_image(self, pred, predn, path, names, im):
|
||||||
pass
|
pass
|
||||||
@ -160,7 +160,7 @@ class WandbLogger():
|
|||||||
wandb.log(self.log_dict)
|
wandb.log(self.log_dict)
|
||||||
except BaseException as e:
|
except BaseException as e:
|
||||||
LOGGER.info(
|
LOGGER.info(
|
||||||
f"An error occurred in wandb logger. The training will proceed without interruption. More info\n{e}"
|
f'An error occurred in wandb logger. The training will proceed without interruption. More info\n{e}'
|
||||||
)
|
)
|
||||||
self.wandb_run.finish()
|
self.wandb_run.finish()
|
||||||
self.wandb_run = None
|
self.wandb_run = None
|
||||||
|
|||||||
@ -28,7 +28,7 @@ def smooth(y, f=0.05):
|
|||||||
return np.convolve(yp, np.ones(nf) / nf, mode='valid') # y-smoothed
|
return np.convolve(yp, np.ones(nf) / nf, mode='valid') # y-smoothed
|
||||||
|
|
||||||
|
|
||||||
def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=(), eps=1e-16, prefix=""):
|
def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=(), eps=1e-16, prefix=''):
|
||||||
""" Compute the average precision, given the recall and precision curves.
|
""" Compute the average precision, given the recall and precision curves.
|
||||||
Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
|
Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
|
||||||
# Arguments
|
# Arguments
|
||||||
@ -194,14 +194,14 @@ class ConfusionMatrix:
|
|||||||
nc, nn = self.nc, len(names) # number of classes, names
|
nc, nn = self.nc, len(names) # number of classes, names
|
||||||
sn.set(font_scale=1.0 if nc < 50 else 0.8) # for label size
|
sn.set(font_scale=1.0 if nc < 50 else 0.8) # for label size
|
||||||
labels = (0 < nn < 99) and (nn == nc) # apply names to ticklabels
|
labels = (0 < nn < 99) and (nn == nc) # apply names to ticklabels
|
||||||
ticklabels = (names + ['background']) if labels else "auto"
|
ticklabels = (names + ['background']) if labels else 'auto'
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
warnings.simplefilter('ignore') # suppress empty matrix RuntimeWarning: All-NaN slice encountered
|
warnings.simplefilter('ignore') # suppress empty matrix RuntimeWarning: All-NaN slice encountered
|
||||||
sn.heatmap(array,
|
sn.heatmap(array,
|
||||||
ax=ax,
|
ax=ax,
|
||||||
annot=nc < 30,
|
annot=nc < 30,
|
||||||
annot_kws={
|
annot_kws={
|
||||||
"size": 8},
|
'size': 8},
|
||||||
cmap='Blues',
|
cmap='Blues',
|
||||||
fmt='.2f',
|
fmt='.2f',
|
||||||
square=True,
|
square=True,
|
||||||
@ -331,7 +331,7 @@ def plot_pr_curve(px, py, ap, save_dir=Path('pr_curve.png'), names=()):
|
|||||||
ax.set_ylabel('Precision')
|
ax.set_ylabel('Precision')
|
||||||
ax.set_xlim(0, 1)
|
ax.set_xlim(0, 1)
|
||||||
ax.set_ylim(0, 1)
|
ax.set_ylim(0, 1)
|
||||||
ax.legend(bbox_to_anchor=(1.04, 1), loc="upper left")
|
ax.legend(bbox_to_anchor=(1.04, 1), loc='upper left')
|
||||||
ax.set_title('Precision-Recall Curve')
|
ax.set_title('Precision-Recall Curve')
|
||||||
fig.savefig(save_dir, dpi=250)
|
fig.savefig(save_dir, dpi=250)
|
||||||
plt.close(fig)
|
plt.close(fig)
|
||||||
@ -354,7 +354,7 @@ def plot_mc_curve(px, py, save_dir=Path('mc_curve.png'), names=(), xlabel='Confi
|
|||||||
ax.set_ylabel(ylabel)
|
ax.set_ylabel(ylabel)
|
||||||
ax.set_xlim(0, 1)
|
ax.set_xlim(0, 1)
|
||||||
ax.set_ylim(0, 1)
|
ax.set_ylim(0, 1)
|
||||||
ax.legend(bbox_to_anchor=(1.04, 1), loc="upper left")
|
ax.legend(bbox_to_anchor=(1.04, 1), loc='upper left')
|
||||||
ax.set_title(f'{ylabel}-Confidence Curve')
|
ax.set_title(f'{ylabel}-Confidence Curve')
|
||||||
fig.savefig(save_dir, dpi=250)
|
fig.savefig(save_dir, dpi=250)
|
||||||
plt.close(fig)
|
plt.close(fig)
|
||||||
|
|||||||
@ -450,7 +450,7 @@ def imshow_cls(im, labels=None, pred=None, names=None, nmax=25, verbose=False, f
|
|||||||
plt.savefig(f, dpi=300, bbox_inches='tight')
|
plt.savefig(f, dpi=300, bbox_inches='tight')
|
||||||
plt.close()
|
plt.close()
|
||||||
if verbose:
|
if verbose:
|
||||||
LOGGER.info(f"Saving {f}")
|
LOGGER.info(f'Saving {f}')
|
||||||
if labels is not None:
|
if labels is not None:
|
||||||
LOGGER.info('True: ' + ' '.join(f'{names[i]:3s}' for i in labels[:nmax]))
|
LOGGER.info('True: ' + ' '.join(f'{names[i]:3s}' for i in labels[:nmax]))
|
||||||
if pred is not None:
|
if pred is not None:
|
||||||
|
|||||||
@ -95,7 +95,7 @@ class LoadImagesAndLabelsAndMasks(LoadImagesAndLabels): # for training/testing
|
|||||||
stride=32,
|
stride=32,
|
||||||
pad=0,
|
pad=0,
|
||||||
min_items=0,
|
min_items=0,
|
||||||
prefix="",
|
prefix='',
|
||||||
downsample_ratio=1,
|
downsample_ratio=1,
|
||||||
overlap=False,
|
overlap=False,
|
||||||
):
|
):
|
||||||
@ -116,7 +116,7 @@ class LoadImagesAndLabelsAndMasks(LoadImagesAndLabels): # for training/testing
|
|||||||
shapes = None
|
shapes = None
|
||||||
|
|
||||||
# MixUp augmentation
|
# MixUp augmentation
|
||||||
if random.random() < hyp["mixup"]:
|
if random.random() < hyp['mixup']:
|
||||||
img, labels, segments = mixup(img, labels, segments, *self.load_mosaic(random.randint(0, self.n - 1)))
|
img, labels, segments = mixup(img, labels, segments, *self.load_mosaic(random.randint(0, self.n - 1)))
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@ -147,11 +147,11 @@ class LoadImagesAndLabelsAndMasks(LoadImagesAndLabels): # for training/testing
|
|||||||
img, labels, segments = random_perspective(img,
|
img, labels, segments = random_perspective(img,
|
||||||
labels,
|
labels,
|
||||||
segments=segments,
|
segments=segments,
|
||||||
degrees=hyp["degrees"],
|
degrees=hyp['degrees'],
|
||||||
translate=hyp["translate"],
|
translate=hyp['translate'],
|
||||||
scale=hyp["scale"],
|
scale=hyp['scale'],
|
||||||
shear=hyp["shear"],
|
shear=hyp['shear'],
|
||||||
perspective=hyp["perspective"])
|
perspective=hyp['perspective'])
|
||||||
|
|
||||||
nl = len(labels) # number of labels
|
nl = len(labels) # number of labels
|
||||||
if nl:
|
if nl:
|
||||||
@ -177,17 +177,17 @@ class LoadImagesAndLabelsAndMasks(LoadImagesAndLabels): # for training/testing
|
|||||||
nl = len(labels) # update after albumentations
|
nl = len(labels) # update after albumentations
|
||||||
|
|
||||||
# HSV color-space
|
# HSV color-space
|
||||||
augment_hsv(img, hgain=hyp["hsv_h"], sgain=hyp["hsv_s"], vgain=hyp["hsv_v"])
|
augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v'])
|
||||||
|
|
||||||
# Flip up-down
|
# Flip up-down
|
||||||
if random.random() < hyp["flipud"]:
|
if random.random() < hyp['flipud']:
|
||||||
img = np.flipud(img)
|
img = np.flipud(img)
|
||||||
if nl:
|
if nl:
|
||||||
labels[:, 2] = 1 - labels[:, 2]
|
labels[:, 2] = 1 - labels[:, 2]
|
||||||
masks = torch.flip(masks, dims=[1])
|
masks = torch.flip(masks, dims=[1])
|
||||||
|
|
||||||
# Flip left-right
|
# Flip left-right
|
||||||
if random.random() < hyp["fliplr"]:
|
if random.random() < hyp['fliplr']:
|
||||||
img = np.fliplr(img)
|
img = np.fliplr(img)
|
||||||
if nl:
|
if nl:
|
||||||
labels[:, 1] = 1 - labels[:, 1]
|
labels[:, 1] = 1 - labels[:, 1]
|
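The flip logic changed above is probability-gated by the hyperparameter dict. A self-contained sketch with dummy data; the probabilities are examples:

    import random
    import numpy as np

    hyp = {'flipud': 0.0, 'fliplr': 0.5}             # example probabilities
    img = np.zeros((640, 640, 3), dtype=np.uint8)    # dummy image
    labels = np.array([[0, 0.3, 0.4, 0.2, 0.2]])     # class, x-centre, y-centre, w, h (normalized)

    if random.random() < hyp['flipud']:              # vertical flip
        img = np.flipud(img)
        labels[:, 2] = 1 - labels[:, 2]
    if random.random() < hyp['fliplr']:              # horizontal flip
        img = np.fliplr(img)
        labels[:, 1] = 1 - labels[:, 1]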
||||||
@ -251,15 +251,15 @@ class LoadImagesAndLabelsAndMasks(LoadImagesAndLabels): # for training/testing
|
|||||||
# img4, labels4 = replicate(img4, labels4) # replicate
|
# img4, labels4 = replicate(img4, labels4) # replicate
|
||||||
|
|
||||||
# Augment
|
# Augment
|
||||||
img4, labels4, segments4 = copy_paste(img4, labels4, segments4, p=self.hyp["copy_paste"])
|
img4, labels4, segments4 = copy_paste(img4, labels4, segments4, p=self.hyp['copy_paste'])
|
||||||
img4, labels4, segments4 = random_perspective(img4,
|
img4, labels4, segments4 = random_perspective(img4,
|
||||||
labels4,
|
labels4,
|
||||||
segments4,
|
segments4,
|
||||||
degrees=self.hyp["degrees"],
|
degrees=self.hyp['degrees'],
|
||||||
translate=self.hyp["translate"],
|
translate=self.hyp['translate'],
|
||||||
scale=self.hyp["scale"],
|
scale=self.hyp['scale'],
|
||||||
shear=self.hyp["shear"],
|
shear=self.hyp['shear'],
|
||||||
perspective=self.hyp["perspective"],
|
perspective=self.hyp['perspective'],
|
||||||
border=self.mosaic_border) # border to remove
|
border=self.mosaic_border) # border to remove
|
||||||
return img4, labels4, segments4
|
return img4, labels4, segments4
|
||||||
|
|
||||||
|
|||||||
@ -83,7 +83,7 @@ class ComputeLoss:
|
|||||||
|
|
||||||
# Mask regression
|
# Mask regression
|
||||||
if tuple(masks.shape[-2:]) != (mask_h, mask_w): # downsample
|
if tuple(masks.shape[-2:]) != (mask_h, mask_w): # downsample
|
||||||
masks = F.interpolate(masks[None], (mask_h, mask_w), mode="nearest")[0]
|
masks = F.interpolate(masks[None], (mask_h, mask_w), mode='nearest')[0]
|
||||||
marea = xywhn[i][:, 2:].prod(1) # mask width, height normalized
|
marea = xywhn[i][:, 2:].prod(1) # mask width, height normalized
|
||||||
mxyxy = xywh2xyxy(xywhn[i] * torch.tensor([mask_w, mask_h, mask_w, mask_h], device=self.device))
|
mxyxy = xywh2xyxy(xywhn[i] * torch.tensor([mask_w, mask_h, mask_w, mask_h], device=self.device))
|
||||||
for bi in b.unique():
|
for bi in b.unique():
|
||||||
@ -101,10 +101,10 @@ class ComputeLoss:
|
|||||||
|
|
||||||
if self.autobalance:
|
if self.autobalance:
|
||||||
self.balance = [x / self.balance[self.ssi] for x in self.balance]
|
self.balance = [x / self.balance[self.ssi] for x in self.balance]
|
||||||
lbox *= self.hyp["box"]
|
lbox *= self.hyp['box']
|
||||||
lobj *= self.hyp["obj"]
|
lobj *= self.hyp['obj']
|
||||||
lcls *= self.hyp["cls"]
|
lcls *= self.hyp['cls']
|
||||||
lseg *= self.hyp["box"] / bs
|
lseg *= self.hyp['box'] / bs
|
||||||
|
|
||||||
loss = lbox + lobj + lcls + lseg
|
loss = lbox + lobj + lcls + lseg
|
||||||
return loss * bs, torch.cat((lbox, lseg, lobj, lcls)).detach()
|
return loss * bs, torch.cat((lbox, lseg, lobj, lcls)).detach()
|
||||||
@ -112,7 +112,7 @@ class ComputeLoss:
|
|||||||
def single_mask_loss(self, gt_mask, pred, proto, xyxy, area):
|
def single_mask_loss(self, gt_mask, pred, proto, xyxy, area):
|
||||||
# Mask loss for one image
|
# Mask loss for one image
|
||||||
pred_mask = (pred @ proto.view(self.nm, -1)).view(-1, *proto.shape[1:]) # (n,32) @ (32,80,80) -> (n,80,80)
|
pred_mask = (pred @ proto.view(self.nm, -1)).view(-1, *proto.shape[1:]) # (n,32) @ (32,80,80) -> (n,80,80)
|
||||||
loss = F.binary_cross_entropy_with_logits(pred_mask, gt_mask, reduction="none")
|
loss = F.binary_cross_entropy_with_logits(pred_mask, gt_mask, reduction='none')
|
||||||
return (crop_mask(loss, xyxy).mean(dim=(1, 2)) / area).mean()
|
return (crop_mask(loss, xyxy).mean(dim=(1, 2)) / area).mean()
|
||||||
|
|
||||||
def build_targets(self, p, targets):
|
def build_targets(self, p, targets):
|
||||||
|
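The comment in single_mask_loss above documents the shape contract of the prototype product. A quick shape check in plain PyTorch; sizes are illustrative, and crop_mask plus the per-box area normalisation are intentionally omitted:

    import torch
    import torch.nn.functional as F

    nm, h, w, n = 32, 80, 80, 5                       # prototypes, proto height/width, instances
    proto = torch.randn(nm, h, w)                     # prototype masks from the segmentation head
    pred = torch.randn(n, nm)                         # per-instance mask coefficients
    gt_mask = torch.randint(0, 2, (n, h, w)).float()  # dummy ground-truth masks

    pred_mask = (pred @ proto.view(nm, -1)).view(-1, h, w)  # (n,32) @ (32,6400) -> (n,80,80)
    loss = F.binary_cross_entropy_with_logits(pred_mask, gt_mask, reduction='none')
    print(pred_mask.shape, loss.shape)  # torch.Size([5, 80, 80]) torch.Size([5, 80, 80])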
|||||||
@ -21,7 +21,7 @@ def ap_per_class_box_and_mask(
|
|||||||
pred_cls,
|
pred_cls,
|
||||||
target_cls,
|
target_cls,
|
||||||
plot=False,
|
plot=False,
|
||||||
save_dir=".",
|
save_dir='.',
|
||||||
names=(),
|
names=(),
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
@ -37,7 +37,7 @@ def ap_per_class_box_and_mask(
|
|||||||
plot=plot,
|
plot=plot,
|
||||||
save_dir=save_dir,
|
save_dir=save_dir,
|
||||||
names=names,
|
names=names,
|
||||||
prefix="Box")[2:]
|
prefix='Box')[2:]
|
||||||
results_masks = ap_per_class(tp_m,
|
results_masks = ap_per_class(tp_m,
|
||||||
conf,
|
conf,
|
||||||
pred_cls,
|
pred_cls,
|
||||||
@ -45,21 +45,21 @@ def ap_per_class_box_and_mask(
|
|||||||
plot=plot,
|
plot=plot,
|
||||||
save_dir=save_dir,
|
save_dir=save_dir,
|
||||||
names=names,
|
names=names,
|
||||||
prefix="Mask")[2:]
|
prefix='Mask')[2:]
|
||||||
|
|
||||||
results = {
|
results = {
|
||||||
"boxes": {
|
'boxes': {
|
||||||
"p": results_boxes[0],
|
'p': results_boxes[0],
|
||||||
"r": results_boxes[1],
|
'r': results_boxes[1],
|
||||||
"ap": results_boxes[3],
|
'ap': results_boxes[3],
|
||||||
"f1": results_boxes[2],
|
'f1': results_boxes[2],
|
||||||
"ap_class": results_boxes[4]},
|
'ap_class': results_boxes[4]},
|
||||||
"masks": {
|
'masks': {
|
||||||
"p": results_masks[0],
|
'p': results_masks[0],
|
||||||
"r": results_masks[1],
|
'r': results_masks[1],
|
||||||
"ap": results_masks[3],
|
'ap': results_masks[3],
|
||||||
"f1": results_masks[2],
|
'f1': results_masks[2],
|
||||||
"ap_class": results_masks[4]}}
|
'ap_class': results_masks[4]}}
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
@ -159,8 +159,8 @@ class Metrics:
|
|||||||
Args:
|
Args:
|
||||||
results: Dict{'boxes': Dict{}, 'masks': Dict{}}
|
results: Dict{'boxes': Dict{}, 'masks': Dict{}}
|
||||||
"""
|
"""
|
||||||
self.metric_box.update(list(results["boxes"].values()))
|
self.metric_box.update(list(results['boxes'].values()))
|
||||||
self.metric_mask.update(list(results["masks"].values()))
|
self.metric_mask.update(list(results['masks'].values()))
|
||||||
|
|
||||||
def mean_results(self):
|
def mean_results(self):
|
||||||
return self.metric_box.mean_results() + self.metric_mask.mean_results()
|
return self.metric_box.mean_results() + self.metric_mask.mean_results()
|
||||||
@ -178,33 +178,33 @@ class Metrics:
|
|||||||
|
|
||||||
|
|
||||||
KEYS = [
|
KEYS = [
|
||||||
"train/box_loss",
|
'train/box_loss',
|
||||||
"train/seg_loss", # train loss
|
'train/seg_loss', # train loss
|
||||||
"train/obj_loss",
|
'train/obj_loss',
|
||||||
"train/cls_loss",
|
'train/cls_loss',
|
||||||
"metrics/precision(B)",
|
'metrics/precision(B)',
|
||||||
"metrics/recall(B)",
|
'metrics/recall(B)',
|
||||||
"metrics/mAP_0.5(B)",
|
'metrics/mAP_0.5(B)',
|
||||||
"metrics/mAP_0.5:0.95(B)", # metrics
|
'metrics/mAP_0.5:0.95(B)', # metrics
|
||||||
"metrics/precision(M)",
|
'metrics/precision(M)',
|
||||||
"metrics/recall(M)",
|
'metrics/recall(M)',
|
||||||
"metrics/mAP_0.5(M)",
|
'metrics/mAP_0.5(M)',
|
||||||
"metrics/mAP_0.5:0.95(M)", # metrics
|
'metrics/mAP_0.5:0.95(M)', # metrics
|
||||||
"val/box_loss",
|
'val/box_loss',
|
||||||
"val/seg_loss", # val loss
|
'val/seg_loss', # val loss
|
||||||
"val/obj_loss",
|
'val/obj_loss',
|
||||||
"val/cls_loss",
|
'val/cls_loss',
|
||||||
"x/lr0",
|
'x/lr0',
|
||||||
"x/lr1",
|
'x/lr1',
|
||||||
"x/lr2",]
|
'x/lr2',]
|
||||||
|
|
||||||
BEST_KEYS = [
|
BEST_KEYS = [
|
||||||
"best/epoch",
|
'best/epoch',
|
||||||
"best/precision(B)",
|
'best/precision(B)',
|
||||||
"best/recall(B)",
|
'best/recall(B)',
|
||||||
"best/mAP_0.5(B)",
|
'best/mAP_0.5(B)',
|
||||||
"best/mAP_0.5:0.95(B)",
|
'best/mAP_0.5:0.95(B)',
|
||||||
"best/precision(M)",
|
'best/precision(M)',
|
||||||
"best/recall(M)",
|
'best/recall(M)',
|
||||||
"best/mAP_0.5(M)",
|
'best/mAP_0.5(M)',
|
||||||
"best/mAP_0.5:0.95(M)",]
|
'best/mAP_0.5:0.95(M)',]
|
||||||
|
|||||||
@ -108,13 +108,13 @@ def plot_images_and_masks(images, targets, masks, paths=None, fname='images.jpg'
|
|||||||
annotator.im.save(fname) # save
|
annotator.im.save(fname) # save
|
||||||
|
|
||||||
|
|
||||||
def plot_results_with_masks(file="path/to/results.csv", dir="", best=True):
|
def plot_results_with_masks(file='path/to/results.csv', dir='', best=True):
|
||||||
# Plot training results.csv. Usage: from utils.plots import *; plot_results('path/to/results.csv')
|
# Plot training results.csv. Usage: from utils.plots import *; plot_results('path/to/results.csv')
|
||||||
save_dir = Path(file).parent if file else Path(dir)
|
save_dir = Path(file).parent if file else Path(dir)
|
||||||
fig, ax = plt.subplots(2, 8, figsize=(18, 6), tight_layout=True)
|
fig, ax = plt.subplots(2, 8, figsize=(18, 6), tight_layout=True)
|
||||||
ax = ax.ravel()
|
ax = ax.ravel()
|
||||||
files = list(save_dir.glob("results*.csv"))
|
files = list(save_dir.glob('results*.csv'))
|
||||||
assert len(files), f"No results.csv files found in {save_dir.resolve()}, nothing to plot."
|
assert len(files), f'No results.csv files found in {save_dir.resolve()}, nothing to plot.'
|
||||||
for f in files:
|
for f in files:
|
||||||
try:
|
try:
|
||||||
data = pd.read_csv(f)
|
data = pd.read_csv(f)
|
||||||
@ -125,19 +125,19 @@ def plot_results_with_masks(file="path/to/results.csv", dir="", best=True):
             for i, j in enumerate([1, 2, 3, 4, 5, 6, 9, 10, 13, 14, 15, 16, 7, 8, 11, 12]):
                 y = data.values[:, j]
                 # y[y == 0] = np.nan  # don't show zero values
-                ax[i].plot(x, y, marker=".", label=f.stem, linewidth=2, markersize=2)
+                ax[i].plot(x, y, marker='.', label=f.stem, linewidth=2, markersize=2)
                 if best:
                     # best
-                    ax[i].scatter(index, y[index], color="r", label=f"best:{index}", marker="*", linewidth=3)
+                    ax[i].scatter(index, y[index], color='r', label=f'best:{index}', marker='*', linewidth=3)
-                    ax[i].set_title(s[j] + f"\n{round(y[index], 5)}")
+                    ax[i].set_title(s[j] + f'\n{round(y[index], 5)}')
                 else:
                     # last
-                    ax[i].scatter(x[-1], y[-1], color="r", label="last", marker="*", linewidth=3)
+                    ax[i].scatter(x[-1], y[-1], color='r', label='last', marker='*', linewidth=3)
-                    ax[i].set_title(s[j] + f"\n{round(y[-1], 5)}")
+                    ax[i].set_title(s[j] + f'\n{round(y[-1], 5)}')
                 # if j in [8, 9, 10]:  # share train and val loss y axes
                 #     ax[i].get_shared_y_axes().join(ax[i], ax[i - 5])
         except Exception as e:
-            print(f"Warning: Plotting error for {f}: {e}")
+            print(f'Warning: Plotting error for {f}: {e}')
     ax[1].legend()
-    fig.savefig(save_dir / "results.png", dpi=200)
+    fig.savefig(save_dir / 'results.png', dpi=200)
     plt.close()
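A minimal usage sketch for the plotting helper above; the import path and CSV location are assumptions for illustration, not taken from this commit:

# Hypothetical usage; the module path and results.csv location are assumed.
from utils.segment.plots import plot_results_with_masks

plot_results_with_masks(file='runs/train-seg/exp/results.csv', best=True)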
@ -291,7 +291,7 @@ def model_info(model, verbose=False, imgsz=640):
         fs = ''

     name = Path(model.yaml_file).stem.replace('yolov5', 'YOLOv3') if hasattr(model, 'yaml_file') else 'Model'
-    LOGGER.info(f"{name} summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}")
+    LOGGER.info(f'{name} summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}')


 def scale_img(img, ratio=1.0, same_shape=False, gs=32):  # img(16,3,256,416)
@ -342,7 +342,7 @@ def smart_optimizer(model, name='Adam', lr=0.001, momentum=0.9, decay=1e-5):
     optimizer.add_param_group({'params': g[0], 'weight_decay': decay})  # add g0 with weight_decay
     optimizer.add_param_group({'params': g[1], 'weight_decay': 0.0})  # add g1 (BatchNorm2d weights)
     LOGGER.info(f"{colorstr('optimizer:')} {type(optimizer).__name__}(lr={lr}) with parameter groups "
-                f"{len(g[1])} weight(decay=0.0), {len(g[0])} weight(decay={decay}), {len(g[2])} bias")
+                f'{len(g[1])} weight(decay=0.0), {len(g[0])} weight(decay={decay}), {len(g[2])} bias')
     return optimizer


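For context, the g[0]/g[1]/g[2] groups referenced above usually come from a three-way split of model parameters: regular weights (with decay), normalization weights (no decay), and biases (no decay). A hedged sketch of that split using a toy stand-in model, not the code from this commit:

# Hedged sketch of the usual parameter split behind g[0], g[1] and g[2].
import torch.nn as nn

model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.Conv2d(8, 4, 1))  # toy stand-in
g = [], [], []  # parameter groups: (decayed weights, norm weights, biases)
bn = tuple(v for k, v in nn.__dict__.items() if 'Norm' in k)  # all normalization layer classes
for v in model.modules():
    for p_name, p in v.named_parameters(recurse=False):
        if p_name == 'bias':  # biases (no decay)
            g[2].append(p)
        elif p_name == 'weight' and isinstance(v, bn):  # norm weights (no decay)
            g[1].append(p)
        else:  # other weights (with decay)
            g[0].append(p)
print(len(g[0]), len(g[1]), len(g[2]))  # -> 2 1 3

With this split, only g[0] receives weight decay, which matches the add_param_group calls in the hunk above.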
@ -21,7 +21,7 @@ class TritonRemoteModel:
         """

         parsed_url = urlparse(url)
-        if parsed_url.scheme == "grpc":
+        if parsed_url.scheme == 'grpc':
             from tritonclient.grpc import InferenceServerClient, InferInput

             self.client = InferenceServerClient(parsed_url.netloc)  # Triton GRPC client
@ -31,7 +31,7 @@ class TritonRemoteModel:

             def create_input_placeholders() -> typing.List[InferInput]:
                 return [
-                    InferInput(i['name'], [int(s) for s in i["shape"]], i['datatype']) for i in self.metadata['inputs']]
+                    InferInput(i['name'], [int(s) for s in i['shape']], i['datatype']) for i in self.metadata['inputs']]

         else:
             from tritonclient.http import InferenceServerClient, InferInput
@ -43,14 +43,14 @@ class TritonRemoteModel:

             def create_input_placeholders() -> typing.List[InferInput]:
                 return [
-                    InferInput(i['name'], [int(s) for s in i["shape"]], i['datatype']) for i in self.metadata['inputs']]
+                    InferInput(i['name'], [int(s) for s in i['shape']], i['datatype']) for i in self.metadata['inputs']]

         self._create_input_placeholders_fn = create_input_placeholders

     @property
     def runtime(self):
         """Returns the model runtime"""
-        return self.metadata.get("backend", self.metadata.get("platform"))
+        return self.metadata.get('backend', self.metadata.get('platform'))

     def __call__(self, *args, **kwargs) -> typing.Union[torch.Tensor, typing.Tuple[torch.Tensor, ...]]:
         """ Invokes the model. Parameters can be provided via args or kwargs.
@ -68,14 +68,14 @@ class TritonRemoteModel:
     def _create_inputs(self, *args, **kwargs):
         args_len, kwargs_len = len(args), len(kwargs)
         if not args_len and not kwargs_len:
-            raise RuntimeError("No inputs provided.")
+            raise RuntimeError('No inputs provided.')
         if args_len and kwargs_len:
-            raise RuntimeError("Cannot specify args and kwargs at the same time")
+            raise RuntimeError('Cannot specify args and kwargs at the same time')

         placeholders = self._create_input_placeholders_fn()
         if args_len:
             if args_len != len(placeholders):
-                raise RuntimeError(f"Expected {len(placeholders)} inputs, got {args_len}.")
+                raise RuntimeError(f'Expected {len(placeholders)} inputs, got {args_len}.')
             for input, value in zip(placeholders, args):
                 input.set_data_from_numpy(value.cpu().numpy())
         else:
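A minimal, hedged usage sketch for the Triton wrapper above. The module path, server URL, and input shape are assumptions for illustration; only the gRPC/HTTP scheme handling and the tensor-in/tensor-out __call__ are taken from the code shown.

# Hypothetical usage; assumes a Triton server is reachable at this URL and
# serves a model that accepts one 1x3x640x640 float32 image tensor.
import torch
from utils.triton import TritonRemoteModel  # module path assumed

model = TritonRemoteModel('grpc://localhost:8001')  # an 'http://...' URL is handled by the else branch
y = model(torch.zeros(1, 3, 640, 640))  # returns a tensor or a tuple of tensors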
val.py
@ -304,7 +304,7 @@ def run(
     if save_json and len(jdict):
         w = Path(weights[0] if isinstance(weights, list) else weights).stem if weights is not None else ''  # weights
         anno_json = str(Path('../datasets/coco/annotations/instances_val2017.json'))  # annotations
-        pred_json = str(save_dir / f"{w}_predictions.json")  # predictions
+        pred_json = str(save_dir / f'{w}_predictions.json')  # predictions
         LOGGER.info(f'\nEvaluating pycocotools mAP... saving {pred_json}...')
         with open(pred_json, 'w') as f:
             json.dump(jdict, f)
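The anno_json/pred_json pair written above is normally consumed by pycocotools immediately afterwards; a hedged sketch of that evaluation, with placeholder paths standing in for the variables in the hunk:

# Hedged sketch: COCO-style evaluation of the JSON files written above.
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval

anno_json = '../datasets/coco/annotations/instances_val2017.json'  # as in the hunk above
pred_json = 'exp_predictions.json'  # hypothetical predictions file

anno = COCO(anno_json)  # load ground-truth annotations
pred = anno.loadRes(pred_json)  # load predictions
coco_eval = COCOeval(anno, pred, 'bbox')
coco_eval.evaluate()
coco_eval.accumulate()
coco_eval.summarize()
map50_95, map50 = coco_eval.stats[:2]  # mAP@0.5:0.95 and mAP@0.5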
@ -404,6 +404,6 @@ def main(opt):
         raise NotImplementedError(f'--task {opt.task} not in ("train", "val", "test", "speed", "study")')


-if __name__ == "__main__":
+if __name__ == '__main__':
     opt = parse_opt()
     main(opt)