Fix a GPU memory leak in detect. No need to calculate gradients in inference. (#900)
Co-authored-by: JKO095 <juho-pekka.koponen@wartsila.com>
commit 072f76c72c
parent 8b616af63a
@@ -84,6 +84,7 @@ def detect(save_img=False):
 
         # Inference
         t1 = time_synchronized()
-        pred = model(img, augment=opt.augment)[0]
+        with torch.no_grad():  # Calculating gradients would cause a GPU memory leak
+            pred = model(img, augment=opt.augment)[0]
         t2 = time_synchronized()
 
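The idea behind the fix: wrapping the forward pass in torch.no_grad() tells autograd not to record the computation graph, so activations from each inference call are freed immediately instead of accumulating on the GPU. A minimal sketch of the pattern is below; the model and img objects are illustrative placeholders, not the repository's actual loading and preprocessing code.

import torch

# Stand-ins for the detection model and the preprocessed image tensor
# (hypothetical; the real detect.py builds these from opt/weights/dataset).
model = torch.nn.Linear(10, 2).eval()
img = torch.randn(1, 10)

with torch.no_grad():      # autograd does not track the forward pass,
    pred = model(img)      # so no graph is retained and memory stays flat

Without the context manager, every forward pass inside the detection loop keeps its computation graph alive as long as the output tensor is referenced, which on the GPU shows up as steadily growing memory use across frames.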