From 5d3b268958ce9c98aa6e22fdc986e227dae6d450 Mon Sep 17 00:00:00 2001
From: JIAKUNHAO
Date: Mon, 28 Nov 2022 17:24:23 +0800
Subject: [PATCH] Return the intermediate value every epoch
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/yolov5/train_server.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/app/yolov5/train_server.py b/app/yolov5/train_server.py
index 9358eb2..e466caa 100644
--- a/app/yolov5/train_server.py
+++ b/app/yolov5/train_server.py
@@ -356,6 +356,7 @@ def train(hyp, opt, device, data_list,id,getsomething,callbacks): # hyp is path
         if RANK in {-1, 0}:
             pbar = tqdm(pbar, total=nb, bar_format='{l_bar}{bar:10}{r_bar}{bar:-10b}')  # progress bar
         optimizer.zero_grad()
+        tempLoss = 0
         for i, (imgs, targets, paths, _) in pbar:  # batch -------------------------------------------------------------
             #callbacks.run('on_train_batch_start')
             print("start get global_var")
@@ -406,7 +407,8 @@ def train(hyp, opt, device, data_list,id,getsomething,callbacks): # hyp is path
 
             # Backward
             scaler.scale(loss).backward()
-            report_cellback(epoch, epochs, float(loss))
+            tempLoss = float(loss)
+
 
             # Optimize - https://pytorch.org/docs/master/notes/amp_examples.html
             if ni - last_opt_step >= accumulate:
@@ -429,7 +431,7 @@ def train(hyp, opt, device, data_list,id,getsomething,callbacks): # hyp is path
                 if callbacks.stop_training:
                     return
             # end batch ------------------------------------------------------------------------------------------------
-
+        report_cellback(epoch, epochs, tempLoss)
         # Scheduler
         lr = [x['lr'] for x in optimizer.param_groups]  # for loggers
         scheduler.step()
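
For context, the change amounts to the following pattern: the per-batch loss is stashed in tempLoss inside the batch loop, and report_cellback is invoked once per epoch after the loop instead of on every batch. Below is a minimal, runnable sketch of that pattern; the toy model, data, and the stand-in report_cellback are illustrative assumptions, not the real train_server.py code.

import torch
import torch.nn as nn

def report_cellback(epoch, epochs, loss_value):
    # Stand-in for the real callback: just surface the per-epoch intermediate value.
    print(f"epoch {epoch + 1}/{epochs} last-batch loss: {loss_value:.4f}")

def train_sketch(epochs=3, batches_per_epoch=5):
    model = nn.Linear(10, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
    criterion = nn.MSELoss()

    for epoch in range(epochs):
        tempLoss = 0  # holds the most recent batch loss for this epoch
        for _ in range(batches_per_epoch):
            x, y = torch.randn(8, 10), torch.randn(8, 1)
            optimizer.zero_grad()
            loss = criterion(model(x), y)
            loss.backward()
            optimizer.step()
            tempLoss = float(loss)  # overwritten each batch; only the last value survives
        # end of batch loop: report once per epoch, as the patch does
        report_cellback(epoch, epochs, tempLoss)

if __name__ == "__main__":
    train_sketch()

Note that with this pattern the value handed to report_cellback is the loss of the epoch's final batch, not an epoch average; averaging would require accumulating tempLoss over the loop and dividing by the batch count.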