From cf5db95953644c0e661d6907d035e0de690631ff Mon Sep 17 00:00:00 2001 From: "huntr.dev | the place to protect open source" Date: Mon, 25 Jan 2021 17:39:34 +0000 Subject: [PATCH] Security Fix for Arbitrary Code Execution - huntr.dev (#1672) * fixed arbitrary code execution * Update train.py * Full to Safe Co-authored-by: Asjid Kalam Co-authored-by: Jamie Slome Co-authored-by: Glenn Jocher --- train.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/train.py b/train.py index 9f869cfd..91d8dfe0 100644 --- a/train.py +++ b/train.py @@ -59,7 +59,7 @@ def train(hyp, opt, device, tb_writer=None, wandb=None): cuda = device.type != 'cpu' init_seeds(2 + rank) with open(opt.data) as f: - data_dict = yaml.load(f, Loader=yaml.FullLoader) # data dict + data_dict = yaml.load(f, Loader=yaml.SafeLoader) # data dict with torch_distributed_zero_first(rank): check_dataset(data_dict) # check train_path = data_dict['train'] @@ -476,7 +476,7 @@ if __name__ == '__main__': assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist' apriori = opt.global_rank, opt.local_rank with open(Path(ckpt).parent.parent / 'opt.yaml') as f: - opt = argparse.Namespace(**yaml.load(f, Loader=yaml.FullLoader)) # replace + opt = argparse.Namespace(**yaml.load(f, Loader=yaml.SafeLoader)) # replace opt.cfg, opt.weights, opt.resume, opt.global_rank, opt.local_rank = '', ckpt, True, *apriori # reinstate logger.info('Resuming training from %s' % ckpt) else: @@ -500,7 +500,7 @@ if __name__ == '__main__': # Hyperparameters with open(opt.hyp) as f: - hyp = yaml.load(f, Loader=yaml.FullLoader) # load hyps + hyp = yaml.load(f, Loader=yaml.SafeLoader) # load hyps # Train logger.info(opt)