From b442625bf64c981a8286d776163ba92119cb708d Mon Sep 17 00:00:00 2001
From: Aidan Do <aidando73@gmail.com>
Date: Sat, 23 Nov 2024 11:40:54 +0000
Subject: [PATCH] ensure pretrain_.yaml is updated

---
 .../meta_eval/prepare_meta_eval.py | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/tools/benchmarks/llm_eval_harness/meta_eval/prepare_meta_eval.py b/tools/benchmarks/llm_eval_harness/meta_eval/prepare_meta_eval.py
index 539dea32..9fb2c59d 100644
--- a/tools/benchmarks/llm_eval_harness/meta_eval/prepare_meta_eval.py
+++ b/tools/benchmarks/llm_eval_harness/meta_eval/prepare_meta_eval.py
@@ -134,18 +134,26 @@ def change_yaml(args, base_name):
                 "WORK_DIR", str(yaml_dir)
             )
         )
-    # 3.2 evals dataset has a differents set of evals
+    # The 3.2 evals dataset has a different set of evals from 3.1,
+    # so update the tasks in the meta_pretrain.yaml file (3.2 for meta_instruct.yaml is not supported yet)
+    with open(args.template_dir + "/meta_pretrain.yaml", "r") as yaml_file:
+        meta_pretrain = yaml.safe_load(yaml_file)
+
     if args.evals_dataset in [
         "meta-llama/Llama-3.2-1B-evals",
         "meta-llama/Llama-3.2-3B-evals",
     ]:
-        # Change meta_pretrain.yaml to load in supported evals
-        with open(args.template_dir + "/meta_pretrain.yaml", "r") as yaml_file:
-            meta_pretrain = yaml.safe_load(yaml_file)
-        meta_pretrain["task"] = ["meta_mmlu"]
-
-        with open(args.work_dir + "/meta_pretrain.yaml", "w") as yaml_file:
-            yaml.dump(meta_pretrain, yaml_file)
+        meta_pretrain["task"] = ["meta_mmlu"]
+    elif args.evals_dataset in [
+        "meta-llama/Llama-3.1-8B-evals",
+        "meta-llama/Llama-3.1-70B-evals",
+        "meta-llama/Llama-3.1-405B-evals",
+    ]:
+        meta_pretrain["task"] = ["meta_bbh", "meta_mmlu_pro_pretrain"]
+
+    with open(args.work_dir + "/meta_pretrain.yaml", "w") as yaml_file:
+        yaml.dump(meta_pretrain, yaml_file)
+
--
GitLab
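
Note (not part of the patch): below is a minimal standalone sketch of the control flow change_yaml() follows after this hunk: the template meta_pretrain.yaml is always loaded, the task list is chosen per evals dataset, and the result is always written to the work directory. The helper names select_pretrain_tasks and write_pretrain_yaml are illustrative only; the snippet assumes PyYAML is installed and that template_dir and work_dir each contain a meta_pretrain.yaml.

    import yaml

    # Illustrative helper (hypothetical name): choose the pretrain task list
    # the same way the patched change_yaml() does.
    def select_pretrain_tasks(evals_dataset):
        if evals_dataset in (
            "meta-llama/Llama-3.2-1B-evals",
            "meta-llama/Llama-3.2-3B-evals",
        ):
            # 3.2 pretrain evals currently only cover meta_mmlu
            return ["meta_mmlu"]
        if evals_dataset in (
            "meta-llama/Llama-3.1-8B-evals",
            "meta-llama/Llama-3.1-70B-evals",
            "meta-llama/Llama-3.1-405B-evals",
        ):
            return ["meta_bbh", "meta_mmlu_pro_pretrain"]
        # Any other dataset leaves the template's task list untouched,
        # mirroring the if/elif structure in the patch.
        return None

    # Illustrative helper (hypothetical name): load the template, update the
    # task list if needed, and always write the result to the work dir.
    def write_pretrain_yaml(template_dir, work_dir, evals_dataset):
        with open(template_dir + "/meta_pretrain.yaml", "r") as f:
            meta_pretrain = yaml.safe_load(f)
        tasks = select_pretrain_tasks(evals_dataset)
        if tasks is not None:
            meta_pretrain["task"] = tasks
        with open(work_dir + "/meta_pretrain.yaml", "w") as f:
            yaml.dump(meta_pretrain, f)

For example, write_pretrain_yaml(args.template_dir, args.work_dir, args.evals_dataset) reproduces the patched behavior end to end: unlike the pre-patch code, the write to work_dir happens for both the 3.1 and 3.2 evals datasets, not only for 3.2.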