Tags: PyTorch · English · llama
{"model_name": "finetune", "model_type": "sft", "datasets": ["hamishivi/tulu_3.9_arena_hard_top939k"], "base_model": "meta-llama/Llama-3.1-8B", "wandb_path": "https://wandb.ai/ai2-llm/open_instruct_internal/runs/q4797gyy", "beaker_experiment": "https://beaker.org/ex/01JDE969MD67CMEVA6ZZC54PHG/", "beaker_datasets": ["https://beaker.org/ds/01JDE969NWHDFGEYA7MY4G81KG", "https://beaker.org/ds/01JDEEF2JQQJ656C899PVTTEHS", "https://beaker.org/ds/01JDEEJ2Y7HBSQNFXYCP69PF84", "https://beaker.org/ds/01JDEPEG01SQ5HPNCAZ43ZMCC3", "https://beaker.org/ds/01JDF97PFM739Z3TKV9RNMQXK5", "https://beaker.org/ds/01JDFNFJ9D4ME98WQ4NB1X745F", "https://beaker.org/ds/01JDFSR975VKRB7SN9PBQ9G0B0", "https://beaker.org/ds/01JDHA22ET4G07BK51E1TBB835", "https://beaker.org/ds/01JDHRVW5WF0Z4W2AB5YZSV9E9", "https://beaker.org/ds/01JDHXA3Y686QN2VQ876VVKEA3", "https://beaker.org/ds/01JDK9N4KYP2DVVV63NS4VV9J6", "https://beaker.org/ds/01JDKBEGT17GPN0184J02QXKHN", "https://beaker.org/ds/01JDKNP28EEVA3FTDQF67BDQH1", "https://beaker.org/ds/01JDKT4FMFKDQBYDRFFJ8R9J5Q", "https://beaker.org/ds/01JDKXR5BHFQ6ZBMRAZE4R1SFN"]}