alozowski committed
Commit · c37556f
1 Parent(s): b6cde24

Resubmit failed models
- 01-ai/Yi-1.5-6B-Chat_eval_request_False_bfloat16_Original.json +1 -1
- Danielbrdz/Barcenas-27b_eval_request_False_float16_Original.json +1 -1
- Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4_eval_request_False_bfloat16_Original.json +1 -1
- Gunulhona/Gemma-Ko-Merge_eval_request_False_bfloat16_Original.json +1 -1
- allknowingroger/Llama-3.1-Nemotron-70B-Instruct-HF-F32_eval_request_False_float16_Original.json +1 -1
- allknowingroger/Ministral-8B-slerp_eval_request_False_float16_Original.json +1 -1
- allknowingroger/Qwen2.5pass-50B_eval_request_False_bfloat16_Original.json +1 -1
- allknowingroger/Rombos-LLM-V2.5-Qwen-42b_eval_request_False_bfloat16_Original.json +1 -1
- allknowingroger/Yi-1.5-34B_eval_request_False_bfloat16_Original.json +1 -1
- anthracite-org/magnum-v4-12b_eval_request_False_bfloat16_Original.json +1 -1
- anthracite-org/magnum-v4-27b_eval_request_False_bfloat16_Original.json +1 -1
- bunnycore/Llama-3.2-3B-Mix-Skill_eval_request_False_float16_Original.json +1 -1
- rwitz/cat0.1_eval_request_False_4bit_Original.json +1 -1
- shastraai/Shastra-Mistral-Commonsense-SFT_eval_request_False_bfloat16_Adapter.json +1 -1
- shastraai/Shastra-Mistral-Math-SFT_eval_request_False_bfloat16_Adapter.json +1 -1
- sophosympatheia/Midnight-Miqu-70B-v1.5_eval_request_False_float16_Original.json +1 -1
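Every file below receives the same one-line edit: the "status" field of the eval request is reset to "PENDING" so the queue picks the model up again. As a rough illustration only (not the leaderboard's actual tooling), a bulk resubmission like this could be scripted as in the sketch below; the script name, the file list excerpt, and the assumption that the request files sit in a local clone of the requests dataset are all hypothetical.

```python
# resubmit_failed.py - hypothetical helper, a minimal sketch under the
# assumptions stated above; not the leaderboard's actual tooling.
import json
from pathlib import Path

# Excerpt of the request files touched by this commit (see the full list above).
REQUEST_FILES = [
    "01-ai/Yi-1.5-6B-Chat_eval_request_False_bfloat16_Original.json",
    "Danielbrdz/Barcenas-27b_eval_request_False_float16_Original.json",
    "sophosympatheia/Midnight-Miqu-70B-v1.5_eval_request_False_float16_Original.json",
    # ... remaining files from the list above
]

def resubmit(repo_root: str = ".") -> None:
    """Flip each listed eval request back to PENDING in a local clone."""
    for rel_path in REQUEST_FILES:
        path = Path(repo_root) / rel_path
        request = json.loads(path.read_text(encoding="utf-8"))
        request["status"] = "PENDING"  # requeue the failed evaluation
        path.write_text(json.dumps(request, indent=4) + "\n", encoding="utf-8")

if __name__ == "__main__":
    resubmit()
```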
01-ai/Yi-1.5-6B-Chat_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 6.061,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-22T19:42:46Z",
 "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)",
 "job_id": "10094497",
Danielbrdz/Barcenas-27b_eval_request_False_float16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 27.227,
 "architectures": "Gemma2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-21T20:22:37Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10080744",
Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 14.77,
 "architectures": "Qwen2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-23T10:51:48Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10103628",
Gunulhona/Gemma-Ko-Merge_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 10.159,
 "architectures": "Gemma2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-23T08:34:10Z",
 "model_type": "\ud83d\udcac : \ud83d\udcac chat models (RLHF, DPO, IFT, ...)",
 "job_id": "10102386",
allknowingroger/Llama-3.1-Nemotron-70B-Instruct-HF-F32_eval_request_False_float16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 70.554,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-21T13:32:43Z",
 "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges",
 "job_id": "10080219",
allknowingroger/Ministral-8B-slerp_eval_request_False_float16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 7.248,
 "architectures": "MistralForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-21T13:33:12Z",
 "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges",
 "job_id": "10080235",
allknowingroger/Qwen2.5pass-50B_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 52.268,
 "architectures": "Qwen2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-21T13:29:34Z",
 "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges",
 "job_id": "10080214",
allknowingroger/Rombos-LLM-V2.5-Qwen-42b_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 42.516,
 "architectures": "Qwen2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-21T13:27:56Z",
 "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges",
 "job_id": "10080231",
allknowingroger/Yi-1.5-34B_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 34.389,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-21T13:26:44Z",
 "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges",
 "job_id": "10080229",
anthracite-org/magnum-v4-12b_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 12.248,
 "architectures": "MistralForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-23T02:08:52Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10102264",
anthracite-org/magnum-v4-27b_eval_request_False_bfloat16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 27.227,
 "architectures": "Gemma2ForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-23T02:08:19Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10103517",
bunnycore/Llama-3.2-3B-Mix-Skill_eval_request_False_float16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 3.607,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-24T10:16:45Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10145110",
rwitz/cat0.1_eval_request_False_4bit_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 1.848,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-23T05:23:42Z",
 "model_type": "\ud83d\udfe9 : \ud83d\udfe9 continuously pretrained",
 "job_id": "10104221",
shastraai/Shastra-Mistral-Commonsense-SFT_eval_request_False_bfloat16_Adapter.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 2.398,
 "architectures": "?",
 "weight_type": "Adapter",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-27T20:15:12Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10321722",
shastraai/Shastra-Mistral-Math-SFT_eval_request_False_bfloat16_Adapter.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 2.398,
 "architectures": "?",
 "weight_type": "Adapter",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-27T20:14:14Z",
 "model_type": "\ud83d\udd36 : \ud83d\udd36 fine-tuned on domain-specific datasets",
 "job_id": "10321719",
sophosympatheia/Midnight-Miqu-70B-v1.5_eval_request_False_float16_Original.json
CHANGED
@@ -6,7 +6,7 @@
 "params": 68.977,
 "architectures": "LlamaForCausalLM",
 "weight_type": "Original",
-"status": "
+"status": "PENDING",
 "submitted_time": "2024-10-22T17:46:21Z",
 "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges",
 "job_id": "10103783",