Rename inference-cache-config/llama3.json to inference-cache-config/llama3-8b.json
inference-cache-config/{llama3.json → llama3-8b.json}
RENAMED
@@ -42,19 +42,5 @@
             "num_cores": 8,
             "auto_cast_type": "fp16"
         }
-    ],
-    "meta-llama/Meta-Llama-3-70B": [
-        {
-            "batch_size": 1,
-            "sequence_length": 4096,
-            "num_cores": 24,
-            "auto_cast_type": "fp16"
-        },
-        {
-            "batch_size": 4,
-            "sequence_length": 4096,
-            "num_cores": 24,
-            "auto_cast_type": "fp16"
-        }
     ]
 }
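
For reference, a minimal sketch of how the renamed file could be inspected after this change, assuming only the standard json module and the path shown above. The field names (batch_size, sequence_length, num_cores, auto_cast_type) are taken from the diff; the loop itself is illustrative and not part of the repository.

import json

# Sketch: load the renamed cache config and print the export settings it
# contains. Path and field names come from the diff above; assumes every
# entry carries the same four fields shown there.
with open("inference-cache-config/llama3-8b.json") as f:
    configs = json.load(f)

for model_id, entries in configs.items():
    for entry in entries:
        print(
            f"{model_id}: batch_size={entry['batch_size']}, "
            f"sequence_length={entry['sequence_length']}, "
            f"num_cores={entry['num_cores']}, "
            f"auto_cast_type={entry['auto_cast_type']}"
        )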