Upload Qwen3ForCausalLM
- adapter_config.json +1 -1
- adapter_model.safetensors +2 -2
- generation_config.json +14 -0
adapter_config.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/qwen3-
+  "base_model_name_or_path": "unsloth/qwen3-1.7b-unsloth-bnb-4bit",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
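The updated base_model_name_or_path points at Unsloth's 4-bit (bitsandbytes) Qwen3-1.7B checkpoint, so this adapter is meant to be loaded on top of that base model. A minimal loading sketch with transformers and peft; the adapter repo id "your-username/qwen3-lora" is a placeholder for this repository, not a real id:

```python
# Sketch: attach the LoRA adapter to the 4-bit base model named in adapter_config.json.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "unsloth/qwen3-1.7b-unsloth-bnb-4bit"  # from adapter_config.json
adapter_id = "your-username/qwen3-lora"          # placeholder for this adapter repo

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")
model = PeftModel.from_pretrained(base, adapter_id)  # wraps the base with the adapter weights
```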
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d31f5a062912c9f2df7dca9cb76e5b67c5529aa66af59547cf121bed4563d4a8
+size 12859984
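The weights file is stored via Git LFS, so the diff only shows the pointer: the sha256 of the blob and its size in bytes. A small sketch, assuming the file has already been downloaded locally as adapter_model.safetensors, to check a download against this pointer:

```python
# Sketch: verify a downloaded LFS file against the pointer's oid and size.
import hashlib
from pathlib import Path

path = Path("adapter_model.safetensors")  # assumed local download path
expected_oid = "d31f5a062912c9f2df7dca9cb76e5b67c5529aa66af59547cf121bed4563d4a8"
expected_size = 12859984

assert path.stat().st_size == expected_size, "size mismatch"
digest = hashlib.sha256(path.read_bytes()).hexdigest()
assert digest == expected_oid, "sha256 mismatch"
print("file matches the LFS pointer")
```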
generation_config.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "bos_token_id": 151643,
+  "do_sample": true,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "max_length": 40960,
+  "pad_token_id": 151654,
+  "temperature": 0.6,
+  "top_k": 20,
+  "top_p": 0.95,
+  "transformers_version": "4.52.4"
+}
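Because this file ships with the model, model.generate() picks these sampling defaults up automatically. For reference, a sketch of the equivalent explicit call, continuing from the loading sketch above (the prompt string is just an example):

```python
# Sketch: generate with the same sampling settings the new config declares.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    do_sample=True,
    temperature=0.6,
    top_k=20,
    top_p=0.95,
    max_length=40960,
    pad_token_id=151654,
    eos_token_id=[151645, 151643],  # stop on <|im_end|> or <|endoftext|>
)

inputs = tokenizer("Hello, Qwen3!", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```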