Upload 29 files
- config.json +38 -0
- generation_config.json +7 -0
- inputs_stats.pth +3 -0
- key_stats.pth +3 -0
- outputs_stats.pth +3 -0
- pytorch_model-00001-of-00019.bin +3 -0
- pytorch_model-00002-of-00019.bin +3 -0
- pytorch_model-00003-of-00019.bin +3 -0
- pytorch_model-00004-of-00019.bin +3 -0
- pytorch_model-00005-of-00019.bin +3 -0
- pytorch_model-00006-of-00019.bin +3 -0
- pytorch_model-00007-of-00019.bin +3 -0
- pytorch_model-00008-of-00019.bin +3 -0
- pytorch_model-00009-of-00019.bin +3 -0
- pytorch_model-00010-of-00019.bin +3 -0
- pytorch_model-00011-of-00019.bin +3 -0
- pytorch_model-00012-of-00019.bin +3 -0
- pytorch_model-00013-of-00019.bin +3 -0
- pytorch_model-00014-of-00019.bin +3 -0
- pytorch_model-00015-of-00019.bin +3 -0
- pytorch_model-00016-of-00019.bin +3 -0
- pytorch_model-00017-of-00019.bin +3 -0
- pytorch_model-00018-of-00019.bin +3 -0
- pytorch_model-00019-of-00019.bin +3 -0
- pytorch_model.bin.index.json +0 -0
- special_tokens_map.json +30 -0
- tokenizer.model +3 -0
- tokenizer_config.json +43 -0
- value_stats.pth +3 -0
config.json
ADDED
@@ -0,0 +1,38 @@
+{
+  "_name_or_path": "/root/.cache/huggingface/hub/models--sophosympatheia--Midnight-Miqu-70B-v1.5/snapshots/f6062ca8ccba38ce91eef16f85138e279160b9b9",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "fp16": true,
+  "hidden_act": "silu",
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 28672,
+  "max_position_embeddings": 32764,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
+  "num_key_value_heads": 8,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "quantization_config": {
+    "bits": 4,
+    "group_size": 128,
+    "quant_method": "awq",
+    "version": "gemm",
+    "zero_point": true
+  },
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.41.1",
+  "use_cache": false,
+  "vocab_size": 32000
+}
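
Note: config.json describes a 4-bit AWQ quantization (group_size 128, GEMM kernels, zero-point enabled) of a Llama-architecture model with 80 layers, hidden size 8192, and grouped-query attention (64 attention heads, 8 KV heads), derived from sophosympatheia/Midnight-Miqu-70B-v1.5. Below is a minimal loading sketch; the repository id is a placeholder (not stated in this commit), and it assumes the autoawq package is installed so transformers can dispatch "quant_method": "awq", plus accelerate for device_map="auto".

# Minimal sketch of loading this AWQ checkpoint; the repo id is a placeholder.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "your-namespace/Midnight-Miqu-70B-v1.5-AWQ"  # placeholder, not from this commit
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype="auto",   # the config pins float16
    device_map="auto",    # spread the 19 shards across available GPUs (requires accelerate)
)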
generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 0,
+  "transformers_version": "4.41.1"
+}
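
Note: generation_config.json only pins the special-token ids; generate() falls back to these when no overrides are passed. A small sketch of reading it (placeholder repo id, as above):

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("your-namespace/Midnight-Miqu-70B-v1.5-AWQ")  # placeholder
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # expected: 1 2 0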
inputs_stats.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:852451bc6f6377c791e98a4465a7d119a996c62cfb27fd20fe274880414fc64c
+size 63049502
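
Note: the .pth and .bin entries in this commit are Git LFS pointer files (version, oid, size); the actual blobs live in LFS storage and are fetched by git lfs or huggingface_hub. A sketch for verifying a downloaded file against its pointer, using inputs_stats.pth as the example (the local path is assumed):

import hashlib

expected_oid = "852451bc6f6377c791e98a4465a7d119a996c62cfb27fd20fe274880414fc64c"
expected_size = 63049502

h, size = hashlib.sha256(), 0
with open("inputs_stats.pth", "rb") as f:   # assumes the blob was already downloaded
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
        size += len(chunk)

assert h.hexdigest() == expected_oid and size == expected_size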
key_stats.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69a4ea0d7b376f1e419bb6e535b867eab95feff38ffade539246762988e30a6e
+size 558714
outputs_stats.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca8da7acc4dbd188ad24678d826faa06cf15c554b82390303af11ca3e00f0142
+size 81386059
pytorch_model-00001-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22b6f1759784922c373cfcc792c01c777ebc9fc3f2fac04dee877d97ff48498c
+size 1936452910
pytorch_model-00002-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c171180a658898b49eef44c9ad4e40f8d1e726a342475fad07e57cb70df9b7f
+size 1900314136
pytorch_model-00003-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a549cb5fc697dd819af4791a1285a5b19cf9b5856b2e49711005ed291c82f9f7
+size 1900314200
pytorch_model-00004-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:58f01bde502d7d3181fa8eb0b25ace5ac287fdb66909d2528ed5267f098acf6d
+size 1978798365
pytorch_model-00005-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6ee5815ee89052e7a61491ffff62c61ece3ed32505b7e770057bf2f1bb46a72
+size 1900314264
pytorch_model-00006-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66934ccb23add7544a74857dfc7f78519d452c3503b7ae37b028ce92ee7da95a
+size 1900314264
pytorch_model-00007-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f00bbb74b34d5e88f9f3ccf3485d0488a9bf3ad9641276091e151fbc0e796b2b
+size 1978798365
pytorch_model-00008-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a74e248e5c5196c06ad49ac5132904684fd2c738ba42d77daed3543b0850abb
+size 1900314264
pytorch_model-00009-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f9f1c6d3fb4d49fb23c85dcf1d91cb9ca15e163caeaba03f30e5a121882be00
+size 1900314264
pytorch_model-00010-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e198b8fd393160191ec8df4d4ce2ae0e521adcefbd90caa10217d62dce5eee3
+size 1978798365
pytorch_model-00011-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:74eaf11c929e721feba6c410cef5818aca7eeb4716f6789078d3f6354feb2190
+size 1900314264
pytorch_model-00012-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:94115e757ea86b1b46442ca8fd1b4d11895312c24f930e78e19898fe19b472eb
+size 1900314264
pytorch_model-00013-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84d6d756ed55f7a62cfbaf7576abe2cae9ff97b19c343473e32f4b791ce6a27f
+size 1978798365
pytorch_model-00014-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aac29a7e8234656c3f1e7b2f2d78c32f2a71b842034ae23f1f469be761c11de2
+size 1900314264
pytorch_model-00015-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0bd5a719fd9a3978b0d8746ca9a61b1b6f50be68482e73cc9ac73ef2d7ff5ba7
+size 1900314264
pytorch_model-00016-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27fbc1bb0b5aeafc01ef4b7f853fd1242eb7b09180e9effcfb13b1a3799aa84b
+size 1978798365
pytorch_model-00017-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb9df7e27eebf220dff0db1d906fe59e0cf10898d2de76ebe2f420f30e0cf3dc
+size 1900314264
pytorch_model-00018-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a9da363eec9851c19058e56cecd764b4637740242b9b2eb594c0d8266e3e739
+size 1900314264
pytorch_model-00019-of-00019.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5405543480ca08d1ee461c726a1a61bcff61fed656918b16eedeeecce0c71683
+size 1980081538
pytorch_model.bin.index.json
ADDED
The diff for this file is too large to render.
See raw diff
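
Note: the index is not rendered in this view, but pytorch_model.bin.index.json conventionally maps every parameter name to the shard file that stores it, plus a total byte count. A generic inspection sketch (the parameter name below is illustrative, not read from this repository):

import json

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # total bytes across the 19 shards
print(index["weight_map"]["lm_head.weight"])  # illustrative key -> e.g. "pytorch_model-00019-of-00019.bin"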
special_tokens_map.json
ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
tokenizer_config.json
ADDED
@@ -0,0 +1,43 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": true,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<unk>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
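
Note: tokenizer_config.json ships a chat_template that renders strictly alternating user/assistant turns in the [INST] ... [/INST] format and appends </s> after each assistant turn. A sketch of applying it (placeholder repo id, as above):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-namespace/Midnight-Miqu-70B-v1.5-AWQ")  # placeholder
messages = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi there."},
    {"role": "user", "content": "Tell me a story."},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
# roughly: "<s>[INST] Hello! [/INST]Hi there.</s>[INST] Tell me a story. [/INST]"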
value_stats.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:00f23575e106e20d3354932837de999990cab9b43a05069c32eb4f8331d8f6d8
+size 559202
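
Note: the four *_stats.pth files (inputs, key, value, outputs) are PyTorch-serialized statistics; this commit does not document what produced them, so the sketch below only inspects the serialized object generically.

import torch

stats = torch.load("value_stats.pth", map_location="cpu")
print(type(stats))
if isinstance(stats, dict):
    for name, value in list(stats.items())[:5]:
        print(name, getattr(value, "shape", value))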