ThomasTheMaker committed
Commit 94ef616 · verified · 1 Parent(s): a956dae

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50)
  1. .gitattributes +42 -0
  2. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json +31 -0
  3. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json +8 -0
  4. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt +0 -0
  5. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors +3 -0
  6. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json +34 -0
  7. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json +0 -0
  8. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json +155 -0
  9. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json +0 -0
  10. unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform +3 -0
  11. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json +31 -0
  12. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json +8 -0
  13. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt +0 -0
  14. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors +3 -0
  15. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json +34 -0
  16. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json +0 -0
  17. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json +155 -0
  18. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json +0 -0
  19. unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform +3 -0
  20. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json +31 -0
  21. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json +8 -0
  22. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt +0 -0
  23. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors +3 -0
  24. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json +34 -0
  25. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json +0 -0
  26. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json +155 -0
  27. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json +0 -0
  28. unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform +3 -0
  29. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json +31 -0
  30. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json +8 -0
  31. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt +0 -0
  32. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors +3 -0
  33. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json +34 -0
  34. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json +0 -0
  35. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json +155 -0
  36. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json +0 -0
  37. unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform +3 -0
  38. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json +31 -0
  39. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json +8 -0
  40. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt +0 -0
  41. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors +3 -0
  42. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json +34 -0
  43. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json +0 -0
  44. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json +155 -0
  45. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json +0 -0
  46. unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform +3 -0
  47. unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json +31 -0
  48. unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json +8 -0
  49. unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt +0 -0
  50. unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors +3 -0
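
The commit message above says the folder was pushed with huggingface_hub. As a point of reference, a minimal sketch of what such an upload typically looks like in Python follows; the local folder path and repo id are hypothetical placeholders, not taken from this commit.

from huggingface_hub import HfApi

api = HfApi()
# Pushes every file in the local folder as a single commit; files matched by the
# .gitattributes LFS rules (e.g. the *_transform blobs) are stored via Git LFS.
api.upload_folder(
    folder_path="./replaceme_outputs",                # hypothetical local folder
    repo_id="ThomasTheMaker/pruned-smollm-variants",  # hypothetical repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
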
.gitattributes CHANGED
@@ -125,3 +125,45 @@ unsloth_SmolLM2-360M-Instruct_6_layers_2_8_Open-Orca_SlimOrca_8000_ReplaceMe_lst
 unsloth_SmolLM2-360M-Instruct_7_layers_3_10_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
 unsloth_SmolLM2-360M-Instruct_8_layers_2_10_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
 unsloth_SmolLM2-360M-Instruct_9_layers_4_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_16_layers_2_18_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_17_layers_1_18_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_18_layers_1_19_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_19_layers_9_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_1_layers_9_10_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_20_layers_8_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_21_layers_7_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_22_layers_6_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_23_layers_5_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_24_layers_4_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_25_layers_3_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_26_layers_2_28_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_27_layers_2_29_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_28_layers_2_30_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_29_layers_2_31_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_2_layers_5_7_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_30_layers_1_31_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_31_layers_1_32_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_3_layers_3_6_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_4_layers_3_7_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_5_layers_5_10_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_6_layers_3_9_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_7_layers_3_10_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_8_layers_2_10_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM-360M-Instruct_9_layers_2_11_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_21_layers_1_22_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_22_layers_1_23_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_23_layers_2_25_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_24_layers_3_27_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_25_layers_2_27_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_26_layers_1_27_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_27_layers_3_30_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_28_layers_2_30_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_29_layers_1_30_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_30_layers_1_31_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
+ unsloth_SmolLM2-360M-Instruct_31_layers_1_32_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform filter=lfs diff=lfs merge=lfs -text
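
Each added line tells Git to route the matching *_transform file through Git LFS instead of storing it in the regular object database. A small sketch for checking whether a file name is covered by one of these rules; fnmatch is enough here because every entry is a literal file name (git's own attribute matching is richer than this):

import fnmatch

def is_lfs_tracked(gitattributes_path: str, filename: str) -> bool:
    # True if any .gitattributes pattern carrying filter=lfs matches the file name.
    with open(gitattributes_path) as f:
        for line in f:
            parts = line.split()
            if len(parts) < 2:
                continue
            pattern, attrs = parts[0], parts[1:]
            if "filter=lfs" in attrs and fnmatch.fnmatch(filename, pattern):
                return True
    return False

print(is_lfs_tracked(
    ".gitattributes",
    "unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform",
))
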
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "unsloth/SmolLM-360M-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 960,
+ "initializer_range": 0.02,
+ "intermediate_size": 2560,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 15,
+ "num_hidden_layers": 22,
+ "num_key_value_heads": 5,
+ "pad_token_id": 2,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": true,
+ "vocab_size": 49152
+ }
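
This is the stock unsloth/SmolLM-360M-Instruct Llama config except for num_hidden_layers: the folder name (10_layers_2_12) suggests a 10-block span starting around block 2 was cut from the original 32-layer stack, leaving 22. A quick sketch for loading such a folder and confirming the reduced depth, assuming the directory is used directly with transformers:

import torch
from transformers import AutoConfig, AutoModelForCausalLM

path = "unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1"
cfg = AutoConfig.from_pretrained(path)
assert cfg.num_hidden_layers == 22  # 32 original blocks minus the 10 that were pruned
model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.bfloat16)
print(sum(p.numel() for p in model.parameters()))  # parameter count of the pruned model
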
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_new_tokens": 40,
+ "pad_token_id": 2,
+ "transformers_version": "4.46.3"
+ }
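
generation_config.json only supplies decoding defaults; when generate() is called without explicit arguments these values (notably max_new_tokens=40) are what gets used. A short sketch:

from transformers import AutoModelForCausalLM, AutoTokenizer

path = "unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1"
tok = AutoTokenizer.from_pretrained(path)
model = AutoModelForCausalLM.from_pretrained(path)
inputs = tok("The capital of France is", return_tensors="pt")
# No decoding arguments passed, so max_new_tokens=40 from generation_config.json applies.
out = model.generate(**inputs)
print(tok.decode(out[0], skip_special_tokens=True))
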
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a389da2dcf5bf4ee5b1abd5304df0f3d059c30fbaf5d23b582ff69cef7a7fcfa
+ size 527018344
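
model.safetensors is stored as a Git LFS pointer: the three lines above record only the spec version, the sha256 of the real payload, and its size (527,018,344 bytes, about 503 MiB). A sketch for checking a downloaded copy against the pointer:

import hashlib, os

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so arbitrarily large weights can be hashed without loading them fully.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

local = "unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors"
assert os.path.getsize(local) == 527018344
assert sha256_of(local) == "a389da2dcf5bf4ee5b1abd5304df0f3d059c30fbaf5d23b582ff69cef7a7fcfa"
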
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "<repo_name>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "<reponame>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "5": {
+ "content": "<file_sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "6": {
+ "content": "<filename>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "7": {
+ "content": "<gh_stars>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "8": {
+ "content": "<issue_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "9": {
+ "content": "<issue_comment>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "10": {
+ "content": "<issue_closed>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "11": {
+ "content": "<jupyter_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "12": {
+ "content": "<jupyter_text>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "13": {
+ "content": "<jupyter_code>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "14": {
+ "content": "<jupyter_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "15": {
+ "content": "<jupyter_script>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "16": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": "<|im_start|>",
+ "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "model_max_length": 2048,
+ "pad_token": "<empty_output>",
+ "padding_side": "left",
+ "tokenizer_class": "GPT2Tokenizer",
+ "unk_token": "<|endoftext|>",
+ "vocab_size": 49152
+ }
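
The chat_template above is the ChatML-style template SmolLM-Instruct ships with: every turn is wrapped in <|im_start|>role ... <|im_end|>, and an assistant header is appended when generation is requested. A small usage sketch:

from transformers import AutoTokenizer

path = "unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1"
tok = AutoTokenizer.from_pretrained(path)
messages = [{"role": "user", "content": "Summarize what Git LFS does."}]
# tokenize=False returns the rendered prompt string instead of token ids.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>user
# Summarize what Git LFS does.<|im_end|>
# <|im_start|>assistant
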
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a04b3aba15b660952fc92d969e9e5421317b218b75f4a54e28966963da78a902
+ size 7374868
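
The *_transform companion files are LFS blobs of 7,374,868 bytes each. That is very close to a 960x960 matrix in float64 (960 * 960 * 8 = 7,372,800 bytes) plus a small serialization header, which would fit the "lstsq" tag in the file names, i.e. a least-squares-estimated linear replacement for the removed blocks over the 960-dimensional hidden state. The on-disk format is not documented in this commit, so the loader below is only an assumption:

import torch

# Assumption: the blob is a torch-serialized tensor (or a dict containing one).
obj = torch.load(
    "unsloth_SmolLM-360M-Instruct_10_layers_2_12_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform",
    map_location="cpu",
)
print(type(obj))
if torch.is_tensor(obj):
    print(obj.shape, obj.dtype)  # expected roughly (960, 960) if the size arithmetic holds
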
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "unsloth/SmolLM-360M-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 960,
+ "initializer_range": 0.02,
+ "intermediate_size": 2560,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 15,
+ "num_hidden_layers": 21,
+ "num_key_value_heads": 5,
+ "pad_token_id": 2,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": true,
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_new_tokens": 40,
+ "pad_token_id": 2,
+ "transformers_version": "4.46.3"
+ }
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45f4990dd535c1e9eee65f17dbf2c6d19b15c1aa019ab35bbd6fb587b09d288f
+ size 507352688
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "<repo_name>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "<reponame>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "5": {
+ "content": "<file_sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "6": {
+ "content": "<filename>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "7": {
+ "content": "<gh_stars>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "8": {
+ "content": "<issue_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "9": {
+ "content": "<issue_comment>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "10": {
+ "content": "<issue_closed>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "11": {
+ "content": "<jupyter_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "12": {
+ "content": "<jupyter_text>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "13": {
+ "content": "<jupyter_code>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "14": {
+ "content": "<jupyter_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "15": {
+ "content": "<jupyter_script>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "16": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": "<|im_start|>",
+ "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "model_max_length": 2048,
+ "pad_token": "<empty_output>",
+ "padding_side": "left",
+ "tokenizer_class": "GPT2Tokenizer",
+ "unk_token": "<|endoftext|>",
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_11_layers_2_13_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e93e3ae3216a0fa4f221818726e3e86d21a5b9797bf7342fcfee144b2d89c57
+ size 7374868
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "unsloth/SmolLM-360M-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 960,
+ "initializer_range": 0.02,
+ "intermediate_size": 2560,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 15,
+ "num_hidden_layers": 20,
+ "num_key_value_heads": 5,
+ "pad_token_id": 2,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": true,
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_new_tokens": 40,
+ "pad_token_id": 2,
+ "transformers_version": "4.46.3"
+ }
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:836c20a3bbd5981cb6b4e7964495179a713a0fadea4c3e8c2fc2cfa19ada21d4
+ size 487687032
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "<repo_name>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "<reponame>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "5": {
+ "content": "<file_sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "6": {
+ "content": "<filename>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "7": {
+ "content": "<gh_stars>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "8": {
+ "content": "<issue_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "9": {
+ "content": "<issue_comment>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "10": {
+ "content": "<issue_closed>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "11": {
+ "content": "<jupyter_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "12": {
+ "content": "<jupyter_text>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "13": {
+ "content": "<jupyter_code>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "14": {
+ "content": "<jupyter_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "15": {
+ "content": "<jupyter_script>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "16": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": "<|im_start|>",
+ "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "model_max_length": 2048,
+ "pad_token": "<empty_output>",
+ "padding_side": "left",
+ "tokenizer_class": "GPT2Tokenizer",
+ "unk_token": "<|endoftext|>",
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_12_layers_2_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f76197d26f4fc9608aba471c4bd2978148038ddb623828c2dd5a697b11ed23f
+ size 7374868
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "unsloth/SmolLM-360M-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 960,
+ "initializer_range": 0.02,
+ "intermediate_size": 2560,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 15,
+ "num_hidden_layers": 19,
+ "num_key_value_heads": 5,
+ "pad_token_id": 2,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": true,
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_new_tokens": 40,
+ "pad_token_id": 2,
+ "transformers_version": "4.46.3"
+ }
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa9b5a3e742b2c67677ab31f9b6e5ce3c33cd33312aa2b0b439c1e5132c1c9ce
+ size 468021376
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "<repo_name>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "<reponame>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "5": {
+ "content": "<file_sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "6": {
+ "content": "<filename>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "7": {
+ "content": "<gh_stars>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "8": {
+ "content": "<issue_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "9": {
+ "content": "<issue_comment>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "10": {
+ "content": "<issue_closed>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "11": {
+ "content": "<jupyter_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "12": {
+ "content": "<jupyter_text>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "13": {
+ "content": "<jupyter_code>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "14": {
+ "content": "<jupyter_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "15": {
+ "content": "<jupyter_script>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "16": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": "<|im_start|>",
+ "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "model_max_length": 2048,
+ "pad_token": "<empty_output>",
+ "padding_side": "left",
+ "tokenizer_class": "GPT2Tokenizer",
+ "unk_token": "<|endoftext|>",
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_13_layers_1_14_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee2b2a9ecc40312f2510203d151ec3669d2ae590d766e3da0ad9e5ce596e8410
+ size 7374868
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "unsloth/SmolLM-360M-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 960,
+ "initializer_range": 0.02,
+ "intermediate_size": 2560,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 15,
+ "num_hidden_layers": 18,
+ "num_key_value_heads": 5,
+ "pad_token_id": 2,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": true,
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_new_tokens": 40,
+ "pad_token_id": 2,
+ "transformers_version": "4.46.3"
+ }
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8bcc837724276eab2913fdeb1fe1b2712f8f0dadfeada8cc4ca593f76325082f
+ size 448355712
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "<repo_name>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "<reponame>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "5": {
+ "content": "<file_sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "6": {
+ "content": "<filename>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "7": {
+ "content": "<gh_stars>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "8": {
+ "content": "<issue_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "9": {
+ "content": "<issue_comment>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "10": {
+ "content": "<issue_closed>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "11": {
+ "content": "<jupyter_start>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "12": {
+ "content": "<jupyter_text>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "13": {
+ "content": "<jupyter_code>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "14": {
+ "content": "<jupyter_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "15": {
+ "content": "<jupyter_script>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "16": {
+ "content": "<empty_output>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>"
+ ],
+ "bos_token": "<|im_start|>",
+ "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "model_max_length": 2048,
+ "pad_token": "<empty_output>",
+ "padding_side": "left",
+ "tokenizer_class": "GPT2Tokenizer",
+ "unk_token": "<|endoftext|>",
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_14_layers_1_15_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1_transform ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a0d57d2abcaab6f938021b34ccb213f5c29c862bdaac8f84a85d57b24e9ca5e
+ size 7374868
unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "unsloth/SmolLM-360M-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 64,
+ "hidden_act": "silu",
+ "hidden_size": 960,
+ "initializer_range": 0.02,
+ "intermediate_size": 2560,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 15,
+ "num_hidden_layers": 17,
+ "num_key_value_heads": 5,
+ "pad_token_id": 2,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": true,
+ "vocab_size": 49152
+ }
unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_new_tokens": 40,
+ "pad_token_id": 2,
+ "transformers_version": "4.46.3"
+ }
unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
unsloth_SmolLM-360M-Instruct_15_layers_2_17_Open-Orca_SlimOrca_8000_ReplaceMe_lstsq_1/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfdbb4c3c5c13f30974c94b5ba621b7ea061f47d96e0858e63d1104df88ae2a6
+ size 428690056