Upload 11 files
- .gitattributes +1 -0
- chat_template.jinja +397 -0
- config.json +75 -0
- generation_config.json +10 -0
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +855 -0
- special_tokens_map.json +23 -0
- tokenizer.json +3 -0
- tokenizer_config.json +183 -0
    	
.gitattributes CHANGED

@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
    	
chat_template.jinja ADDED

@@ -0,0 +1,397 @@
{#-
  In addition to the normal inputs of `messages` and `tools`, this template also accepts the
  following kwargs:
  - "builtin_tools": A list, can contain "browser" and/or "python".
  - "model_identity": A string that optionally describes the model identity.
  - "reasoning_effort": A string that describes the reasoning effort, defaults to "medium".
 #}

{#- Tool Definition Rendering ============================================== #}
{%- macro render_typescript_type(param_spec, required_params, is_nullable=false) -%}
    {%- if param_spec.type == "array" -%}
        {%- if param_spec['items'] -%}
            {%- if param_spec['items']['type'] == "string" -%}
                {{- "string[]" }}
            {%- elif param_spec['items']['type'] == "number" -%}
                {{- "number[]" }}
            {%- elif param_spec['items']['type'] == "integer" -%}
                {{- "number[]" }}
            {%- elif param_spec['items']['type'] == "boolean" -%}
                {{- "boolean[]" }}
            {%- else -%}
                {%- set inner_type = render_typescript_type(param_spec['items'], required_params) -%}
                {%- if inner_type == "object | object" or inner_type|length > 50 -%}
                    {{- "any[]" }}
                {%- else -%}
                    {{- inner_type + "[]" }}
                {%- endif -%}
            {%- endif -%}
            {%- if param_spec.nullable -%}
                {{- " | null" }}
            {%- endif -%}
        {%- else -%}
            {{- "any[]" }}
            {%- if param_spec.nullable -%}
                {{- " | null" }}
            {%- endif -%}
        {%- endif -%}
    {%- elif param_spec.type is defined and param_spec.type is iterable and param_spec.type is not string and param_spec.type is not mapping and param_spec.type[0] is defined -%}
        {#- Handle array of types like ["object", "object"] from Union[dict, list] #}
        {%- if param_spec.type | length > 1 -%}
            {{- param_spec.type | join(" | ") }}
        {%- else -%}
            {{- param_spec.type[0] }}
        {%- endif -%}
    {%- elif param_spec.oneOf -%}
        {#- Handle oneOf schemas - check for complex unions and fallback to any #}
        {%- set has_object_variants = false -%}
        {%- for variant in param_spec.oneOf -%}
            {%- if variant.type == "object" -%}
                {%- set has_object_variants = true -%}
            {%- endif -%}
        {%- endfor -%}
        {%- if has_object_variants and param_spec.oneOf|length > 1 -%}
            {{- "any" }}
        {%- else -%}
            {%- for variant in param_spec.oneOf -%}
                {{- render_typescript_type(variant, required_params) -}}
                {%- if variant.description %}
                    {{- "// " + variant.description }}
                {%- endif -%}
                {%- if variant.default is defined %}
                    {{ "// default: " + variant.default|tojson }}
                {%- endif -%}
                {%- if not loop.last %}
                    {{- " | " }}
                {% endif -%}
            {%- endfor -%}
        {%- endif -%}
    {%- elif param_spec.type == "string" -%}
        {%- if param_spec.enum -%}
            {{- '"' + param_spec.enum|join('" | "') + '"' -}}
        {%- else -%}
            {{- "string" }}
            {%- if param_spec.nullable %}
                {{- " | null" }}
            {%- endif -%}
        {%- endif -%}
    {%- elif param_spec.type == "number" -%}
        {{- "number" }}
    {%- elif param_spec.type == "integer" -%}
        {{- "number" }}
    {%- elif param_spec.type == "boolean" -%}
        {{- "boolean" }}

    {%- elif param_spec.type == "object" -%}
        {%- if param_spec.properties -%}
            {{- "{
" }}
            {%- for prop_name, prop_spec in param_spec.properties.items() -%}
                {{- prop_name -}}
                {%- if prop_name not in (param_spec.required or []) -%}
                    {{- "?" }}
                {%- endif -%}
                {{- ": " }}
                {{ render_typescript_type(prop_spec, param_spec.required or []) }}
                {%- if not loop.last -%}
                    {{-", " }}
                {%- endif -%}
            {%- endfor -%}
            {{- "}" }}
        {%- else -%}
            {{- "object" }}
        {%- endif -%}
    {%- else -%}
        {{- "any" }}
    {%- endif -%}
{%- endmacro -%}

{%- macro render_tool_namespace(namespace_name, tools) -%}
    {{- "## " + namespace_name + "

" }}
    {{- "namespace " + namespace_name + " {

" }}
    {%- for tool in tools %}
        {%- set tool = tool.function %}
        {{- "// " + tool.description + "
" }}
        {{- "type "+ tool.name + " = " }}
        {%- if tool.parameters and tool.parameters.properties %}
            {{- "(_: {
" }}
            {%- for param_name, param_spec in tool.parameters.properties.items() %}
                {%- if param_spec.description %}
                    {{- "// " + param_spec.description + "
" }}
                {%- endif %}
                {{- param_name }}
                {%- if param_name not in (tool.parameters.required or []) -%}
                    {{- "?" }}
                {%- endif -%}
                {{- ": " }}
                {{- render_typescript_type(param_spec, tool.parameters.required or []) }}
                {%- if param_spec.default is defined -%}
                    {%- if param_spec.enum %}
                        {{- ", // default: " + param_spec.default }}
                    {%- elif param_spec.oneOf %}
                        {{- "// default: " + param_spec.default }}
                    {%- else %}
                        {{- ", // default: " + param_spec.default|tojson }}
                    {%- endif -%}
                {%- endif -%}
                {%- if not loop.last %}
                    {{- ",
" }}
                {%- else %}
                    {{- "
" }}
                {%- endif -%}
            {%- endfor %}
            {{- "}) => any;

" }}
        {%- else -%}
            {{- "() => any;

" }}
        {%- endif -%}
    {%- endfor %}
    {{- "} // namespace " + namespace_name }}
{%- endmacro -%}

{%- macro render_builtin_tools(browser_tool, python_tool) -%}
    {%- if browser_tool %}
        {{- "## browser

" }}
        {{- "// Tool for browsing.
" }}
        {{- "// The `cursor` appears in brackets before each browsing display: `[{cursor}]`.
" }}
        {{- "// Cite information from the tool using the following format:
" }}
        {{- "// `【{cursor}†L{line_start}(-L{line_end})?】`, for example: `【6†L9-L11】` or `【8†L3】`.
" }}
        {{- "// Do not quote more than 10 words directly from the tool output.
" }}
        {{- "// sources=web (default: web)
" }}
        {{- "namespace browser {

" }}
        {{- "// Searches for information related to `query` and displays `topn` results.
" }}
        {{- "type search = (_: {
" }}
        {{- "query: string,
" }}
        {{- "topn?: number, // default: 10
" }}
        {{- "source?: string,
" }}
        {{- "}) => any;

" }}
        {{- "// Opens the link `id` from the page indicated by `cursor` starting at line number `loc`, showing `num_lines` lines.
" }}
        {{- "// Valid link ids are displayed with the formatting: `【{id}†.*】`.
" }}
        {{- "// If `cursor` is not provided, the most recent page is implied.
" }}
        {{- "// If `id` is a string, it is treated as a fully qualified URL associated with `source`.
" }}
        {{- "// If `loc` is not provided, the viewport will be positioned at the beginning of the document or centered on the most relevant passage, if available.
" }}
        {{- "// Use this function without `id` to scroll to a new location of an opened page.
" }}
        {{- "type open = (_: {
" }}
        {{- "id?: number | string, // default: -1
" }}
        {{- "cursor?: number, // default: -1
" }}
        {{- "loc?: number, // default: -1
" }}
        {{- "num_lines?: number, // default: -1
" }}
        {{- "view_source?: boolean, // default: false
" }}
        {{- "source?: string,
" }}
        {{- "}) => any;

" }}
        {{- "// Finds exact matches of `pattern` in the current page, or the page given by `cursor`.
" }}
        {{- "type find = (_: {
" }}
        {{- "pattern: string,
" }}
        {{- "cursor?: number, // default: -1
" }}
        {{- "}) => any;

" }}
        {{- "} // namespace browser

" }}
    {%- endif -%}

    {%- if python_tool %}
        {{- "## python

" }}
        {{- "Use this tool to execute Python code in your chain of thought. The code will not be shown to the user. This tool should be used for internal reasoning, but not for code that is intended to be visible to the user (e.g. when creating plots, tables, or files).

" }}
        {{- "When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 120.0 seconds. The drive at '/mnt/data' can be used to save and persist user files. Internet access for this session is UNKNOWN. Depends on the cluster.

" }}
    {%- endif -%}
{%- endmacro -%}

{#- System Message Construction ============================================ #}
{%- macro build_system_message() -%}
    {%- if model_identity is not defined %}
        {%- set model_identity = "You are ChatGPT, a large language model trained by OpenAI." %}
    {%- endif %}
    {{- model_identity + "
" }}
    {{- "Knowledge cutoff: 2024-06
" }}
    {{- "Current date: " + strftime_now("%Y-%m-%d") + "

" }}
    {%- if reasoning_effort is not defined %}
        {%- set reasoning_effort = "medium" %}
    {%- endif %}
    {{- "Reasoning: " + reasoning_effort + "

" }}
    {%- if builtin_tools %}
        {{- "# Tools

" }}
        {%- set available_builtin_tools = namespace(browser=false, python=false) %}
        {%- for tool in builtin_tools %}
            {%- if tool == "browser" %}
                {%- set available_builtin_tools.browser = true %}
            {%- elif tool == "python" %}
                {%- set available_builtin_tools.python = true %}
            {%- endif %}
        {%- endfor %}
        {{- render_builtin_tools(available_builtin_tools.browser, available_builtin_tools.python) }}
    {%- endif -%}
    {{- "# Valid channels: analysis, commentary, final. Channel must be included for every message." }}
    {%- if tools -%}
        {{- "
Calls to these tools must go to the commentary channel: 'functions'." }}
    {%- endif -%}
{%- endmacro -%}

{#- Main Template Logic ================================================= #}
{#- Set defaults #}

{#- Render system message #}
{{- "<|start|>system<|message|>" }}
{{- build_system_message() }}
{{- "<|end|>" }}

{#- Extract developer message #}
{%- if messages[0].role == "developer" or messages[0].role == "system" %}
    {%- set developer_message = messages[0].content %}
    {%- set loop_messages = messages[1:] %}
{%- else %}
    {%- set developer_message = "" %}
    {%- set loop_messages = messages %}
{%- endif %}

{#- Render developer message #}
{%- if developer_message or tools %}
    {{- "<|start|>developer<|message|>" }}
    {%- if developer_message %}
        {{- "# Instructions

" }}
        {{- developer_message }}
    {%- endif %}
    {%- if tools -%}
        {{- "

" }}
        {{- "# Tools

" }}
        {{- render_tool_namespace("functions", tools) }}
    {%- endif -%}
    {{- "<|end|>" }}
{%- endif %}

{#- Render messages #}
{%- set last_tool_call = namespace(name=none) %}
{%- for message in loop_messages -%}
    {#- At this point only assistant/user/tool messages should remain #}
    {%- if message.role == 'assistant' -%}
        {#- Checks to ensure the messages are being passed in the format we expect #}
        {%- if "content" in message %}
            {%- if "<|channel|>analysis<|message|>" in message.content or "<|channel|>final<|message|>" in message.content %}
                {{- raise_exception("You have passed a message containing <|channel|> tags in the content field. Instead of doing this, you should pass analysis messages (the string between '<|message|>' and '<|end|>') in the 'thinking' field, and final messages (the string between '<|message|>' and '<|end|>') in the 'content' field.") }}
            {%- endif %}
        {%- endif %}
        {%- if "thinking" in message %}
            {%- if "<|channel|>analysis<|message|>" in message.thinking or "<|channel|>final<|message|>" in message.thinking %}
                {{- raise_exception("You have passed a message containing <|channel|> tags in the thinking field. Instead of doing this, you should pass analysis messages (the string between '<|message|>' and '<|end|>') in the 'thinking' field, and final messages (the string between '<|message|>' and '<|end|>') in the 'content' field.") }}
            {%- endif %}
        {%- endif %}
        {%- if "tool_calls" in message %}
            {#- We assume max 1 tool call per message, and so we infer the tool call name #}
            {#- in "tool" messages from the most recent assistant tool call name #}
            {%- set tool_call = message.tool_calls[0] %}
            {%- if tool_call.function %}
                {%- set tool_call = tool_call.function %}
            {%- endif %}
            {%- if message.content and message.thinking %}
                {{- raise_exception("Cannot pass both content and thinking in an assistant message with tool calls! Put the analysis message in one or the other, but not both.") }}
            {%- elif message.content %}
                {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.content + "<|end|>" }}
            {%- elif message.thinking %}
                {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.thinking + "<|end|>" }}
            {%- endif %}
            {{- "<|start|>assistant to=" }}
            {{- "functions." + tool_call.name + "<|channel|>commentary " }}
            {{- (tool_call.content_type if tool_call.content_type is defined else "json") + "<|message|>" }}
            {{- tool_call.arguments|tojson }}
            {{- "<|call|>" }}
            {%- set last_tool_call.name = tool_call.name %}
        {%- elif loop.last and not add_generation_prompt %}
            {#- Only render the CoT if the final turn is an assistant turn and add_generation_prompt is false #}
            {#- This is a situation that should only occur in training, never in inference. #}
            {%- if "thinking" in message %}
                {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.thinking + "<|end|>" }}
            {%- endif %}
            {#- <|return|> indicates the end of generation, but <|end|> does not #}
            {#- <|return|> should never be an input to the model, but we include it as the final token #}
            {#- when training, so the model learns to emit it. #}
            {{- "<|start|>assistant<|channel|>final<|message|>" + message.content + "<|return|>" }}
        {%- else %}
            {#- CoT is dropped during all previous turns, so we never render it for inference #}
            {{- "<|start|>assistant<|channel|>final<|message|>" + message.content + "<|end|>" }}
            {%- set last_tool_call.name = none %}
        {%- endif %}
    {%- elif message.role == 'tool' -%}
        {%- if last_tool_call.name is none %}
            {{- raise_exception("Message has tool role, but there was no previous assistant message with a tool call!") }}
        {%- endif %}
        {{- "<|start|>functions." + last_tool_call.name }}
        {{- " to=assistant<|channel|>commentary<|message|>" + message.content|tojson + "<|end|>" }}
    {%- elif message.role == 'user' -%}
        {{- "<|start|>user<|message|>" + message.content + "<|end|>" }}
    {%- endif -%}
{%- endfor -%}

{#- Generation prompt #}
{%- if add_generation_prompt -%}
<|start|>assistant
{%- endif -%}
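The header comment of this template documents three extra kwargs (`builtin_tools`, `model_identity`, `reasoning_effort`) on top of the usual `messages` and `tools`. A minimal sketch of passing them through `transformers`' `apply_chat_template`, assuming a transformers version recent enough to pick up the standalone `chat_template.jinja`; the repository id below is a placeholder, not this repo's actual name:

```python
# Sketch only: "org/model-repo" is a placeholder for this repository's id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("org/model-repo")

messages = [
    {"role": "system", "content": "Answer concisely."},
    {"role": "user", "content": "What is the capital of France?"},
]

# Extra keyword arguments are forwarded into the Jinja template, so the
# kwargs documented in the template header can be supplied here.
prompt = tok.apply_chat_template(
    messages,
    add_generation_prompt=True,   # appends "<|start|>assistant"
    tokenize=False,
    reasoning_effort="high",      # template default is "medium"
    builtin_tools=["browser", "python"],
    model_identity="You are ChatGPT, a large language model trained by OpenAI.",
)
print(prompt)
```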
    	
config.json ADDED

@@ -0,0 +1,75 @@
{
    "architectures": [
        "GptOssForCausalLM"
    ],
    "attention_bias": true,
    "attention_dropout": 0.0,
    "eos_token_id": 200002,
    "experts_per_token": 4,
    "head_dim": 64,
    "hidden_act": "silu",
    "hidden_size": 2880,
    "initial_context_length": 4096,
    "initializer_range": 0.02,
    "intermediate_size": 2880,
    "layer_types": [
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention",
        "sliding_attention",
        "full_attention"
    ],
    "max_position_embeddings": 131072,
    "model_type": "gpt_oss",
    "num_attention_heads": 64,
    "num_experts_per_tok": 4,
    "num_hidden_layers": 24,
    "num_key_value_heads": 8,
    "num_local_experts": 32,
    "output_router_logits": false,
    "pad_token_id": 199999,
    "quantization": {
        "group_size": 32,
        "bits": 6
    },
    "quantization_config": {
        "group_size": 32,
        "bits": 6
    },
    "rms_norm_eps": 1e-05,
    "rope_scaling": {
        "beta_fast": 32.0,
        "beta_slow": 1.0,
        "factor": 32.0,
        "original_max_position_embeddings": 4096,
        "rope_type": "yarn",
        "truncate": false
    },
    "rope_theta": 150000,
    "router_aux_loss_coef": 0.9,
    "sliding_window": 128,
    "swiglu_limit": 7.0,
    "tie_word_embeddings": false,
    "transformers_version": "4.55.0.dev0",
    "use_cache": true,
    "vocab_size": 201088
}
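The config describes a 24-layer gpt_oss mixture-of-experts model that alternates sliding-window (128-token) and full attention, routes 4 of 32 local experts per token, and extends the original 4096-token context to 131072 positions via YaRN RoPE scaling (factor 32); the `quantization` / `quantization_config` blocks indicate grouped 6-bit weights (group size 32). A small sketch that reads a local copy of this `config.json` (the file path is an assumption) and surfaces those facts:

```python
import json

# Assumes a local copy of the config.json shown above.
with open("config.json") as f:
    cfg = json.load(f)

# Attention layout: how many layers use sliding-window vs. full attention.
n_sliding = cfg["layer_types"].count("sliding_attention")
n_full = cfg["layer_types"].count("full_attention")
print(f"{cfg['num_hidden_layers']} layers: "
      f"{n_sliding} sliding (window={cfg['sliding_window']}), {n_full} full")

# MoE routing: experts available vs. experts active per token.
print(f"MoE: {cfg['num_local_experts']} experts, "
      f"{cfg['num_experts_per_tok']} active per token")

# YaRN context extension: original context * factor == max positions.
rs = cfg["rope_scaling"]
assert rs["original_max_position_embeddings"] * rs["factor"] == cfg["max_position_embeddings"]
print(f"YaRN: {rs['original_max_position_embeddings']} x {int(rs['factor'])} "
      f"= {cfg['max_position_embeddings']} positions")
```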
    	
generation_config.json ADDED

@@ -0,0 +1,10 @@
{
  "bos_token_id": 199998,
  "do_sample": true,
  "eos_token_id": [
    200002,
    199999
  ],
  "pad_token_id": 199999,
  "transformers_version": "4.55.0.dev0"
}
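Note that `eos_token_id` is a list here, so generation stops when either id (200002, or 199999, which doubles as the pad id) is emitted. A tiny sketch of that stop check, assuming a local copy of the file:

```python
import json

# Assumes a local copy of the generation_config.json shown above.
with open("generation_config.json") as f:
    gen_cfg = json.load(f)

stop_ids = set(gen_cfg["eos_token_id"])  # {200002, 199999}

def is_stop(token_id: int) -> bool:
    """A sampling loop stops once any configured EOS id is produced."""
    return token_id in stop_ids

print(gen_cfg["do_sample"], sorted(stop_ids))
```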
    	
model-00001-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:822a964e76ef1b8824b6e690dc73cdffaeedba4f360263ed17970db49e062a0a
size 5318561603
    	
model-00002-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1b6abb7684ff812910b5be51ae0baa6de175202f33036427fec643617cc5de40
size 5300097845
    	
model-00003-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:58f0f29e717a793ef02dcb91018aa078bb85c0c7f3322abdb437584ce0aa4027
size 5276762711
    	
model-00004-of-00004.safetensors ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:64347e3a2db5bdd7c95f83551d9337583788e7b866736dc4a622f099c79c954a
size 2412929400
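Each `model-0000N-of-00004.safetensors` entry above is a Git LFS pointer file: the actual shard lives in LFS storage, and the pointer only records its SHA-256 (`oid`) and byte `size`. A sketch of verifying a downloaded shard against its pointer, using the values from the first shard; it assumes the shard has already been fetched to the working directory:

```python
import hashlib
import os

def verify_lfs_pointer(shard_path: str, expected_sha256: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size recorded in its LFS pointer."""
    if os.path.getsize(shard_path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(shard_path, "rb") as f:
        # Hash in 1 MiB chunks to avoid loading a multi-GB shard into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_sha256

print(verify_lfs_pointer(
    "model-00001-of-00004.safetensors",
    "822a964e76ef1b8824b6e690dc73cdffaeedba4f360263ed17970db49e062a0a",
    5318561603,
))
```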
    	
model.safetensors.index.json ADDED

@@ -0,0 +1,855 @@
{
    "metadata": {
        "total_size": 18308254848,
        "total_parameters": 20914755648
    },
    "weight_map": {
        "lm_head.biases": "model-00004-of-00004.safetensors",
        "lm_head.scales": "model-00004-of-00004.safetensors",
        "lm_head.weight": "model-00004-of-00004.safetensors",
        "model.embed_tokens.biases": "model-00001-of-00004.safetensors",
        "model.embed_tokens.scales": "model-00001-of-00004.safetensors",
        "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.down_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.down_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.down_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.down_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.router.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.router.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.router.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.mlp.router.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.sinks": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.down_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.down_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.down_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.down_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.router.bias": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.router.biases": "model-00001-of-00004.safetensors",
        "model.layers.1.mlp.router.scales": "model-00001-of-00004.safetensors",
| 64 | 
            +
                    "model.layers.1.mlp.router.weight": "model-00001-of-00004.safetensors",
         | 
| 65 | 
            +
                    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 66 | 
            +
                    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 67 | 
            +
                    "model.layers.1.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
         | 
| 68 | 
            +
                    "model.layers.1.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
         | 
| 69 | 
            +
                    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 70 | 
            +
                    "model.layers.1.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 71 | 
            +
                    "model.layers.1.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
         | 
| 72 | 
            +
                    "model.layers.1.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
         | 
| 73 | 
            +
                    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 74 | 
            +
                    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 75 | 
            +
                    "model.layers.1.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
         | 
| 76 | 
            +
                    "model.layers.1.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
         | 
| 77 | 
            +
                    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 78 | 
            +
                    "model.layers.1.self_attn.sinks": "model-00001-of-00004.safetensors",
         | 
| 79 | 
            +
                    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 80 | 
            +
                    "model.layers.1.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
         | 
| 81 | 
            +
                    "model.layers.1.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
         | 
| 82 | 
            +
                    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
         | 
| 83 | 
            +
                    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 84 | 
            +
                    "model.layers.10.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 85 | 
            +
                    "model.layers.10.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 86 | 
            +
                    "model.layers.10.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 87 | 
            +
                    "model.layers.10.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 88 | 
            +
                    "model.layers.10.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 89 | 
            +
                    "model.layers.10.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 90 | 
            +
                    "model.layers.10.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 91 | 
            +
                    "model.layers.10.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 92 | 
            +
                    "model.layers.10.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 93 | 
            +
                    "model.layers.10.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 94 | 
            +
                    "model.layers.10.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 95 | 
            +
                    "model.layers.10.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 96 | 
            +
                    "model.layers.10.mlp.router.bias": "model-00002-of-00004.safetensors",
         | 
| 97 | 
            +
                    "model.layers.10.mlp.router.biases": "model-00002-of-00004.safetensors",
         | 
| 98 | 
            +
                    "model.layers.10.mlp.router.scales": "model-00002-of-00004.safetensors",
         | 
| 99 | 
            +
                    "model.layers.10.mlp.router.weight": "model-00002-of-00004.safetensors",
         | 
| 100 | 
            +
                    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 101 | 
            +
                    "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 102 | 
            +
                    "model.layers.10.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 103 | 
            +
                    "model.layers.10.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 104 | 
            +
                    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 105 | 
            +
                    "model.layers.10.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 106 | 
            +
                    "model.layers.10.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 107 | 
            +
                    "model.layers.10.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 108 | 
            +
                    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 109 | 
            +
                    "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 110 | 
            +
                    "model.layers.10.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 111 | 
            +
                    "model.layers.10.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 112 | 
            +
                    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 113 | 
            +
                    "model.layers.10.self_attn.sinks": "model-00002-of-00004.safetensors",
         | 
| 114 | 
            +
                    "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 115 | 
            +
                    "model.layers.10.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 116 | 
            +
                    "model.layers.10.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 117 | 
            +
                    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 118 | 
            +
                    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 119 | 
            +
                    "model.layers.11.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 120 | 
            +
                    "model.layers.11.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 121 | 
            +
                    "model.layers.11.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 122 | 
            +
                    "model.layers.11.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 123 | 
            +
                    "model.layers.11.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 124 | 
            +
                    "model.layers.11.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 125 | 
            +
                    "model.layers.11.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 126 | 
            +
                    "model.layers.11.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 127 | 
            +
                    "model.layers.11.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 128 | 
            +
                    "model.layers.11.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 129 | 
            +
                    "model.layers.11.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 130 | 
            +
                    "model.layers.11.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 131 | 
            +
                    "model.layers.11.mlp.router.bias": "model-00002-of-00004.safetensors",
         | 
| 132 | 
            +
                    "model.layers.11.mlp.router.biases": "model-00002-of-00004.safetensors",
         | 
| 133 | 
            +
                    "model.layers.11.mlp.router.scales": "model-00002-of-00004.safetensors",
         | 
| 134 | 
            +
                    "model.layers.11.mlp.router.weight": "model-00002-of-00004.safetensors",
         | 
| 135 | 
            +
                    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 136 | 
            +
                    "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 137 | 
            +
                    "model.layers.11.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 138 | 
            +
                    "model.layers.11.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 139 | 
            +
                    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 140 | 
            +
                    "model.layers.11.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 141 | 
            +
                    "model.layers.11.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 142 | 
            +
                    "model.layers.11.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 143 | 
            +
                    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 144 | 
            +
                    "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 145 | 
            +
                    "model.layers.11.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 146 | 
            +
                    "model.layers.11.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 147 | 
            +
                    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 148 | 
            +
                    "model.layers.11.self_attn.sinks": "model-00002-of-00004.safetensors",
         | 
| 149 | 
            +
                    "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 150 | 
            +
                    "model.layers.11.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 151 | 
            +
                    "model.layers.11.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 152 | 
            +
                    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 153 | 
            +
                    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 154 | 
            +
                    "model.layers.12.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 155 | 
            +
                    "model.layers.12.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 156 | 
            +
                    "model.layers.12.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 157 | 
            +
                    "model.layers.12.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 158 | 
            +
                    "model.layers.12.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 159 | 
            +
                    "model.layers.12.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 160 | 
            +
                    "model.layers.12.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 161 | 
            +
                    "model.layers.12.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 162 | 
            +
                    "model.layers.12.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 163 | 
            +
                    "model.layers.12.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 164 | 
            +
                    "model.layers.12.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 165 | 
            +
                    "model.layers.12.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 166 | 
            +
                    "model.layers.12.mlp.router.bias": "model-00002-of-00004.safetensors",
         | 
| 167 | 
            +
                    "model.layers.12.mlp.router.biases": "model-00002-of-00004.safetensors",
         | 
| 168 | 
            +
                    "model.layers.12.mlp.router.scales": "model-00002-of-00004.safetensors",
         | 
| 169 | 
            +
                    "model.layers.12.mlp.router.weight": "model-00002-of-00004.safetensors",
         | 
| 170 | 
            +
                    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 171 | 
            +
                    "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 172 | 
            +
                    "model.layers.12.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 173 | 
            +
                    "model.layers.12.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 174 | 
            +
                    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 175 | 
            +
                    "model.layers.12.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 176 | 
            +
                    "model.layers.12.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 177 | 
            +
                    "model.layers.12.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 178 | 
            +
                    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 179 | 
            +
                    "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 180 | 
            +
                    "model.layers.12.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 181 | 
            +
                    "model.layers.12.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 182 | 
            +
                    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 183 | 
            +
                    "model.layers.12.self_attn.sinks": "model-00002-of-00004.safetensors",
         | 
| 184 | 
            +
                    "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 185 | 
            +
                    "model.layers.12.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 186 | 
            +
                    "model.layers.12.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 187 | 
            +
                    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 188 | 
            +
                    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 189 | 
            +
                    "model.layers.13.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 190 | 
            +
                    "model.layers.13.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 191 | 
            +
                    "model.layers.13.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 192 | 
            +
                    "model.layers.13.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 193 | 
            +
                    "model.layers.13.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 194 | 
            +
                    "model.layers.13.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 195 | 
            +
                    "model.layers.13.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 196 | 
            +
                    "model.layers.13.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 197 | 
            +
                    "model.layers.13.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 198 | 
            +
                    "model.layers.13.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 199 | 
            +
                    "model.layers.13.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 200 | 
            +
                    "model.layers.13.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 201 | 
            +
                    "model.layers.13.mlp.router.bias": "model-00002-of-00004.safetensors",
         | 
| 202 | 
            +
                    "model.layers.13.mlp.router.biases": "model-00002-of-00004.safetensors",
         | 
| 203 | 
            +
                    "model.layers.13.mlp.router.scales": "model-00002-of-00004.safetensors",
         | 
| 204 | 
            +
                    "model.layers.13.mlp.router.weight": "model-00002-of-00004.safetensors",
         | 
| 205 | 
            +
                    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
         | 
| 206 | 
            +
                    "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 207 | 
            +
                    "model.layers.13.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 208 | 
            +
                    "model.layers.13.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 209 | 
            +
                    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 210 | 
            +
                    "model.layers.13.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 211 | 
            +
                    "model.layers.13.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 212 | 
            +
                    "model.layers.13.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 213 | 
            +
                    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 214 | 
            +
                    "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 215 | 
            +
                    "model.layers.13.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 216 | 
            +
                    "model.layers.13.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 217 | 
            +
                    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 218 | 
            +
                    "model.layers.13.self_attn.sinks": "model-00002-of-00004.safetensors",
         | 
| 219 | 
            +
                    "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 220 | 
            +
                    "model.layers.13.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 221 | 
            +
                    "model.layers.13.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 222 | 
            +
                    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 223 | 
            +
                    "model.layers.14.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 224 | 
            +
                    "model.layers.14.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 225 | 
            +
                    "model.layers.14.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 226 | 
            +
                    "model.layers.14.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 227 | 
            +
                    "model.layers.14.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 228 | 
            +
                    "model.layers.14.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 229 | 
            +
                    "model.layers.14.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 230 | 
            +
                    "model.layers.14.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 231 | 
            +
                    "model.layers.14.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 232 | 
            +
                    "model.layers.14.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 233 | 
            +
                    "model.layers.14.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 234 | 
            +
                    "model.layers.14.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 235 | 
            +
                    "model.layers.14.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 236 | 
            +
                    "model.layers.14.mlp.router.bias": "model-00003-of-00004.safetensors",
         | 
| 237 | 
            +
                    "model.layers.14.mlp.router.biases": "model-00003-of-00004.safetensors",
         | 
| 238 | 
            +
                    "model.layers.14.mlp.router.scales": "model-00003-of-00004.safetensors",
         | 
| 239 | 
            +
                    "model.layers.14.mlp.router.weight": "model-00003-of-00004.safetensors",
         | 
| 240 | 
            +
                    "model.layers.14.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 241 | 
            +
                    "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 242 | 
            +
                    "model.layers.14.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 243 | 
            +
                    "model.layers.14.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 244 | 
            +
                    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 245 | 
            +
                    "model.layers.14.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 246 | 
            +
                    "model.layers.14.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 247 | 
            +
                    "model.layers.14.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 248 | 
            +
                    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 249 | 
            +
                    "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 250 | 
            +
                    "model.layers.14.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 251 | 
            +
                    "model.layers.14.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 252 | 
            +
                    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 253 | 
            +
                    "model.layers.14.self_attn.sinks": "model-00002-of-00004.safetensors",
         | 
| 254 | 
            +
                    "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
         | 
| 255 | 
            +
                    "model.layers.14.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
         | 
| 256 | 
            +
                    "model.layers.14.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
         | 
| 257 | 
            +
                    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
         | 
| 258 | 
            +
                    "model.layers.15.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 259 | 
            +
                    "model.layers.15.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 260 | 
            +
                    "model.layers.15.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 261 | 
            +
                    "model.layers.15.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 262 | 
            +
                    "model.layers.15.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 263 | 
            +
                    "model.layers.15.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 264 | 
            +
                    "model.layers.15.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 265 | 
            +
                    "model.layers.15.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 266 | 
            +
                    "model.layers.15.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 267 | 
            +
                    "model.layers.15.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 268 | 
            +
                    "model.layers.15.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 269 | 
            +
                    "model.layers.15.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 270 | 
            +
                    "model.layers.15.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 271 | 
            +
                    "model.layers.15.mlp.router.bias": "model-00003-of-00004.safetensors",
         | 
| 272 | 
            +
                    "model.layers.15.mlp.router.biases": "model-00003-of-00004.safetensors",
         | 
| 273 | 
            +
                    "model.layers.15.mlp.router.scales": "model-00003-of-00004.safetensors",
         | 
| 274 | 
            +
                    "model.layers.15.mlp.router.weight": "model-00003-of-00004.safetensors",
         | 
| 275 | 
            +
                    "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 276 | 
            +
                    "model.layers.15.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 277 | 
            +
                    "model.layers.15.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 278 | 
            +
                    "model.layers.15.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 279 | 
            +
                    "model.layers.15.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 280 | 
            +
                    "model.layers.15.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 281 | 
            +
                    "model.layers.15.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 282 | 
            +
                    "model.layers.15.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 283 | 
            +
                    "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 284 | 
            +
                    "model.layers.15.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 285 | 
            +
                    "model.layers.15.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 286 | 
            +
                    "model.layers.15.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 287 | 
            +
                    "model.layers.15.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 288 | 
            +
                    "model.layers.15.self_attn.sinks": "model-00003-of-00004.safetensors",
         | 
| 289 | 
            +
                    "model.layers.15.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 290 | 
            +
                    "model.layers.15.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 291 | 
            +
                    "model.layers.15.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 292 | 
            +
                    "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 293 | 
            +
                    "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 294 | 
            +
                    "model.layers.16.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 295 | 
            +
                    "model.layers.16.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 296 | 
            +
                    "model.layers.16.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 297 | 
            +
                    "model.layers.16.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 298 | 
            +
                    "model.layers.16.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 299 | 
            +
                    "model.layers.16.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 300 | 
            +
                    "model.layers.16.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 301 | 
            +
                    "model.layers.16.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 302 | 
            +
                    "model.layers.16.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 303 | 
            +
                    "model.layers.16.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 304 | 
            +
                    "model.layers.16.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 305 | 
            +
                    "model.layers.16.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 306 | 
            +
                    "model.layers.16.mlp.router.bias": "model-00003-of-00004.safetensors",
         | 
| 307 | 
            +
                    "model.layers.16.mlp.router.biases": "model-00003-of-00004.safetensors",
         | 
| 308 | 
            +
                    "model.layers.16.mlp.router.scales": "model-00003-of-00004.safetensors",
         | 
| 309 | 
            +
                    "model.layers.16.mlp.router.weight": "model-00003-of-00004.safetensors",
         | 
| 310 | 
            +
                    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 311 | 
            +
                    "model.layers.16.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 312 | 
            +
                    "model.layers.16.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 313 | 
            +
                    "model.layers.16.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 314 | 
            +
                    "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 315 | 
            +
                    "model.layers.16.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 316 | 
            +
                    "model.layers.16.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 317 | 
            +
                    "model.layers.16.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 318 | 
            +
                    "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 319 | 
            +
                    "model.layers.16.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 320 | 
            +
                    "model.layers.16.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 321 | 
            +
                    "model.layers.16.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 322 | 
            +
                    "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 323 | 
            +
                    "model.layers.16.self_attn.sinks": "model-00003-of-00004.safetensors",
         | 
| 324 | 
            +
                    "model.layers.16.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 325 | 
            +
                    "model.layers.16.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 326 | 
            +
                    "model.layers.16.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 327 | 
            +
                    "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 328 | 
            +
                    "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 329 | 
            +
                    "model.layers.17.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 330 | 
            +
                    "model.layers.17.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 331 | 
            +
                    "model.layers.17.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 332 | 
            +
                    "model.layers.17.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 333 | 
            +
                    "model.layers.17.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 334 | 
            +
                    "model.layers.17.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 335 | 
            +
                    "model.layers.17.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 336 | 
            +
                    "model.layers.17.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 337 | 
            +
                    "model.layers.17.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 338 | 
            +
                    "model.layers.17.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 339 | 
            +
                    "model.layers.17.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 340 | 
            +
                    "model.layers.17.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 341 | 
            +
                    "model.layers.17.mlp.router.bias": "model-00003-of-00004.safetensors",
         | 
| 342 | 
            +
                    "model.layers.17.mlp.router.biases": "model-00003-of-00004.safetensors",
         | 
| 343 | 
            +
                    "model.layers.17.mlp.router.scales": "model-00003-of-00004.safetensors",
         | 
| 344 | 
            +
                    "model.layers.17.mlp.router.weight": "model-00003-of-00004.safetensors",
         | 
| 345 | 
            +
                    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 346 | 
            +
                    "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 347 | 
            +
                    "model.layers.17.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 348 | 
            +
                    "model.layers.17.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 349 | 
            +
                    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 350 | 
            +
                    "model.layers.17.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 351 | 
            +
                    "model.layers.17.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 352 | 
            +
                    "model.layers.17.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 353 | 
            +
                    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 354 | 
            +
                    "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 355 | 
            +
                    "model.layers.17.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 356 | 
            +
                    "model.layers.17.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 357 | 
            +
                    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 358 | 
            +
                    "model.layers.17.self_attn.sinks": "model-00003-of-00004.safetensors",
         | 
| 359 | 
            +
                    "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 360 | 
            +
                    "model.layers.17.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 361 | 
            +
                    "model.layers.17.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 362 | 
            +
                    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 363 | 
            +
                    "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 364 | 
            +
                    "model.layers.18.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 365 | 
            +
                    "model.layers.18.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 366 | 
            +
                    "model.layers.18.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 367 | 
            +
                    "model.layers.18.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 368 | 
            +
                    "model.layers.18.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 369 | 
            +
                    "model.layers.18.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 370 | 
            +
                    "model.layers.18.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 371 | 
            +
                    "model.layers.18.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 372 | 
            +
                    "model.layers.18.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 373 | 
            +
                    "model.layers.18.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 374 | 
            +
                    "model.layers.18.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 375 | 
            +
                    "model.layers.18.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 376 | 
            +
                    "model.layers.18.mlp.router.bias": "model-00003-of-00004.safetensors",
         | 
| 377 | 
            +
                    "model.layers.18.mlp.router.biases": "model-00003-of-00004.safetensors",
         | 
| 378 | 
            +
                    "model.layers.18.mlp.router.scales": "model-00003-of-00004.safetensors",
         | 
| 379 | 
            +
                    "model.layers.18.mlp.router.weight": "model-00003-of-00004.safetensors",
         | 
| 380 | 
            +
                    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 381 | 
            +
                    "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 382 | 
            +
                    "model.layers.18.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 383 | 
            +
                    "model.layers.18.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 384 | 
            +
                    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 385 | 
            +
                    "model.layers.18.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 386 | 
            +
                    "model.layers.18.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 387 | 
            +
                    "model.layers.18.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 388 | 
            +
                    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 389 | 
            +
                    "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 390 | 
            +
                    "model.layers.18.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 391 | 
            +
                    "model.layers.18.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 392 | 
            +
                    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 393 | 
            +
                    "model.layers.18.self_attn.sinks": "model-00003-of-00004.safetensors",
         | 
| 394 | 
            +
                    "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 395 | 
            +
                    "model.layers.18.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 396 | 
            +
                    "model.layers.18.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 397 | 
            +
                    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 398 | 
            +
                    "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 399 | 
            +
                    "model.layers.19.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 400 | 
            +
                    "model.layers.19.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 401 | 
            +
                    "model.layers.19.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 402 | 
            +
                    "model.layers.19.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 403 | 
            +
                    "model.layers.19.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 404 | 
            +
                    "model.layers.19.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 405 | 
            +
                    "model.layers.19.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 406 | 
            +
                    "model.layers.19.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 407 | 
            +
                    "model.layers.19.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 408 | 
            +
                    "model.layers.19.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 409 | 
            +
                    "model.layers.19.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 410 | 
            +
                    "model.layers.19.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 411 | 
            +
                    "model.layers.19.mlp.router.bias": "model-00003-of-00004.safetensors",
         | 
| 412 | 
            +
                    "model.layers.19.mlp.router.biases": "model-00003-of-00004.safetensors",
         | 
| 413 | 
            +
                    "model.layers.19.mlp.router.scales": "model-00003-of-00004.safetensors",
         | 
| 414 | 
            +
                    "model.layers.19.mlp.router.weight": "model-00003-of-00004.safetensors",
         | 
| 415 | 
            +
                    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
         | 
| 416 | 
            +
                    "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 417 | 
            +
                    "model.layers.19.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 418 | 
            +
                    "model.layers.19.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 419 | 
            +
                    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 420 | 
            +
                    "model.layers.19.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 421 | 
            +
                    "model.layers.19.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 422 | 
            +
                    "model.layers.19.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 423 | 
            +
                    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 424 | 
            +
                    "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 425 | 
            +
                    "model.layers.19.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 426 | 
            +
                    "model.layers.19.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 427 | 
            +
                    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 428 | 
            +
                    "model.layers.19.self_attn.sinks": "model-00003-of-00004.safetensors",
         | 
| 429 | 
            +
                    "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
         | 
| 430 | 
            +
                    "model.layers.19.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
         | 
| 431 | 
            +
                    "model.layers.19.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
         | 
| 432 | 
            +
                    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
         | 
| 433 | 
            +
                    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
         | 
| 434 | 
            +
                    "model.layers.2.mlp.experts.down_proj.bias": "model-00001-of-00004.safetensors",
         | 
| 435 | 
            +
                    "model.layers.2.mlp.experts.down_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.down_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.down_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.router.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.router.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.router.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.mlp.router.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.sinks": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.down_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.down_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.down_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.down_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.up_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.up_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.up_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.experts.up_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.router.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.router.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.router.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.mlp.router.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.sinks": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.21.input_layernorm.weight": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.down_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.down_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.down_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.down_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.gate_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.21.mlp.experts.gate_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.21.mlp.experts.gate_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.21.mlp.experts.gate_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.21.mlp.experts.up_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.up_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.up_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.experts.up_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.router.bias": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.router.biases": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.router.scales": "model-00004-of-00004.safetensors",
                    "model.layers.21.mlp.router.weight": "model-00004-of-00004.safetensors",
                    "model.layers.21.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
                    "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.k_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.k_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.o_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.o_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.o_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.q_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.q_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.sinks": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.v_proj.biases": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.v_proj.scales": "model-00003-of-00004.safetensors",
                    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
                    "model.layers.22.input_layernorm.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.down_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.down_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.down_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.down_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.gate_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.gate_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.gate_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.gate_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.up_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.up_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.up_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.experts.up_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.router.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.router.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.router.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.mlp.router.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.k_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.k_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.o_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.o_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.o_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.q_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.q_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.sinks": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.v_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.v_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.22.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.input_layernorm.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.down_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.down_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.down_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.down_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.gate_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.gate_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.gate_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.gate_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.up_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.up_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.up_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.experts.up_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.router.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.router.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.router.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.mlp.router.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.k_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.k_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.o_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.o_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.o_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.q_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.q_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.sinks": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.v_proj.biases": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.v_proj.scales": "model-00004-of-00004.safetensors",
                    "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
                    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.down_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.down_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.down_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.down_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.router.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.router.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.router.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.mlp.router.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.sinks": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.down_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.down_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.down_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.down_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.router.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.router.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.router.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.mlp.router.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.sinks": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.down_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.down_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.down_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.down_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.router.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.router.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.router.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.mlp.router.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.sinks": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.experts.gate_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.gate_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.gate_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.gate_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.up_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.up_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.up_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.experts.up_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.6.mlp.router.bias": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.router.biases": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.router.scales": "model-00002-of-00004.safetensors",
                    "model.layers.6.mlp.router.weight": "model-00002-of-00004.safetensors",
                    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.k_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.k_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.o_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.o_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.o_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.q_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.q_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.sinks": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.v_proj.biases": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.v_proj.scales": "model-00001-of-00004.safetensors",
                    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
                    "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.router.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.router.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.router.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.mlp.router.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.sinks": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.router.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.router.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.router.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.mlp.router.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.sinks": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.down_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.down_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.down_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.down_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.gate_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.gate_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.gate_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.gate_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.up_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.up_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.up_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.experts.up_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.router.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.router.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.router.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.mlp.router.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.k_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.k_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.o_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.o_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.o_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.q_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.q_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.sinks": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.v_proj.biases": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.v_proj.scales": "model-00002-of-00004.safetensors",
                    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
                    "model.norm.weight": "model-00001-of-00004.safetensors"
                }
            }
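The weight_map above is the usual sharded-checkpoint index: every parameter name points at the shard file that stores it, so a loader only opens the shards it actually needs. The extra `.scales`/`.biases` entries next to each `.weight` suggest a quantized export. Below is a minimal sketch (not part of the upload) of reading one tensor directly from the right shard; the local paths and the chosen tensor name are illustrative.

    import json
    from safetensors import safe_open

    # Hypothetical local copies of the files in this commit.
    with open("model.safetensors.index.json") as fh:
        index = json.load(fh)

    weight_map = index["weight_map"]  # parameter name -> shard file name

    name = "model.layers.2.mlp.experts.down_proj.weight"
    shard = weight_map[name]  # "model-00001-of-00004.safetensors" per the map above

    # Open only that shard and read only that tensor.
    with safe_open(shard, framework="numpy") as f:
        tensor = f.get_tensor(name)
        print(name, tensor.shape, tensor.dtype)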
    	
        special_tokens_map.json
    ADDED
    
    @@ -0,0 +1,23 @@
            {
              "bos_token": {
                "content": "<|startoftext|>",
                "lstrip": false,
                "normalized": false,
                "rstrip": false,
                "single_word": false
              },
              "eos_token": {
                "content": "<|return|>",
                "lstrip": false,
                "normalized": false,
                "rstrip": false,
                "single_word": false
              },
              "pad_token": {
                "content": "<|endoftext|>",
                "lstrip": false,
                "normalized": false,
                "rstrip": false,
                "single_word": false
              }
            }
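special_tokens_map.json only assigns roles (bos/eos/pad) to tokens that are defined as added special tokens in tokenizer_config.json below. A quick hedged check of how those roles resolve once the tokenizer is loaded; loading from a local clone of this repository is assumed, and the printed ids follow from the added_tokens_decoder entries further down.

    from transformers import AutoTokenizer

    # Hypothetical local path to a clone of this repository.
    tok = AutoTokenizer.from_pretrained("./")

    print(tok.bos_token)  # <|startoftext|>
    print(tok.eos_token)  # <|return|> -- generation is expected to stop on this token
    print(tok.pad_token)  # <|endoftext|>
    print(tok.convert_tokens_to_ids("<|return|>"))  # 200002 per tokenizer_config.json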
    	
        tokenizer.json
    ADDED
    
    @@ -0,0 +1,3 @@
            version https://git-lfs.github.com/spec/v1
            oid sha256:0614fe83cadab421296e664e1f48f4261fa8fef6e03e63bb75c20f38e37d07d3
            size 27868174
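tokenizer.json is tracked through Git LFS (per the .gitattributes change in this commit), so what is committed is only a pointer recording the object's sha256 and byte size. A small sketch for sanity-checking a downloaded copy against that pointer; the local filename is assumed.

    import hashlib
    from pathlib import Path

    # After `git lfs pull`, the real tokenizer.json should match the pointer above.
    p = Path("tokenizer.json")
    digest = hashlib.sha256(p.read_bytes()).hexdigest()

    assert p.stat().st_size == 27868174, "size mismatch"
    assert digest == "0614fe83cadab421296e664e1f48f4261fa8fef6e03e63bb75c20f38e37d07d3", "oid mismatch"
    print("tokenizer.json matches its LFS pointer")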
    	
        tokenizer_config.json
    ADDED
    
    | @@ -0,0 +1,183 @@ | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | 
|  | |
1 + {
2 +   "added_tokens_decoder": {
3 +     "199998": {
4 +       "content": "<|startoftext|>",
5 +       "lstrip": false,
6 +       "normalized": false,
7 +       "rstrip": false,
8 +       "single_word": false,
9 +       "special": true
10 +     },
11 +     "199999": {
12 +       "content": "<|endoftext|>",
13 +       "lstrip": false,
14 +       "normalized": false,
15 +       "rstrip": false,
16 +       "single_word": false,
17 +       "special": true
18 +     },
19 +     "200000": {
20 +       "content": "<|reserved_200000|>",
21 +       "lstrip": false,
22 +       "normalized": false,
23 +       "rstrip": false,
24 +       "single_word": false,
25 +       "special": true
26 +     },
27 +     "200001": {
28 +       "content": "<|reserved_200001|>",
29 +       "lstrip": false,
30 +       "normalized": false,
31 +       "rstrip": false,
32 +       "single_word": false,
33 +       "special": true
34 +     },
35 +     "200002": {
36 +       "content": "<|return|>",
37 +       "lstrip": false,
38 +       "normalized": false,
39 +       "rstrip": false,
40 +       "single_word": false,
41 +       "special": true
42 +     },
43 +     "200003": {
44 +       "content": "<|constrain|>",
45 +       "lstrip": false,
46 +       "normalized": false,
47 +       "rstrip": false,
48 +       "single_word": false,
49 +       "special": true
50 +     },
51 +     "200004": {
52 +       "content": "<|reserved_200004|>",
53 +       "lstrip": false,
54 +       "normalized": false,
55 +       "rstrip": false,
56 +       "single_word": false,
57 +       "special": true
58 +     },
59 +     "200005": {
60 +       "content": "<|channel|>",
61 +       "lstrip": false,
62 +       "normalized": false,
63 +       "rstrip": false,
64 +       "single_word": false,
65 +       "special": true
66 +     },
67 +     "200006": {
68 +       "content": "<|start|>",
69 +       "lstrip": false,
70 +       "normalized": false,
71 +       "rstrip": false,
72 +       "single_word": false,
73 +       "special": true
74 +     },
75 +     "200007": {
76 +       "content": "<|end|>",
77 +       "lstrip": false,
78 +       "normalized": false,
79 +       "rstrip": false,
80 +       "single_word": false,
81 +       "special": true
82 +     },
83 +     "200008": {
84 +       "content": "<|message|>",
85 +       "lstrip": false,
86 +       "normalized": false,
87 +       "rstrip": false,
88 +       "single_word": false,
89 +       "special": true
90 +     },
91 +     "200009": {
92 +       "content": "<|reserved_200009|>",
93 +       "lstrip": false,
94 +       "normalized": false,
95 +       "rstrip": false,
96 +       "single_word": false,
97 +       "special": true
98 +     },
99 +     "200010": {
100 +       "content": "<|reserved_200010|>",
101 +       "lstrip": false,
102 +       "normalized": false,
103 +       "rstrip": false,
104 +       "single_word": false,
105 +       "special": true
106 +     },
107 +     "200011": {
108 +       "content": "<|reserved_200011|>",
109 +       "lstrip": false,
110 +       "normalized": false,
111 +       "rstrip": false,
112 +       "single_word": false,
113 +       "special": true
114 +     },
115 +     "200012": {
116 +       "content": "<|call|>",
117 +       "lstrip": false,
118 +       "normalized": false,
119 +       "rstrip": false,
120 +       "single_word": false,
121 +       "special": true
122 +     },
123 +     "200013": {
124 +       "content": "<|reserved_200013|>",
125 +       "lstrip": false,
126 +       "normalized": false,
127 +       "rstrip": false,
128 +       "single_word": false,
129 +       "special": true
130 +     },
131 +     "200014": {
132 +       "content": "<|reserved_200014|>",
133 +       "lstrip": false,
134 +       "normalized": false,
135 +       "rstrip": false,
136 +       "single_word": false,
137 +       "special": true
138 +     },
139 +     "200015": {
140 +       "content": "<|reserved_200015|>",
141 +       "lstrip": false,
142 +       "normalized": false,
143 +       "rstrip": false,
144 +       "single_word": false,
145 +       "special": true
146 +     },
147 +     "200016": {
148 +       "content": "<|reserved_200016|>",
149 +       "lstrip": false,
150 +       "normalized": false,
151 +       "rstrip": false,
152 +       "single_word": false,
153 +       "special": true
154 +     },
155 +     "200017": {
156 +       "content": "<|reserved_200017|>",
157 +       "lstrip": false,
158 +       "normalized": false,
159 +       "rstrip": false,
160 +       "single_word": false,
161 +       "special": true
162 +     },
163 +     "200018": {
164 +       "content": "<|endofprompt|>",
165 +       "lstrip": false,
166 +       "normalized": false,
167 +       "rstrip": false,
168 +       "single_word": false,
169 +       "special": true
170 +     }
171 +   },
172 +   "bos_token": "<|startoftext|>",
173 +   "clean_up_tokenization_spaces": false,
174 +   "eos_token": "<|return|>",
175 +   "extra_special_tokens": {},
176 +   "model_input_names": [
177 +     "input_ids",
178 +     "attention_mask"
179 +   ],
180 +   "model_max_length": 1000000000000000019884624838656,
181 +   "pad_token": "<|endoftext|>",
182 +   "tokenizer_class": "PreTrainedTokenizerFast"
183 + }
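tokenizer_config.json registers the special tokens at IDs 199998–200018 in added_tokens_decoder, selects PreTrainedTokenizerFast, and sets `<|startoftext|>`, `<|return|>`, and `<|endoftext|>` as the BOS, EOS, and pad tokens respectively. A quick sanity-check sketch, assuming the repository (with LFS files resolved) has been cloned into `./`:

```python
from transformers import AutoTokenizer

# "./" stands in for a local clone of this repository; adjust as needed.
tok = AutoTokenizer.from_pretrained("./")

# IDs come from added_tokens_decoder in tokenizer_config.json.
assert tok.convert_tokens_to_ids("<|startoftext|>") == 199998
assert tok.convert_tokens_to_ids("<|return|>") == 200002
assert tok.convert_tokens_to_ids("<|channel|>") == 200005

# bos/eos/pad settings from tokenizer_config.json and special_tokens_map.json.
assert tok.eos_token_id == 200002  # "<|return|>"
assert tok.pad_token_id == 199999  # "<|endoftext|>"
```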

