diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..52373fe24473b1aa44333d318f578ae6bf04b49b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +tokenizer.json filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md index 962f55e9a99b0eab2926ce2f9af55fb37cea8a91..c9801b281d23b8e558c94c6553c0cc0fbd1b61e1 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,218 @@ ---- -license: llama3.1 ---- +--- +language: +- en +- de +- fr +- it +- pt +- hi +- es +- th +library_name: transformers +base_model: meta-llama/Llama-3.1-405B-Instruct +license: llama3.1 +pipeline_tag: text-generation +tags: +- facebook +- meta +- pytorch +- llama +- llama-3 +- mlx +extra_gated_prompt: "### LLAMA 3.1 COMMUNITY LICENSE AGREEMENT\nLlama 3.1 Version\ + \ Release Date: July 23, 2024\n\"Agreement\" means the terms and conditions for\ + \ use, reproduction, distribution and modification of the Llama Materials set forth\ + \ herein.\n\"Documentation\" means the specifications, manuals and documentation\ + \ accompanying Llama 3.1 distributed by Meta at https://llama.meta.com/doc/overview.\n\ + \"Licensee\" or \"you\" means you, or your employer or any other person or entity\ + \ (if you are entering into this Agreement on such person or entity’s behalf), of\ + \ the age required under applicable laws, rules or regulations to provide legal\ + \ consent and that has legal authority to bind your employer or such other person\ + \ or entity if you are entering in this Agreement on their behalf.\n\"Llama 3.1\"\ + \ means the foundational large language models and software and algorithms, including\ + \ machine-learning model code, trained model weights, inference-enabling code, training-enabling\ + \ code, fine-tuning enabling code and other elements of the foregoing distributed\ + \ by Meta at https://llama.meta.com/llama-downloads.\n\"Llama Materials\" means,\ + \ collectively, Meta’s proprietary Llama 3.1 and Documentation (and any portion\ + \ thereof) made available under this Agreement.\n\"Meta\" or \"we\" means Meta Platforms\ + \ Ireland Limited (if you are located in or, if you are an entity, your principal\ + \ place of business is in the EEA or Switzerland) and Meta Platforms, Inc. (if you\ + \ are located outside of the EEA or Switzerland).\n \n1. License Rights and Redistribution.\n\ + a. Grant of Rights. You are granted a non-exclusive, worldwide, non-transferable\ + \ and royalty-free limited license under Meta’s intellectual property or other rights\ + \ owned by Meta embodied in the Llama Materials to use, reproduce, distribute, copy,\ + \ create derivative works of, and make modifications to the Llama Materials.\nb.\ + \ Redistribution and Use.\ni. 
If you distribute or make available the Llama Materials\ + \ (or any derivative works thereof), or a product or service (including another\ + \ AI model) that contains any of them, you shall (A) provide a copy of this Agreement\ + \ with any such Llama Materials; and (B) prominently display “Built with Llama”\ + \ on a related website, user interface, blogpost, about page, or product documentation.\ + \ If you use the Llama Materials or any outputs or results of the Llama Materials\ + \ to create, train, fine tune, or otherwise improve an AI model, which is distributed\ + \ or made available, you shall also include “Llama” at the beginning of any such\ + \ AI model name.\nii. If you receive Llama Materials, or any derivative works thereof,\ + \ from a Licensee as part of an integrated end user product, then Section 2 of\ + \ this Agreement will not apply to you.\niii. You must retain in all copies of the\ + \ Llama Materials that you distribute the following attribution notice within a\ + \ “Notice” text file distributed as a part of such copies: “Llama 3.1 is licensed\ + \ under the Llama 3.1 Community License, Copyright © Meta Platforms, Inc. All Rights\ + \ Reserved.”\niv. Your use of the Llama Materials must comply with applicable laws\ + \ and regulations (including trade compliance laws and regulations) and adhere to\ + \ the Acceptable Use Policy for the Llama Materials (available at https://llama.meta.com/llama3_1/use-policy),\ + \ which is hereby incorporated by reference into this Agreement.\n2. Additional\ + \ Commercial Terms. If, on the Llama 3.1 version release date, the monthly active\ + \ users of the products or services made available by or for Licensee, or Licensee’s\ + \ affiliates, is greater than 700 million monthly active users in the preceding\ + \ calendar month, you must request a license from Meta, which Meta may grant to\ + \ you in its sole discretion, and you are not authorized to exercise any of the\ + \ rights under this Agreement unless or until Meta otherwise expressly grants you\ + \ such rights.\n3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE\ + \ LLAMA MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE PROVIDED ON AN “AS IS”\ + \ BASIS, WITHOUT WARRANTIES OF ANY KIND, AND META DISCLAIMS ALL WARRANTIES OF ANY\ + \ KIND, BOTH EXPRESS AND IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES\ + \ OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.\ + \ YOU ARE SOLELY RESPONSIBLE FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING\ + \ THE LLAMA MATERIALS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR USE OF THE LLAMA\ + \ MATERIALS AND ANY OUTPUT AND RESULTS.\n4. Limitation of Liability. IN NO EVENT\ + \ WILL META OR ITS AFFILIATES BE LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN\ + \ CONTRACT, TORT, NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS\ + \ AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL,\ + \ EXEMPLARY OR PUNITIVE DAMAGES, EVEN IF META OR ITS AFFILIATES HAVE BEEN ADVISED\ + \ OF THE POSSIBILITY OF ANY OF THE FOREGOING.\n5. Intellectual Property.\na. No\ + \ trademark licenses are granted under this Agreement, and in connection with the\ + \ Llama Materials, neither Meta nor Licensee may use any name or mark owned by or\ + \ associated with the other or any of its affiliates, except as required for reasonable\ + \ and customary use in describing and redistributing the Llama Materials or as set\ + \ forth in this Section 5(a). 
Meta hereby grants you a license to use “Llama” (the\ + \ “Mark”) solely as required to comply with the last sentence of Section 1.b.i.\ + \ You will comply with Meta’s brand guidelines (currently accessible at https://about.meta.com/brand/resources/meta/company-brand/\ + \ ). All goodwill arising out of your use of the Mark will inure to the benefit\ + \ of Meta.\nb. Subject to Meta’s ownership of Llama Materials and derivatives made\ + \ by or for Meta, with respect to any derivative works and modifications of the\ + \ Llama Materials that are made by you, as between you and Meta, you are and will\ + \ be the owner of such derivative works and modifications.\nc. If you institute\ + \ litigation or other proceedings against Meta or any entity (including a cross-claim\ + \ or counterclaim in a lawsuit) alleging that the Llama Materials or Llama 3.1 outputs\ + \ or results, or any portion of any of the foregoing, constitutes infringement of\ + \ intellectual property or other rights owned or licensable by you, then any licenses\ + \ granted to you under this Agreement shall terminate as of the date such litigation\ + \ or claim is filed or instituted. You will indemnify and hold harmless Meta from\ + \ and against any claim by any third party arising out of or related to your use\ + \ or distribution of the Llama Materials.\n6. Term and Termination. The term of\ + \ this Agreement will commence upon your acceptance of this Agreement or access\ + \ to the Llama Materials and will continue in full force and effect until terminated\ + \ in accordance with the terms and conditions herein. Meta may terminate this Agreement\ + \ if you are in breach of any term or condition of this Agreement. Upon termination\ + \ of this Agreement, you shall delete and cease use of the Llama Materials. Sections\ + \ 3, 4 and 7 shall survive the termination of this Agreement.\n7. Governing Law\ + \ and Jurisdiction. This Agreement will be governed and construed under the laws\ + \ of the State of California without regard to choice of law principles, and the\ + \ UN Convention on Contracts for the International Sale of Goods does not apply\ + \ to this Agreement. The courts of California shall have exclusive jurisdiction\ + \ of any dispute arising out of this Agreement.\n### Llama 3.1 Acceptable Use Policy\n\ + Meta is committed to promoting safe and fair use of its tools and features, including\ + \ Llama 3.1. If you access or use Llama 3.1, you agree to this Acceptable Use Policy\ + \ (“Policy”). The most recent copy of this policy can be found at [https://llama.meta.com/llama3_1/use-policy](https://llama.meta.com/llama3_1/use-policy)\n\ + #### Prohibited Uses\nWe want everyone to use Llama 3.1 safely and responsibly.\ + \ You agree you will not use, or allow others to use, Llama 3.1 to:\n 1. Violate\ + \ the law or others’ rights, including to:\n 1. Engage in, promote, generate,\ + \ contribute to, encourage, plan, incite, or further illegal or unlawful activity\ + \ or content, such as:\n 1. Violence or terrorism\n 2. Exploitation\ + \ or harm to children, including the solicitation, creation, acquisition, or dissemination\ + \ of child exploitative content or failure to report Child Sexual Abuse Material\n\ + \ 3. Human trafficking, exploitation, and sexual violence\n 4. The\ + \ illegal distribution of information or materials to minors, including obscene\ + \ materials, or failure to employ legally required age-gating in connection with\ + \ such information or materials.\n 5. Sexual solicitation\n 6. 
Any\ + \ other criminal activity\n 3. Engage in, promote, incite, or facilitate the\ + \ harassment, abuse, threatening, or bullying of individuals or groups of individuals\n\ + \ 4. Engage in, promote, incite, or facilitate discrimination or other unlawful\ + \ or harmful conduct in the provision of employment, employment benefits, credit,\ + \ housing, other economic benefits, or other essential goods and services\n 5.\ + \ Engage in the unauthorized or unlicensed practice of any profession including,\ + \ but not limited to, financial, legal, medical/health, or related professional\ + \ practices\n 6. Collect, process, disclose, generate, or infer health, demographic,\ + \ or other sensitive personal or private information about individuals without rights\ + \ and consents required by applicable laws\n 7. Engage in or facilitate any action\ + \ or generate any content that infringes, misappropriates, or otherwise violates\ + \ any third-party rights, including the outputs or results of any products or services\ + \ using the Llama Materials\n 8. Create, generate, or facilitate the creation\ + \ of malicious code, malware, computer viruses or do anything else that could disable,\ + \ overburden, interfere with or impair the proper working, integrity, operation\ + \ or appearance of a website or computer system\n2. Engage in, promote, incite,\ + \ facilitate, or assist in the planning or development of activities that present\ + \ a risk of death or bodily harm to individuals, including use of Llama 3.1 related\ + \ to the following:\n 1. Military, warfare, nuclear industries or applications,\ + \ espionage, use for materials or activities that are subject to the International\ + \ Traffic Arms Regulations (ITAR) maintained by the United States Department of\ + \ State\n 2. Guns and illegal weapons (including weapon development)\n 3.\ + \ Illegal drugs and regulated/controlled substances\n 4. Operation of critical\ + \ infrastructure, transportation technologies, or heavy machinery\n 5. Self-harm\ + \ or harm to others, including suicide, cutting, and eating disorders\n 6. Any\ + \ content intended to incite or promote violence, abuse, or any infliction of bodily\ + \ harm to an individual\n3. Intentionally deceive or mislead others, including use\ + \ of Llama 3.1 related to the following:\n 1. Generating, promoting, or furthering\ + \ fraud or the creation or promotion of disinformation\n 2. Generating, promoting,\ + \ or furthering defamatory content, including the creation of defamatory statements,\ + \ images, or other content\n 3. Generating, promoting, or further distributing\ + \ spam\n 4. Impersonating another individual without consent, authorization,\ + \ or legal right\n 5. Representing that the use of Llama 3.1 or outputs are human-generated\n\ + \ 6. Generating or facilitating false online engagement, including fake reviews\ + \ and other means of fake online engagement\n4. 
Fail to appropriately disclose to\ + \ end users any known dangers of your AI system\nPlease report any violation of\ + \ this Policy, software “bug,” or other problems that could lead to a violation\ + \ of this Policy through one of the following means:\n * Reporting issues with\ + \ the model: [https://github.com/meta-llama/llama-models/issues](https://github.com/meta-llama/llama-models/issues)\n\ + \ * Reporting risky content generated by the model:\n developers.facebook.com/llama_output_feedback\n\ + \ * Reporting bugs and security concerns: facebook.com/whitehat/info\n * Reporting\ + \ violations of the Acceptable Use Policy or unlicensed uses of Meta Llama 3: LlamaUseReport@meta.com" +extra_gated_fields: + First Name: text + Last Name: text + Date of birth: date_picker + Country: country + Affiliation: text + Job title: + type: select + options: + - Student + - Research Graduate + - AI researcher + - AI developer/engineer + - Reporter + - Other + geo: ip_location + ? By clicking Submit below I accept the terms of the license and acknowledge that + the information I provide will be collected stored processed and shared in accordance + with the Meta Privacy Policy + : checkbox +extra_gated_description: The information you provide will be collected, stored, processed + and shared in accordance with the [Meta Privacy Policy](https://www.facebook.com/privacy/policy/). +extra_gated_button_content: Submit +--- + +# mlx-community/Meta-Llama-3.1-405B-Instruct-8bit + +The Model [mlx-community/Meta-Llama-3.1-405B-Instruct-8bit](https://huggingface.co/mlx-community/Meta-Llama-3.1-405B-Instruct-8bit) was converted to MLX format from [meta-llama/Llama-3.1-405B-Instruct](https://huggingface.co/meta-llama/Llama-3.1-405B-Instruct) using mlx-lm version **0.19.3**. 
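+
+The original conversion command isn't recorded in this repo. As a rough, hypothetical sketch (assuming mlx-lm's `mlx_lm.convert` CLI and the 8-bit, group-size-64 settings in the `config.json` below), a repo like this is typically produced with:
+
+```bash
+# Hypothetical invocation: the quantization flags mirror the "quantization"
+# block in config.json (bits=8, group_size=64); this is not the recorded command.
+mlx_lm.convert \
+    --hf-path meta-llama/Llama-3.1-405B-Instruct \
+    -q --q-bits 8 --q-group-size 64 \
+    --upload-repo mlx-community/Meta-Llama-3.1-405B-Instruct-8bit
+```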
+
+## Use with mlx
+
+```bash
+pip install mlx-lm
+```
+
+```python
+from mlx_lm import load, generate
+
+# Download (if needed) and load the 8-bit quantized weights and tokenizer.
+model, tokenizer = load("mlx-community/Meta-Llama-3.1-405B-Instruct-8bit")
+
+prompt = "hello"
+
+# Wrap the raw prompt in the model's chat template when one is defined.
+if hasattr(tokenizer, "apply_chat_template") and tokenizer.chat_template is not None:
+    messages = [{"role": "user", "content": prompt}]
+    prompt = tokenizer.apply_chat_template(
+        messages, tokenize=False, add_generation_prompt=True
+    )
+
+response = generate(model, tokenizer, prompt=prompt, verbose=True)
+```
diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..36389c1b12a9e359e5c7e4fa16faeb3fcdcb932a
--- /dev/null
+++ b/config.json
@@ -0,0 +1,46 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "hidden_act": "silu",
+  "hidden_size": 16384,
+  "initializer_range": 0.02,
+  "intermediate_size": 53248,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 128,
+  "num_hidden_layers": 126,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "quantization": {
+    "group_size": 64,
+    "bits": 8
+  },
+  "quantization_config": {
+    "group_size": 64,
+    "bits": 8
+  },
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.43.3",
+  "use_cache": true,
+  "vocab_size": 128256
+}
\ No newline at end of file
diff --git a/model-00001-of-00085.safetensors b/model-00001-of-00085.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f87e69fd387fc0bb878d1e81e5013702a5ab0367
--- /dev/null
+++ b/model-00001-of-00085.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30384a8b3b0952954eada7444bc01b447667adb23bcebf52000ee5be3593bc3c
+size 4692642146
diff --git a/model-00002-of-00085.safetensors b/model-00002-of-00085.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f8df5f32641a9ed241344689892668892305d687
--- /dev/null
+++ b/model-00002-of-00085.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e2a53541e8682c54efc40645397b22fb71eadc3b98368826f7757162d3b7c19
+size 4920054237
diff --git a/model-00003-of-00085.safetensors b/model-00003-of-00085.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..99b56bd9d7c9058f291bccd76aa914d05966821f
--- /dev/null
+++ b/model-00003-of-00085.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3bf622fcea87e94b6e9aa8948586a947fec7b5b72e502c5260f52b11b07c26d2
+size 5240851727
diff --git a/model-00004-of-00085.safetensors b/model-00004-of-00085.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ad5eaeb5275ccd507a956a7b53c360fd0c1f1b94
--- /dev/null
+++ b/model-00004-of-00085.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cdf5d68efc3d4107aaef8693b396333352b069b739cda553e4f677c8f6f2e84f
+size 4920054251
diff --git a/model-00005-of-00085.safetensors b/model-00005-of-00085.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..83ffbc5ff99b4804a1ac5831d6fb761327f652e1
--- /dev/null
+++ b/model-00005-of-00085.safetensors
@@ -0,0 +1,3
@@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c49231a16fac2d37f3acb18c723bd6468381a179551ffb667b3b7c324920f4ce +size 5240851721 diff --git a/model-00006-of-00085.safetensors b/model-00006-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fd27ccea35c89170e62a458db471ffc9c93c4f6d --- /dev/null +++ b/model-00006-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7abc76ea1b9c71e64605068767838165426e8392e9deb7469a1ebafcdce3d4a4 +size 4920054239 diff --git a/model-00007-of-00085.safetensors b/model-00007-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c29dbddeba75fe55767235d768a695acb86e40dc --- /dev/null +++ b/model-00007-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d9d53f986a87d7e2af7903a7842663b31356e1750d8853d80bc3f9da7b80f77 +size 5240851719 diff --git a/model-00008-of-00085.safetensors b/model-00008-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..30a9de4fc12ef1456202e2770a13339581f909df --- /dev/null +++ b/model-00008-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16067a2a04ac0fc4e0f1a65bfa237ba7b0892cdd355339298ab08a8f8ab7bdc7 +size 4920054272 diff --git a/model-00009-of-00085.safetensors b/model-00009-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6edd78a340bc9f999c374fd42a2a2dd90f1a188b --- /dev/null +++ b/model-00009-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ff78029ac36838f9d1f2c90479ec0173ff3b03542b91fb54437e4c6012827c0 +size 5240851748 diff --git a/model-00010-of-00085.safetensors b/model-00010-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..da847e7b7f0c8723d75ee6c97ba3a1034e500919 --- /dev/null +++ b/model-00010-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71cf9ddffe57d00582b909c1e5e5a2acb5aedc04ef233e979005f175e35343e4 +size 4920054305 diff --git a/model-00011-of-00085.safetensors b/model-00011-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a7fec44c74b93828c309e93ac9e786fbeec9265c --- /dev/null +++ b/model-00011-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8521a93c0c7f4b54739bed39110d4ec838ee24d13236799ef72c8438db9fcdef +size 5240851752 diff --git a/model-00012-of-00085.safetensors b/model-00012-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e52a1650a0f67f4af9b28d57ae3411212eddceba --- /dev/null +++ b/model-00012-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6b9a176e7aa247c464d368b24f7196cf7f4683d10b4b1fe0381cbe0caa5cfb8 +size 4920054305 diff --git a/model-00013-of-00085.safetensors b/model-00013-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..082e60eed08fa2c8316c64c57fbcacd58fd39a3a --- /dev/null +++ b/model-00013-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30d6385a6f2136105f19a72a6c0887557f5c8fa2e50d6418c124781b32481b42 +size 5240851756 diff --git a/model-00014-of-00085.safetensors b/model-00014-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..11d3db4ac86553c9bc07c7912259bf482ee085a2 --- /dev/null +++ 
b/model-00014-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:214ad08a846cee60461aca5f7dc6a9146dc7d5c51aaf0556f754741e650b9c4b +size 4920054285 diff --git a/model-00015-of-00085.safetensors b/model-00015-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e535807ae2234221d1cc59f6763fc78ee883429d --- /dev/null +++ b/model-00015-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9b72ba49a03d1b3f2434b0f8e49a315d15a24121c5441393543e29b58424c661 +size 5240851754 diff --git a/model-00016-of-00085.safetensors b/model-00016-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ddaef6a27d0e8b8aadf36992222bc125e4ed2154 --- /dev/null +++ b/model-00016-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:037f9ae5fa4c17786931e281352f0b99444925ea79ca91aedfbdaeeec1fa3608 +size 4920054299 diff --git a/model-00017-of-00085.safetensors b/model-00017-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e8fccd32be09873af1935b5d6d6dd1f3122c1c4d --- /dev/null +++ b/model-00017-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfefa2fede9afa524576cee310a2a199e4b754741fc4554206ec2544290ccf4d +size 5240851742 diff --git a/model-00018-of-00085.safetensors b/model-00018-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..16a7e7f08fd7b9b5b55704abf6ed8882c9ac16d9 --- /dev/null +++ b/model-00018-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:357c8bbdd9e2041d07b5e2039d7a9363439faefa2097b85609ad948010e23cd2 +size 4920054287 diff --git a/model-00019-of-00085.safetensors b/model-00019-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d701cd5c05b45bb07c961519ae569c76191f4f44 --- /dev/null +++ b/model-00019-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1db0d7517f9a50fe53c610a9a27ed2332d9b1561b45a28ba6a3284fb817759a0 +size 5240851742 diff --git a/model-00020-of-00085.safetensors b/model-00020-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c0743e087315053b4654abc32811b2957eff5472 --- /dev/null +++ b/model-00020-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:95c85a08434f0076ad6798d1280e872eb3a22a4ddbf0c74153dfde80dd1d9518 +size 4920054303 diff --git a/model-00021-of-00085.safetensors b/model-00021-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8d96e4bd8fb24fe05e0ff28bb261d00ffe55d4a6 --- /dev/null +++ b/model-00021-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bac6df81cab98d0eabd4c458596068857cd85d82cc73d7eec268f43b5c920f3c +size 5240851748 diff --git a/model-00022-of-00085.safetensors b/model-00022-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a0439542876cbaf5cff8148fa71369efb4ee04d7 --- /dev/null +++ b/model-00022-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d244d8158e2b5cb6c5e1d3dc4d4e30d96b8370b9188ebba1a0313ee23e220663 +size 4920054295 diff --git a/model-00023-of-00085.safetensors b/model-00023-of-00085.safetensors new file mode 100644 index 
0000000000000000000000000000000000000000..c9e9f4148e5432bef7ea6ff96b0f33ef479e7c0b --- /dev/null +++ b/model-00023-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:011e88d862af2cb7ff4ff42d5f286a88aedb839d9ac7aa4a0f6d7179c2f8c1d0 +size 5240851718 diff --git a/model-00024-of-00085.safetensors b/model-00024-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c3896a93abc538d8c898a8561185e4cfa1ca7fdb --- /dev/null +++ b/model-00024-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6aedca0cffc1f823896d6f6e9baf8c3e21f37d26722552057233d258a8eaea1d +size 4920054277 diff --git a/model-00025-of-00085.safetensors b/model-00025-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1f556bf411d153bd1552ff3d5c6cdad893b6ba0d --- /dev/null +++ b/model-00025-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97ce4cd15cd717a10c012ba333ef9b084169984af773a228a79bc02d1dde31d7 +size 5240851748 diff --git a/model-00026-of-00085.safetensors b/model-00026-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e8cdfce6aed94d1b4a3c737d2a3719f3bfc26bd9 --- /dev/null +++ b/model-00026-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:939586e0aa81d0ff160a438c9c55b868afaf74e5e5f71d874f84a4dad9c10c9d +size 4920054299 diff --git a/model-00027-of-00085.safetensors b/model-00027-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3ce9ff3feff2ad522481ad9c1fb3f13855a46c5c --- /dev/null +++ b/model-00027-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce77d29930d3a5563b9803c3b83f851a7ff80999e35055e2baa314ad6fcdf4ad +size 5240851752 diff --git a/model-00028-of-00085.safetensors b/model-00028-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4543aaad89a67aa812fe2808649cccaea0cba6f6 --- /dev/null +++ b/model-00028-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa84520aacdc1f5d9c8e6c4b9538e0798e19a4185d7157f79e0b564c4105580c +size 4920054293 diff --git a/model-00029-of-00085.safetensors b/model-00029-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..42fb92ad007535711a05174cb165c160b9adde67 --- /dev/null +++ b/model-00029-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4088232f1a436445b68852ad0c6156d3d7ad8d9e2d67903d1e7a373f5860b2fe +size 5240851758 diff --git a/model-00030-of-00085.safetensors b/model-00030-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..271774ff2a8859bacfe7532bcf7cac4e7bc65585 --- /dev/null +++ b/model-00030-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:884d63eba3e5d0473c109398a39afec40fce192dc76a0be104fd4c52a24c9d6d +size 4920054303 diff --git a/model-00031-of-00085.safetensors b/model-00031-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b0a5cb7e7c0e3b5449345af312a80c256a6979ed --- /dev/null +++ b/model-00031-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c6c50aba95bbb1f789482dadfc49cdf94fe473509d4ed928d8ec61b2015d7f8 +size 5240851754 diff --git a/model-00032-of-00085.safetensors b/model-00032-of-00085.safetensors 
new file mode 100644 index 0000000000000000000000000000000000000000..1b6250393b9c82f736aa2952271533c772cf80c6 --- /dev/null +++ b/model-00032-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d8034cc3a6d97320cc47f2b14378e0ac275498c2f1694535a0d6a1fa726f27b +size 4920054293 diff --git a/model-00033-of-00085.safetensors b/model-00033-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..614ea0d2eb958948d45d389c2308f4a89eaad5ce --- /dev/null +++ b/model-00033-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28d3dd1b082981b42cad3ac48fcaa6bcad3828a243710afe1e204c500adf6077 +size 5240851736 diff --git a/model-00034-of-00085.safetensors b/model-00034-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..793858164ff73d67d18db9465888ec3309071ead --- /dev/null +++ b/model-00034-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:425fb0b396fd8eaa2b07074b6167a6208473d96b6d5baec3302f51784caacbcf +size 4920054297 diff --git a/model-00035-of-00085.safetensors b/model-00035-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5ac80865339c89a48456865b2640f723361e7cfc --- /dev/null +++ b/model-00035-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0306d34be6816ccc9aa9b799eaf4dd868c344d6c66038cf5766840064fa78939 +size 5240851738 diff --git a/model-00036-of-00085.safetensors b/model-00036-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5a1f68d78b90684cf2cd985cf584538e169792eb --- /dev/null +++ b/model-00036-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3166c9fab9ea51010ce4f032046452d369e91b83e0a0e732661b2ec7ab993b37 +size 4920054307 diff --git a/model-00037-of-00085.safetensors b/model-00037-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c1963e632ba4a0f8c89f1fe3179a849f731d1957 --- /dev/null +++ b/model-00037-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc4dcb3881237bd5f4e790bad14198c18a47e5984e680d90adfd9523fc40e434 +size 5240851740 diff --git a/model-00038-of-00085.safetensors b/model-00038-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d9d22c5afea59e1d59006625458dfac6984bbc3f --- /dev/null +++ b/model-00038-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de9b7ad4c3f699ba4b748cdd9e7ebf9e06f53350b3b970fb8b85a196f3980ec9 +size 4920054295 diff --git a/model-00039-of-00085.safetensors b/model-00039-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..76bcce73143391fe3a3f4b88ff3a7ea25a77d856 --- /dev/null +++ b/model-00039-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d273dafdb32ace293e303c03b15661c30e3332869fa3e1994f4c958d231f940 +size 5240851744 diff --git a/model-00040-of-00085.safetensors b/model-00040-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3dd9125d0046f3d5ce8fd85c4cf6f0d96f11c9be --- /dev/null +++ b/model-00040-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dac485160ae8dbcc3e9bdde65977c80e6970fd25f63ff5472003cf578de8ca7e +size 4920054287 diff --git a/model-00041-of-00085.safetensors 
b/model-00041-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8e1a062aa66c1ed8a1b4c35e0629f2b1d014ca03 --- /dev/null +++ b/model-00041-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:02be49d042a2713e366b9ece55512bbe40330455cf8d115f0578e59f9b4904e1 +size 5240851746 diff --git a/model-00042-of-00085.safetensors b/model-00042-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3ddc54276816b2834c3783a2270428a03d27e8ce --- /dev/null +++ b/model-00042-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55269c808b38f98630a2202ea197cbc463a26062af497fa9905e5de7b4213537 +size 4920054285 diff --git a/model-00043-of-00085.safetensors b/model-00043-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..72efbc9cb01e8c7c233f93be6da57cd170891ba1 --- /dev/null +++ b/model-00043-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21fe289e6e828c73eb8f67bfe32f764916aee4366b1e7d854a2dfdbec99da885 +size 5240851734 diff --git a/model-00044-of-00085.safetensors b/model-00044-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..315efbbe2f9914f82123aa3e05f4f0b9bc937d23 --- /dev/null +++ b/model-00044-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfaa1859192e341f9a3b3ae08a8e606a3c3c1d341c7a46207efb207d0832fb08 +size 4920054295 diff --git a/model-00045-of-00085.safetensors b/model-00045-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8541f3f1705eff83b331d74309fb9811ea9f8e00 --- /dev/null +++ b/model-00045-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bf31ebc92475ebcf4eac0173d60fc22caf319ed18400a44175400cec361f61c +size 5240851758 diff --git a/model-00046-of-00085.safetensors b/model-00046-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f5dbeca8266dfbf87352aead10171e6edeb8495d --- /dev/null +++ b/model-00046-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7fa85fda28e19ffe73bed3520318ca3bb9d2fa6d77f5940107df8f301bcfc10e +size 4920054283 diff --git a/model-00047-of-00085.safetensors b/model-00047-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7accc960a30926aa671b6cb14319cbe27d776562 --- /dev/null +++ b/model-00047-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d19b9ebdbee901296016886af8bebeb59e8c5fd94c90a7e37fe7614edf60000 +size 5240851750 diff --git a/model-00048-of-00085.safetensors b/model-00048-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b2907d9121ad760799b863dcf244ef8da7336dac --- /dev/null +++ b/model-00048-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6effa959b39554ddbd9c7ef5ad953932ec601374a22de567cdeedcdb0e1d810 +size 4920054301 diff --git a/model-00049-of-00085.safetensors b/model-00049-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..74533b0a5174aa101ca46a18251bca1cf53fc41b --- /dev/null +++ b/model-00049-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a0df4907acb20280c1d38022901763a5d9438db9b6d6466a409f59b15d79256 +size 5240851748 diff --git 
a/model-00050-of-00085.safetensors b/model-00050-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8b1071eda838af69fe86eb930ebc71630ddb68d6 --- /dev/null +++ b/model-00050-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e0c273d0e15898bc91bccf31c0a704462a01ea1d58ce4509c1e4ef9c00098647 +size 4920054303 diff --git a/model-00051-of-00085.safetensors b/model-00051-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6fd80c5a097cad48a0b2f1f568d887b6c8d84e9d --- /dev/null +++ b/model-00051-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59a6c2d97adddf83ce2b89ccbedb8a752f49b8799b3877a7f944ce3854f98df1 +size 5240851754 diff --git a/model-00052-of-00085.safetensors b/model-00052-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9201f54070a9076a4cbf274312a582d843ef697c --- /dev/null +++ b/model-00052-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c952d8c86e8802cac723932c91d4c300416adf8ca1be55e70d21d0a9693ed2ab +size 4920054295 diff --git a/model-00053-of-00085.safetensors b/model-00053-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..80b040888b856d245e50bd481266be1329291d96 --- /dev/null +++ b/model-00053-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6deba8f3895decbde8365775c277d8538c58634489dff6b89ec597dd393e5d57 +size 5240851742 diff --git a/model-00054-of-00085.safetensors b/model-00054-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7eec1f2d3c1f8915e78e759fdc9b6df9dd53c0e6 --- /dev/null +++ b/model-00054-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab41d5f101ab6fa6302775ce0ca552d7f9bbe21dd701564d6c4f2a5a7f456923 +size 4920054301 diff --git a/model-00055-of-00085.safetensors b/model-00055-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e7eb8f326c9cac0abe833451a31353163830da30 --- /dev/null +++ b/model-00055-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:652db4430c80be047d83b55eb4e48e63ae41deb32d24cd12f85e2e2ec5393373 +size 5240851738 diff --git a/model-00056-of-00085.safetensors b/model-00056-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d92509bf970500f73aa45092d9fac242b5262ace --- /dev/null +++ b/model-00056-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a44d82af2734f2dee70566f1e27e379d4be7b0a172c0f3b372c885666dd023f1 +size 4920054305 diff --git a/model-00057-of-00085.safetensors b/model-00057-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e4ddfd33410c95e0c9ea3e2ddcf62fe5e56a3cdf --- /dev/null +++ b/model-00057-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d81cbc0598ecdf6f40e473c0e7232d10f26801cc08f5053ff9dc9d2031c310d +size 5240851742 diff --git a/model-00058-of-00085.safetensors b/model-00058-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c3eeab50af52f848ceead9ecd5cbbc606fb589c2 --- /dev/null +++ b/model-00058-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2448963838ee33fb856fbcc9f1cdb3df57f3628e0f816bcdf7cd4c1d72a56a52 
+size 4920054293 diff --git a/model-00059-of-00085.safetensors b/model-00059-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2b2ce88f2bf981d288a03ff2ec546f5102872da9 --- /dev/null +++ b/model-00059-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1f1cdafbde84ccc65288774e850c62e505542bd956268da1660e72519b45be85 +size 5240851746 diff --git a/model-00060-of-00085.safetensors b/model-00060-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2cc8848d1f2a08d27a73c7ae5192a0dffb5b276a --- /dev/null +++ b/model-00060-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7a7384881bd9437b8cb30ee4ac6897ce4a16e8da130f446f680058de1c92838c +size 4920054309 diff --git a/model-00061-of-00085.safetensors b/model-00061-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4592b09d9c5255aeb4e93c4646798281f552d422 --- /dev/null +++ b/model-00061-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c63d3b83c0ac4692c705411dfae36f7b6d9a64efd899bdffa70e5110becb15f +size 5240851752 diff --git a/model-00062-of-00085.safetensors b/model-00062-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9d73050841b06ba60189146315eab4b54da753b7 --- /dev/null +++ b/model-00062-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:355213dcbe73231107667d006abdd0e4ea44f70a07884d30defc57b5a646b4e9 +size 4920054303 diff --git a/model-00063-of-00085.safetensors b/model-00063-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f44daabd54c4c4813092565337572680eae67766 --- /dev/null +++ b/model-00063-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e73e9d47f03b4b7809a48bc4e58f35bc8b2cca5c36f81c2af697d1447f6fd10 +size 5240851744 diff --git a/model-00064-of-00085.safetensors b/model-00064-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fc13a9dd585c82a4c7442025450f2184f32386d8 --- /dev/null +++ b/model-00064-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8425126e14c1c2b6980d14a0771288da3992fa088c21ed5e0c3c58d60206e1a8 +size 4920054281 diff --git a/model-00065-of-00085.safetensors b/model-00065-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..86c2b7c7d5eacff78654351d806036ceb5b931ae --- /dev/null +++ b/model-00065-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f8389103d1947191055231b3b76f8e732150f0024e4e5ee5f641255ca2ab574a +size 5240851746 diff --git a/model-00066-of-00085.safetensors b/model-00066-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c7e596728fdb2a9b613f2109d23d1d5120ac13b8 --- /dev/null +++ b/model-00066-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:96eba38c87e91fbbd66b1d5744778da567888f4285213a3ccb0f876ece39c5ea +size 4920054299 diff --git a/model-00067-of-00085.safetensors b/model-00067-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b3dd2daa478f6a5386905b72316687c5820822ab --- /dev/null +++ b/model-00067-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:7f930762c5517575c7eefb32d9167310399fe8ec29671c0d5c1d217f15e289ca +size 5240851750 diff --git a/model-00068-of-00085.safetensors b/model-00068-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6d00fe6d9652f4a5fae8af6ba099a4cb21f76d16 --- /dev/null +++ b/model-00068-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ef733f5b93d5f776f65ca865a3112a62f2e6838b73825252e533777413324fd +size 4920054322 diff --git a/model-00069-of-00085.safetensors b/model-00069-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..85a722ef64bf5de919d2b2b5ad4b39a20bce7728 --- /dev/null +++ b/model-00069-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:acb6161655f07fcff36de9e74d82809816469c41758d10ca9a57b3d4a6b7249c +size 5240851781 diff --git a/model-00070-of-00085.safetensors b/model-00070-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d01b6aec7910050e3dfdd0d5d8e69d5128e14f77 --- /dev/null +++ b/model-00070-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b12334080ff5a4f5cadc71257ad3d6dce07fab18991301bc4e71b32b3feae57 +size 4920054343 diff --git a/model-00071-of-00085.safetensors b/model-00071-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d136463b2fa1cb4026c0231b4fedb50decf9f49a --- /dev/null +++ b/model-00071-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b308cd557a9a70ab9ad2d08f27c84d5a19badcfd15d10b74f4fba50252baf05 +size 5240851777 diff --git a/model-00072-of-00085.safetensors b/model-00072-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..09d457555136afa047e3eae4b9089a1d78cb0fb1 --- /dev/null +++ b/model-00072-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a82607794189d23b674637d7d987a6dddbacfded473c83a698b285e198c3557 +size 4920054347 diff --git a/model-00073-of-00085.safetensors b/model-00073-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5487b6f2063c67f35fe1b204e056d7352c437bfa --- /dev/null +++ b/model-00073-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec78badf70cf010c2a51e485b2c40d61fa0ab9891c58fdda801e9b438e959498 +size 5240851761 diff --git a/model-00074-of-00085.safetensors b/model-00074-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c1bc7bb5f4b6563ba83a76380227d066e3cfc9f2 --- /dev/null +++ b/model-00074-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f60484a70feac92c9ffd19d0c388de19d8e7b016ceef9f27fd2dc872c4f7be42 +size 4920054315 diff --git a/model-00075-of-00085.safetensors b/model-00075-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fe1d670068b9ba0f5bb392a78622afeb1bb32ad4 --- /dev/null +++ b/model-00075-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a4ce2319edfc9a63ba3566a2ccd07768d5d5d201fc2eda3e38b3a3388c6c4303 +size 5240851781 diff --git a/model-00076-of-00085.safetensors b/model-00076-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c36b53edcc8ef695ef2503c9de1b5cf277bdcf43 --- /dev/null +++ b/model-00076-of-00085.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:9b063db327ff374809ac0a53f7f6c0ec66ad5a265e1e87bab7872b0979609661 +size 4920054329 diff --git a/model-00077-of-00085.safetensors b/model-00077-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..43d509aa61cce2075572df4712db86994e599942 --- /dev/null +++ b/model-00077-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78cd7c63c4920c7aabb9c2b207316dcba3332e5cf5a7c2717e017228fc58673c +size 5240851783 diff --git a/model-00078-of-00085.safetensors b/model-00078-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e64a6e6367c0f2aceb96196c2b2fd455b0f553a5 --- /dev/null +++ b/model-00078-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:85e2fc9a470e10cbeb7e55159e1e94bffbdad73ae256d02936951765bcccdb3a +size 4920054345 diff --git a/model-00079-of-00085.safetensors b/model-00079-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9ace0609cba60053d2bf6e7a895972b056cd80e6 --- /dev/null +++ b/model-00079-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abfd1a999c34811b943f979e8caae01e0d1f658470c1217b1ac5348d1b0b5677 +size 5240851779 diff --git a/model-00080-of-00085.safetensors b/model-00080-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..75dde665968c2c7ff3b107e8582ef518271f55b2 --- /dev/null +++ b/model-00080-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7eb226ae26e14b81e453f6ad1cdce40e9d60d7dc02d05f19378c5cf1d0f99d23 +size 4920054343 diff --git a/model-00081-of-00085.safetensors b/model-00081-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2826974db1f0179502762cf94a94042c67d25d4e --- /dev/null +++ b/model-00081-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c5c88416abfea8874cac2810966561af02213d738105b163bfa06d9a1331746 +size 5240851785 diff --git a/model-00082-of-00085.safetensors b/model-00082-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ee91f09b646e4eb19f0f4af462c19c48ac765a9a --- /dev/null +++ b/model-00082-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:053b2c5c496b85f1e9b8bf851abe38e35a7a966d2ba6467be55741b5b8b3bba7 +size 4920054343 diff --git a/model-00083-of-00085.safetensors b/model-00083-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..78f807ced6f59f216b219583abc766dc66e7eb73 --- /dev/null +++ b/model-00083-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e19fcfb4a7e4313ad31752cd211fa00bbf0c1839f4b600f425def9a88823fb9 +size 5240851787 diff --git a/model-00084-of-00085.safetensors b/model-00084-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..216b18f2a5ac53f0d7787f101b3f243363b9be73 --- /dev/null +++ b/model-00084-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1898c91a52d805c4ac9d244116ffa474a4c7c8a9600f2319a5df9c261256e7d0 +size 4920054335 diff --git a/model-00085-of-00085.safetensors b/model-00085-of-00085.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4f7c4fc4d887136d5fe48b9db09e982f6b0f44c7 --- /dev/null +++ 
b/model-00085-of-00085.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b971d608633e169fd60e8a51aacd5e610601005e8a7156abd3706e22ad6f90d +size 5013603971 diff --git a/model.safetensors.index.json b/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..54a91ff5db126d52a8298a71288f6f6cee5ae0ec --- /dev/null +++ b/model.safetensors.index.json @@ -0,0 +1,2912 @@ +{ + "metadata": { + "total_size": 431223111680 + }, + "weight_map": { + "lm_head.biases": "model-00085-of-00085.safetensors", + "lm_head.scales": "model-00085-of-00085.safetensors", + "lm_head.weight": "model-00085-of-00085.safetensors", + "model.embed_tokens.biases": "model-00001-of-00085.safetensors", + "model.embed_tokens.scales": "model-00001-of-00085.safetensors", + "model.embed_tokens.weight": "model-00001-of-00085.safetensors", + "model.layers.0.input_layernorm.weight": "model-00002-of-00085.safetensors", + "model.layers.0.mlp.down_proj.biases": "model-00001-of-00085.safetensors", + "model.layers.0.mlp.down_proj.scales": "model-00001-of-00085.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00085.safetensors", + "model.layers.0.mlp.gate_proj.biases": "model-00001-of-00085.safetensors", + "model.layers.0.mlp.gate_proj.scales": "model-00001-of-00085.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00085.safetensors", + "model.layers.0.mlp.up_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.0.mlp.up_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00085.safetensors", + "model.layers.0.self_attn.k_proj.biases": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.k_proj.scales": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.o_proj.biases": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.o_proj.scales": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.q_proj.biases": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.q_proj.scales": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.v_proj.biases": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.v_proj.scales": "model-00001-of-00085.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00085.safetensors", + "model.layers.1.input_layernorm.weight": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.down_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.down_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.gate_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.gate_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.up_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.up_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00085.safetensors", + 
"model.layers.1.self_attn.k_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.k_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.o_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.o_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.q_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.q_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.v_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.v_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.10.input_layernorm.weight": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.down_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.down_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.gate_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.gate_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.up_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.up_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.k_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.k_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.o_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.o_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.q_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.q_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.v_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.v_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.100.input_layernorm.weight": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.down_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.down_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.down_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.gate_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.gate_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.gate_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.up_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.up_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.mlp.up_proj.weight": 
"model-00068-of-00085.safetensors", + "model.layers.100.post_attention_layernorm.weight": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.k_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.k_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.k_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.o_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.o_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.o_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.q_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.q_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.q_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.v_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.v_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.100.self_attn.v_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.101.input_layernorm.weight": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.down_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.down_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.down_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.gate_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.gate_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.gate_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.up_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.up_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.101.mlp.up_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.101.post_attention_layernorm.weight": "model-00069-of-00085.safetensors", + "model.layers.101.self_attn.k_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.k_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.k_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.o_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.o_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.o_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.q_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.q_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.q_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.v_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.v_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.101.self_attn.v_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.102.input_layernorm.weight": "model-00070-of-00085.safetensors", + "model.layers.102.mlp.down_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.102.mlp.down_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.102.mlp.down_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.102.mlp.gate_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.102.mlp.gate_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.102.mlp.gate_proj.weight": "model-00069-of-00085.safetensors", + 
"model.layers.102.mlp.up_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.102.mlp.up_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.102.mlp.up_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.102.post_attention_layernorm.weight": "model-00070-of-00085.safetensors", + "model.layers.102.self_attn.k_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.k_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.k_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.o_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.o_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.o_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.q_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.q_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.q_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.v_proj.biases": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.v_proj.scales": "model-00069-of-00085.safetensors", + "model.layers.102.self_attn.v_proj.weight": "model-00069-of-00085.safetensors", + "model.layers.103.input_layernorm.weight": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.down_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.down_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.down_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.gate_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.gate_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.gate_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.up_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.up_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.mlp.up_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.103.post_attention_layernorm.weight": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.k_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.k_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.k_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.o_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.o_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.o_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.q_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.q_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.q_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.v_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.v_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.103.self_attn.v_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.104.input_layernorm.weight": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.down_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.down_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.down_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.gate_proj.biases": 
"model-00071-of-00085.safetensors", + "model.layers.104.mlp.gate_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.gate_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.up_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.up_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.104.mlp.up_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.104.post_attention_layernorm.weight": "model-00071-of-00085.safetensors", + "model.layers.104.self_attn.k_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.k_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.k_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.o_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.o_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.o_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.q_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.q_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.q_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.v_proj.biases": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.v_proj.scales": "model-00070-of-00085.safetensors", + "model.layers.104.self_attn.v_proj.weight": "model-00070-of-00085.safetensors", + "model.layers.105.input_layernorm.weight": "model-00072-of-00085.safetensors", + "model.layers.105.mlp.down_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.105.mlp.down_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.105.mlp.down_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.105.mlp.gate_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.105.mlp.gate_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.105.mlp.gate_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.105.mlp.up_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.105.mlp.up_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.105.mlp.up_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.105.post_attention_layernorm.weight": "model-00072-of-00085.safetensors", + "model.layers.105.self_attn.k_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.k_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.k_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.o_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.o_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.o_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.q_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.q_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.q_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.v_proj.biases": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.v_proj.scales": "model-00071-of-00085.safetensors", + "model.layers.105.self_attn.v_proj.weight": "model-00071-of-00085.safetensors", + "model.layers.106.input_layernorm.weight": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.down_proj.biases": "model-00072-of-00085.safetensors", + 
"model.layers.106.mlp.down_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.down_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.gate_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.gate_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.gate_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.up_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.up_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.mlp.up_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.106.post_attention_layernorm.weight": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.k_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.k_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.k_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.o_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.o_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.o_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.q_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.q_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.q_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.v_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.v_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.106.self_attn.v_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.107.input_layernorm.weight": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.down_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.down_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.down_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.gate_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.gate_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.gate_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.up_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.up_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.107.mlp.up_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.107.post_attention_layernorm.weight": "model-00073-of-00085.safetensors", + "model.layers.107.self_attn.k_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.k_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.k_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.o_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.o_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.o_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.q_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.q_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.q_proj.weight": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.v_proj.biases": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.v_proj.scales": "model-00072-of-00085.safetensors", + "model.layers.107.self_attn.v_proj.weight": 
"model-00072-of-00085.safetensors", + "model.layers.108.input_layernorm.weight": "model-00074-of-00085.safetensors", + "model.layers.108.mlp.down_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.108.mlp.down_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.108.mlp.down_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.108.mlp.gate_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.108.mlp.gate_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.108.mlp.gate_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.108.mlp.up_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.108.mlp.up_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.108.mlp.up_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.108.post_attention_layernorm.weight": "model-00074-of-00085.safetensors", + "model.layers.108.self_attn.k_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.k_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.k_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.o_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.o_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.o_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.q_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.q_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.q_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.v_proj.biases": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.v_proj.scales": "model-00073-of-00085.safetensors", + "model.layers.108.self_attn.v_proj.weight": "model-00073-of-00085.safetensors", + "model.layers.109.input_layernorm.weight": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.down_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.down_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.down_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.gate_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.gate_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.gate_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.up_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.up_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.mlp.up_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.109.post_attention_layernorm.weight": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.k_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.k_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.k_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.o_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.o_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.o_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.q_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.q_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.q_proj.weight": "model-00074-of-00085.safetensors", + 
"model.layers.109.self_attn.v_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.v_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.109.self_attn.v_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.11.input_layernorm.weight": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.down_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.down_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.gate_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.gate_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.up_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.up_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00009-of-00085.safetensors", + "model.layers.11.self_attn.k_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.k_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.o_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.o_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.q_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.q_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.v_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.v_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.110.input_layernorm.weight": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.down_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.down_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.down_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.gate_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.gate_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.gate_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.up_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.up_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.110.mlp.up_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.110.post_attention_layernorm.weight": "model-00075-of-00085.safetensors", + "model.layers.110.self_attn.k_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.k_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.k_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.o_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.o_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.o_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.q_proj.biases": "model-00074-of-00085.safetensors", + 
"model.layers.110.self_attn.q_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.q_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.v_proj.biases": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.v_proj.scales": "model-00074-of-00085.safetensors", + "model.layers.110.self_attn.v_proj.weight": "model-00074-of-00085.safetensors", + "model.layers.111.input_layernorm.weight": "model-00076-of-00085.safetensors", + "model.layers.111.mlp.down_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.111.mlp.down_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.111.mlp.down_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.111.mlp.gate_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.111.mlp.gate_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.111.mlp.gate_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.111.mlp.up_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.111.mlp.up_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.111.mlp.up_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.111.post_attention_layernorm.weight": "model-00076-of-00085.safetensors", + "model.layers.111.self_attn.k_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.k_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.k_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.o_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.o_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.o_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.q_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.q_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.q_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.v_proj.biases": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.v_proj.scales": "model-00075-of-00085.safetensors", + "model.layers.111.self_attn.v_proj.weight": "model-00075-of-00085.safetensors", + "model.layers.112.input_layernorm.weight": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.down_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.down_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.down_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.gate_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.gate_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.gate_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.up_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.up_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.112.mlp.up_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.112.post_attention_layernorm.weight": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.k_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.k_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.k_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.o_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.o_proj.scales": "model-00076-of-00085.safetensors", 
+ "model.layers.112.self_attn.o_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.q_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.q_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.q_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.v_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.v_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.112.self_attn.v_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.113.input_layernorm.weight": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.down_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.down_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.down_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.gate_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.gate_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.gate_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.up_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.up_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.113.mlp.up_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.113.post_attention_layernorm.weight": "model-00077-of-00085.safetensors", + "model.layers.113.self_attn.k_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.k_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.k_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.o_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.o_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.o_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.q_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.q_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.q_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.v_proj.biases": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.v_proj.scales": "model-00076-of-00085.safetensors", + "model.layers.113.self_attn.v_proj.weight": "model-00076-of-00085.safetensors", + "model.layers.114.input_layernorm.weight": "model-00078-of-00085.safetensors", + "model.layers.114.mlp.down_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.114.mlp.down_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.114.mlp.down_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.114.mlp.gate_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.114.mlp.gate_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.114.mlp.gate_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.114.mlp.up_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.114.mlp.up_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.114.mlp.up_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.114.post_attention_layernorm.weight": "model-00078-of-00085.safetensors", + "model.layers.114.self_attn.k_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.k_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.k_proj.weight": 
"model-00077-of-00085.safetensors", + "model.layers.114.self_attn.o_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.o_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.o_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.q_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.q_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.q_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.v_proj.biases": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.v_proj.scales": "model-00077-of-00085.safetensors", + "model.layers.114.self_attn.v_proj.weight": "model-00077-of-00085.safetensors", + "model.layers.115.input_layernorm.weight": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.down_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.down_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.down_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.gate_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.gate_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.gate_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.up_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.up_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.mlp.up_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.115.post_attention_layernorm.weight": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.k_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.k_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.k_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.o_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.o_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.o_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.q_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.q_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.q_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.v_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.v_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.115.self_attn.v_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.116.input_layernorm.weight": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.down_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.down_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.down_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.gate_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.gate_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.gate_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.up_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.up_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.116.mlp.up_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.116.post_attention_layernorm.weight": "model-00079-of-00085.safetensors", + 
"model.layers.116.self_attn.k_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.k_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.k_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.o_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.o_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.o_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.q_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.q_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.q_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.v_proj.biases": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.v_proj.scales": "model-00078-of-00085.safetensors", + "model.layers.116.self_attn.v_proj.weight": "model-00078-of-00085.safetensors", + "model.layers.117.input_layernorm.weight": "model-00080-of-00085.safetensors", + "model.layers.117.mlp.down_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.117.mlp.down_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.117.mlp.down_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.117.mlp.gate_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.117.mlp.gate_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.117.mlp.gate_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.117.mlp.up_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.117.mlp.up_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.117.mlp.up_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.117.post_attention_layernorm.weight": "model-00080-of-00085.safetensors", + "model.layers.117.self_attn.k_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.k_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.k_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.o_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.o_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.o_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.q_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.q_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.q_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.v_proj.biases": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.v_proj.scales": "model-00079-of-00085.safetensors", + "model.layers.117.self_attn.v_proj.weight": "model-00079-of-00085.safetensors", + "model.layers.118.input_layernorm.weight": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.down_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.down_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.down_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.gate_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.gate_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.gate_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.up_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.mlp.up_proj.scales": "model-00080-of-00085.safetensors", + 
"model.layers.118.mlp.up_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.118.post_attention_layernorm.weight": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.k_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.k_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.k_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.o_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.o_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.o_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.q_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.q_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.q_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.v_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.v_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.118.self_attn.v_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.119.input_layernorm.weight": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.down_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.down_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.down_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.gate_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.gate_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.gate_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.up_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.up_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.119.mlp.up_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.119.post_attention_layernorm.weight": "model-00081-of-00085.safetensors", + "model.layers.119.self_attn.k_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.k_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.k_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.o_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.o_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.o_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.q_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.q_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.q_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.v_proj.biases": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.v_proj.scales": "model-00080-of-00085.safetensors", + "model.layers.119.self_attn.v_proj.weight": "model-00080-of-00085.safetensors", + "model.layers.12.input_layernorm.weight": "model-00010-of-00085.safetensors", + "model.layers.12.mlp.down_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.12.mlp.down_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.12.mlp.gate_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.12.mlp.gate_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.12.mlp.gate_proj.weight": 
"model-00009-of-00085.safetensors", + "model.layers.12.mlp.up_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.12.mlp.up_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00010-of-00085.safetensors", + "model.layers.12.self_attn.k_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.k_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.o_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.o_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.q_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.q_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.v_proj.biases": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.v_proj.scales": "model-00009-of-00085.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00009-of-00085.safetensors", + "model.layers.120.input_layernorm.weight": "model-00082-of-00085.safetensors", + "model.layers.120.mlp.down_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.120.mlp.down_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.120.mlp.down_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.120.mlp.gate_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.120.mlp.gate_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.120.mlp.gate_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.120.mlp.up_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.120.mlp.up_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.120.mlp.up_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.120.post_attention_layernorm.weight": "model-00082-of-00085.safetensors", + "model.layers.120.self_attn.k_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.k_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.k_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.o_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.o_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.o_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.q_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.q_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.q_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.v_proj.biases": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.v_proj.scales": "model-00081-of-00085.safetensors", + "model.layers.120.self_attn.v_proj.weight": "model-00081-of-00085.safetensors", + "model.layers.121.input_layernorm.weight": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.down_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.down_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.down_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.gate_proj.biases": 
"model-00082-of-00085.safetensors", + "model.layers.121.mlp.gate_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.gate_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.up_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.up_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.mlp.up_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.121.post_attention_layernorm.weight": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.k_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.k_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.k_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.o_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.o_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.o_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.q_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.q_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.q_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.v_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.v_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.121.self_attn.v_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.122.input_layernorm.weight": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.down_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.down_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.down_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.gate_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.gate_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.gate_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.up_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.up_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.122.mlp.up_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.122.post_attention_layernorm.weight": "model-00083-of-00085.safetensors", + "model.layers.122.self_attn.k_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.k_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.k_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.o_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.o_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.o_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.q_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.q_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.q_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.v_proj.biases": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.v_proj.scales": "model-00082-of-00085.safetensors", + "model.layers.122.self_attn.v_proj.weight": "model-00082-of-00085.safetensors", + "model.layers.123.input_layernorm.weight": "model-00084-of-00085.safetensors", + "model.layers.123.mlp.down_proj.biases": "model-00083-of-00085.safetensors", + 
"model.layers.123.mlp.down_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.123.mlp.down_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.123.mlp.gate_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.123.mlp.gate_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.123.mlp.gate_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.123.mlp.up_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.123.mlp.up_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.123.mlp.up_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.123.post_attention_layernorm.weight": "model-00084-of-00085.safetensors", + "model.layers.123.self_attn.k_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.k_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.k_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.o_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.o_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.o_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.q_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.q_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.q_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.v_proj.biases": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.v_proj.scales": "model-00083-of-00085.safetensors", + "model.layers.123.self_attn.v_proj.weight": "model-00083-of-00085.safetensors", + "model.layers.124.input_layernorm.weight": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.down_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.down_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.down_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.gate_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.gate_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.gate_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.up_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.up_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.mlp.up_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.124.post_attention_layernorm.weight": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.k_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.k_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.k_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.o_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.o_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.o_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.q_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.q_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.q_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.v_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.v_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.124.self_attn.v_proj.weight": 
"model-00084-of-00085.safetensors", + "model.layers.125.input_layernorm.weight": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.down_proj.biases": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.down_proj.scales": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.down_proj.weight": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.gate_proj.biases": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.gate_proj.scales": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.gate_proj.weight": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.up_proj.biases": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.up_proj.scales": "model-00085-of-00085.safetensors", + "model.layers.125.mlp.up_proj.weight": "model-00085-of-00085.safetensors", + "model.layers.125.post_attention_layernorm.weight": "model-00085-of-00085.safetensors", + "model.layers.125.self_attn.k_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.k_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.k_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.o_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.o_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.o_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.q_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.q_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.q_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.v_proj.biases": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.v_proj.scales": "model-00084-of-00085.safetensors", + "model.layers.125.self_attn.v_proj.weight": "model-00084-of-00085.safetensors", + "model.layers.13.input_layernorm.weight": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.down_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.down_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.gate_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.gate_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.up_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.up_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.k_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.k_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.o_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.o_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.q_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.q_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.v_proj.biases": 
"model-00010-of-00085.safetensors", + "model.layers.13.self_attn.v_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.14.input_layernorm.weight": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.down_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.down_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.gate_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.gate_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.up_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.up_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00011-of-00085.safetensors", + "model.layers.14.self_attn.k_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.k_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.o_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.o_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.q_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.q_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.v_proj.biases": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.v_proj.scales": "model-00010-of-00085.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00010-of-00085.safetensors", + "model.layers.15.input_layernorm.weight": "model-00012-of-00085.safetensors", + "model.layers.15.mlp.down_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.15.mlp.down_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.15.mlp.gate_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.15.mlp.gate_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.15.mlp.up_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.15.mlp.up_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00012-of-00085.safetensors", + "model.layers.15.self_attn.k_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.k_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.o_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.o_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.q_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.q_proj.scales": "model-00011-of-00085.safetensors", + 
"model.layers.15.self_attn.q_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.v_proj.biases": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.v_proj.scales": "model-00011-of-00085.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00011-of-00085.safetensors", + "model.layers.16.input_layernorm.weight": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.down_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.down_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.gate_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.gate_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.up_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.up_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.k_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.k_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.o_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.o_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.q_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.q_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.v_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.v_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.17.input_layernorm.weight": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.down_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.down_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.gate_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.gate_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.up_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.up_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00013-of-00085.safetensors", + "model.layers.17.self_attn.k_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.k_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.o_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.o_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.q_proj.biases": 
"model-00012-of-00085.safetensors", + "model.layers.17.self_attn.q_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.v_proj.biases": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.v_proj.scales": "model-00012-of-00085.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00012-of-00085.safetensors", + "model.layers.18.input_layernorm.weight": "model-00014-of-00085.safetensors", + "model.layers.18.mlp.down_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.18.mlp.down_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.18.mlp.gate_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.18.mlp.gate_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.18.mlp.up_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.18.mlp.up_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00014-of-00085.safetensors", + "model.layers.18.self_attn.k_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.k_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.o_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.o_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.q_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.q_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.v_proj.biases": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.v_proj.scales": "model-00013-of-00085.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00013-of-00085.safetensors", + "model.layers.19.input_layernorm.weight": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.down_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.down_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.gate_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.gate_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.up_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.up_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.k_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.k_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.o_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.o_proj.scales": "model-00014-of-00085.safetensors", + 
"model.layers.19.self_attn.o_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.q_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.q_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.v_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.v_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.2.input_layernorm.weight": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.down_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.down_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.gate_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.gate_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.up_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.up_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00003-of-00085.safetensors", + "model.layers.2.self_attn.k_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.k_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.o_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.o_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.q_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.q_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.v_proj.biases": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.v_proj.scales": "model-00002-of-00085.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00085.safetensors", + "model.layers.20.input_layernorm.weight": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.down_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.down_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.gate_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.gate_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.up_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.up_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00015-of-00085.safetensors", + "model.layers.20.self_attn.k_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.k_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.o_proj.biases": 
"model-00014-of-00085.safetensors", + "model.layers.20.self_attn.o_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.q_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.q_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.v_proj.biases": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.v_proj.scales": "model-00014-of-00085.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00014-of-00085.safetensors", + "model.layers.21.input_layernorm.weight": "model-00016-of-00085.safetensors", + "model.layers.21.mlp.down_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.21.mlp.down_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.21.mlp.gate_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.21.mlp.gate_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.21.mlp.up_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.21.mlp.up_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00016-of-00085.safetensors", + "model.layers.21.self_attn.k_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.k_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.o_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.o_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.q_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.q_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.v_proj.biases": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.v_proj.scales": "model-00015-of-00085.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00015-of-00085.safetensors", + "model.layers.22.input_layernorm.weight": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.down_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.down_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.gate_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.gate_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.up_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.up_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.k_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.k_proj.scales": "model-00016-of-00085.safetensors", + 
"model.layers.22.self_attn.k_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.o_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.o_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.q_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.q_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.v_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.v_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.23.input_layernorm.weight": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.down_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.down_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.gate_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.gate_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.up_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.up_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00017-of-00085.safetensors", + "model.layers.23.self_attn.k_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.k_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.o_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.o_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.q_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.q_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.v_proj.biases": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.v_proj.scales": "model-00016-of-00085.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00016-of-00085.safetensors", + "model.layers.24.input_layernorm.weight": "model-00018-of-00085.safetensors", + "model.layers.24.mlp.down_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.24.mlp.down_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.24.mlp.gate_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.24.mlp.gate_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.24.mlp.up_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.24.mlp.up_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00018-of-00085.safetensors", + "model.layers.24.self_attn.k_proj.biases": 
"model-00017-of-00085.safetensors", + "model.layers.24.self_attn.k_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.o_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.o_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.q_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.q_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.v_proj.biases": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.v_proj.scales": "model-00017-of-00085.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00017-of-00085.safetensors", + "model.layers.25.input_layernorm.weight": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.down_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.down_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.gate_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.gate_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.up_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.up_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.k_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.k_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.o_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.o_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.q_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.q_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.v_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.v_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.26.input_layernorm.weight": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.down_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.down_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.gate_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.gate_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.up_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.up_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00019-of-00085.safetensors", + 
"model.layers.26.post_attention_layernorm.weight": "model-00019-of-00085.safetensors", + "model.layers.26.self_attn.k_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.k_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.o_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.o_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.q_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.q_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.v_proj.biases": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.v_proj.scales": "model-00018-of-00085.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00018-of-00085.safetensors", + "model.layers.27.input_layernorm.weight": "model-00020-of-00085.safetensors", + "model.layers.27.mlp.down_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.27.mlp.down_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.27.mlp.gate_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.27.mlp.gate_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.27.mlp.up_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.27.mlp.up_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00020-of-00085.safetensors", + "model.layers.27.self_attn.k_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.k_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.o_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.o_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.q_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.q_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.v_proj.biases": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.v_proj.scales": "model-00019-of-00085.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00019-of-00085.safetensors", + "model.layers.28.input_layernorm.weight": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.down_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.down_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.gate_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.gate_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.up_proj.biases": "model-00020-of-00085.safetensors", + 
"model.layers.28.mlp.up_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.k_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.k_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.o_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.o_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.q_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.q_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.v_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.v_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.29.input_layernorm.weight": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.down_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.down_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.gate_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.gate_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.up_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.up_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00021-of-00085.safetensors", + "model.layers.29.self_attn.k_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.k_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.o_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.o_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.q_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.q_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.v_proj.biases": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.v_proj.scales": "model-00020-of-00085.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00020-of-00085.safetensors", + "model.layers.3.input_layernorm.weight": "model-00004-of-00085.safetensors", + "model.layers.3.mlp.down_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.3.mlp.down_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.3.mlp.gate_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.3.mlp.gate_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.3.mlp.gate_proj.weight": 
"model-00003-of-00085.safetensors", + "model.layers.3.mlp.up_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.3.mlp.up_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00085.safetensors", + "model.layers.3.self_attn.k_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.k_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.o_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.o_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.q_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.q_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.v_proj.biases": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.v_proj.scales": "model-00003-of-00085.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00003-of-00085.safetensors", + "model.layers.30.input_layernorm.weight": "model-00022-of-00085.safetensors", + "model.layers.30.mlp.down_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.30.mlp.down_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.30.mlp.gate_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.30.mlp.gate_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.30.mlp.up_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.30.mlp.up_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00022-of-00085.safetensors", + "model.layers.30.self_attn.k_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.k_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.o_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.o_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.q_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.q_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.v_proj.biases": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.v_proj.scales": "model-00021-of-00085.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00021-of-00085.safetensors", + "model.layers.31.input_layernorm.weight": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.down_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.down_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.gate_proj.biases": "model-00022-of-00085.safetensors", + 
"model.layers.31.mlp.gate_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.up_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.up_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.k_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.k_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.o_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.o_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.q_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.q_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.v_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.v_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.32.input_layernorm.weight": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.down_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.down_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.gate_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.gate_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.up_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.up_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00023-of-00085.safetensors", + "model.layers.32.self_attn.k_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.k_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.o_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.o_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.o_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.q_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.q_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.v_proj.biases": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.v_proj.scales": "model-00022-of-00085.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00022-of-00085.safetensors", + "model.layers.33.input_layernorm.weight": "model-00024-of-00085.safetensors", + "model.layers.33.mlp.down_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.33.mlp.down_proj.scales": "model-00023-of-00085.safetensors", + 
"model.layers.33.mlp.down_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.33.mlp.gate_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.33.mlp.gate_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.33.mlp.up_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.33.mlp.up_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00024-of-00085.safetensors", + "model.layers.33.self_attn.k_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.k_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.o_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.o_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.q_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.q_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.v_proj.biases": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.v_proj.scales": "model-00023-of-00085.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00023-of-00085.safetensors", + "model.layers.34.input_layernorm.weight": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.down_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.down_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.gate_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.gate_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.up_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.up_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.k_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.k_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.o_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.o_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.q_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.q_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.v_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.v_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.35.input_layernorm.weight": "model-00025-of-00085.safetensors", + 
"model.layers.35.mlp.down_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.down_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.gate_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.gate_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.up_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.up_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00025-of-00085.safetensors", + "model.layers.35.self_attn.k_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.k_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.o_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.o_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.q_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.q_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.v_proj.biases": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.v_proj.scales": "model-00024-of-00085.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00024-of-00085.safetensors", + "model.layers.36.input_layernorm.weight": "model-00026-of-00085.safetensors", + "model.layers.36.mlp.down_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.36.mlp.down_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.36.mlp.gate_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.36.mlp.gate_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.36.mlp.up_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.36.mlp.up_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00026-of-00085.safetensors", + "model.layers.36.self_attn.k_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.k_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.k_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.o_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.o_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.q_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.q_proj.scales": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.v_proj.biases": "model-00025-of-00085.safetensors", + "model.layers.36.self_attn.v_proj.scales": "model-00025-of-00085.safetensors", + 
"model.layers.36.self_attn.v_proj.weight": "model-00025-of-00085.safetensors", + "model.layers.37.input_layernorm.weight": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.down_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.down_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.gate_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.gate_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.up_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.up_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.k_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.k_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.o_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.o_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.q_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.q_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.v_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.v_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.38.input_layernorm.weight": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.down_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.down_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.gate_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.gate_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.up_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.up_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00027-of-00085.safetensors", + "model.layers.38.self_attn.k_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.k_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.o_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.o_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.q_proj.biases": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.q_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.v_proj.biases": 
"model-00026-of-00085.safetensors", + "model.layers.38.self_attn.v_proj.scales": "model-00026-of-00085.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00026-of-00085.safetensors", + "model.layers.39.input_layernorm.weight": "model-00028-of-00085.safetensors", + "model.layers.39.mlp.down_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.39.mlp.down_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.39.mlp.gate_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.39.mlp.gate_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.39.mlp.up_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.39.mlp.up_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00028-of-00085.safetensors", + "model.layers.39.self_attn.k_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.k_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.o_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.o_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.q_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.q_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.v_proj.biases": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.v_proj.scales": "model-00027-of-00085.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00027-of-00085.safetensors", + "model.layers.4.input_layernorm.weight": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.down_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.down_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.gate_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.gate_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.up_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.up_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.k_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.k_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.o_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.o_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.q_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.q_proj.scales": "model-00004-of-00085.safetensors", + 
"model.layers.4.self_attn.q_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.v_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.v_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.40.input_layernorm.weight": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.down_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.down_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.gate_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.gate_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.up_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.up_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.k_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.k_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.o_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.o_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.q_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.q_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.v_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.v_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.41.input_layernorm.weight": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.down_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.down_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.gate_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.gate_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.up_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.up_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00029-of-00085.safetensors", + "model.layers.41.self_attn.k_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.k_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.o_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.o_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.q_proj.biases": 
"model-00028-of-00085.safetensors", + "model.layers.41.self_attn.q_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.v_proj.biases": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.v_proj.scales": "model-00028-of-00085.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00028-of-00085.safetensors", + "model.layers.42.input_layernorm.weight": "model-00030-of-00085.safetensors", + "model.layers.42.mlp.down_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.42.mlp.down_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.42.mlp.down_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.42.mlp.gate_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.42.mlp.gate_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.42.mlp.gate_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.42.mlp.up_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.42.mlp.up_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.42.mlp.up_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.42.post_attention_layernorm.weight": "model-00030-of-00085.safetensors", + "model.layers.42.self_attn.k_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.k_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.k_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.o_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.o_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.o_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.q_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.q_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.q_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.v_proj.biases": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.v_proj.scales": "model-00029-of-00085.safetensors", + "model.layers.42.self_attn.v_proj.weight": "model-00029-of-00085.safetensors", + "model.layers.43.input_layernorm.weight": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.down_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.down_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.down_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.gate_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.gate_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.gate_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.up_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.up_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.43.mlp.up_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.43.post_attention_layernorm.weight": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.k_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.k_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.k_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.o_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.o_proj.scales": "model-00030-of-00085.safetensors", + 
"model.layers.43.self_attn.o_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.q_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.q_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.q_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.v_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.v_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.43.self_attn.v_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.44.input_layernorm.weight": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.down_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.down_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.down_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.gate_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.gate_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.gate_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.up_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.up_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.44.mlp.up_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.44.post_attention_layernorm.weight": "model-00031-of-00085.safetensors", + "model.layers.44.self_attn.k_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.k_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.k_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.o_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.o_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.o_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.q_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.q_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.q_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.v_proj.biases": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.v_proj.scales": "model-00030-of-00085.safetensors", + "model.layers.44.self_attn.v_proj.weight": "model-00030-of-00085.safetensors", + "model.layers.45.input_layernorm.weight": "model-00032-of-00085.safetensors", + "model.layers.45.mlp.down_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.45.mlp.down_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.45.mlp.down_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.45.mlp.gate_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.45.mlp.gate_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.45.mlp.gate_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.45.mlp.up_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.45.mlp.up_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.45.mlp.up_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.45.post_attention_layernorm.weight": "model-00032-of-00085.safetensors", + "model.layers.45.self_attn.k_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.k_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.k_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.o_proj.biases": 
"model-00031-of-00085.safetensors", + "model.layers.45.self_attn.o_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.o_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.q_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.q_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.q_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.v_proj.biases": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.v_proj.scales": "model-00031-of-00085.safetensors", + "model.layers.45.self_attn.v_proj.weight": "model-00031-of-00085.safetensors", + "model.layers.46.input_layernorm.weight": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.down_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.down_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.down_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.gate_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.gate_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.gate_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.up_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.up_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.mlp.up_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.46.post_attention_layernorm.weight": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.k_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.k_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.k_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.o_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.o_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.o_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.q_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.q_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.q_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.v_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.v_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.46.self_attn.v_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.47.input_layernorm.weight": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.down_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.down_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.down_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.gate_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.gate_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.gate_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.up_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.up_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.47.mlp.up_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.47.post_attention_layernorm.weight": "model-00033-of-00085.safetensors", + "model.layers.47.self_attn.k_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.k_proj.scales": "model-00032-of-00085.safetensors", + 
"model.layers.47.self_attn.k_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.o_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.o_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.o_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.q_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.q_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.q_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.v_proj.biases": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.v_proj.scales": "model-00032-of-00085.safetensors", + "model.layers.47.self_attn.v_proj.weight": "model-00032-of-00085.safetensors", + "model.layers.48.input_layernorm.weight": "model-00034-of-00085.safetensors", + "model.layers.48.mlp.down_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.48.mlp.down_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.48.mlp.down_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.48.mlp.gate_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.48.mlp.gate_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.48.mlp.gate_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.48.mlp.up_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.48.mlp.up_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.48.mlp.up_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.48.post_attention_layernorm.weight": "model-00034-of-00085.safetensors", + "model.layers.48.self_attn.k_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.k_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.k_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.o_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.o_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.o_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.q_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.q_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.q_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.v_proj.biases": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.v_proj.scales": "model-00033-of-00085.safetensors", + "model.layers.48.self_attn.v_proj.weight": "model-00033-of-00085.safetensors", + "model.layers.49.input_layernorm.weight": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.down_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.down_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.down_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.gate_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.gate_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.gate_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.up_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.up_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.mlp.up_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.49.post_attention_layernorm.weight": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.k_proj.biases": 
"model-00034-of-00085.safetensors", + "model.layers.49.self_attn.k_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.k_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.o_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.o_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.o_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.q_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.q_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.q_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.v_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.v_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.49.self_attn.v_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.5.input_layernorm.weight": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.down_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.down_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.gate_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.gate_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.up_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.up_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00005-of-00085.safetensors", + "model.layers.5.self_attn.k_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.k_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.o_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.o_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.q_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.q_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.v_proj.biases": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.v_proj.scales": "model-00004-of-00085.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00004-of-00085.safetensors", + "model.layers.50.input_layernorm.weight": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.down_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.down_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.down_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.gate_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.gate_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.gate_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.up_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.up_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.50.mlp.up_proj.weight": "model-00035-of-00085.safetensors", + 
"model.layers.50.post_attention_layernorm.weight": "model-00035-of-00085.safetensors", + "model.layers.50.self_attn.k_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.k_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.k_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.o_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.o_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.o_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.q_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.q_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.q_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.v_proj.biases": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.v_proj.scales": "model-00034-of-00085.safetensors", + "model.layers.50.self_attn.v_proj.weight": "model-00034-of-00085.safetensors", + "model.layers.51.input_layernorm.weight": "model-00036-of-00085.safetensors", + "model.layers.51.mlp.down_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.51.mlp.down_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.51.mlp.down_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.51.mlp.gate_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.51.mlp.gate_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.51.mlp.gate_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.51.mlp.up_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.51.mlp.up_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.51.mlp.up_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.51.post_attention_layernorm.weight": "model-00036-of-00085.safetensors", + "model.layers.51.self_attn.k_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.k_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.k_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.o_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.o_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.o_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.q_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.q_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.q_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.v_proj.biases": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.v_proj.scales": "model-00035-of-00085.safetensors", + "model.layers.51.self_attn.v_proj.weight": "model-00035-of-00085.safetensors", + "model.layers.52.input_layernorm.weight": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.down_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.down_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.down_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.gate_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.gate_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.gate_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.up_proj.biases": "model-00036-of-00085.safetensors", + 
"model.layers.52.mlp.up_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.mlp.up_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.52.post_attention_layernorm.weight": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.k_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.k_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.k_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.o_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.o_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.o_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.q_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.q_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.q_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.v_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.v_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.52.self_attn.v_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.53.input_layernorm.weight": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.down_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.down_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.down_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.gate_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.gate_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.gate_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.up_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.up_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.53.mlp.up_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.53.post_attention_layernorm.weight": "model-00037-of-00085.safetensors", + "model.layers.53.self_attn.k_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.k_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.k_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.o_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.o_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.o_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.q_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.q_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.q_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.v_proj.biases": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.v_proj.scales": "model-00036-of-00085.safetensors", + "model.layers.53.self_attn.v_proj.weight": "model-00036-of-00085.safetensors", + "model.layers.54.input_layernorm.weight": "model-00038-of-00085.safetensors", + "model.layers.54.mlp.down_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.54.mlp.down_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.54.mlp.down_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.54.mlp.gate_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.54.mlp.gate_proj.scales": "model-00037-of-00085.safetensors", + 
"model.layers.54.mlp.gate_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.54.mlp.up_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.54.mlp.up_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.54.mlp.up_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.54.post_attention_layernorm.weight": "model-00038-of-00085.safetensors", + "model.layers.54.self_attn.k_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.k_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.k_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.o_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.o_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.o_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.q_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.q_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.q_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.v_proj.biases": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.v_proj.scales": "model-00037-of-00085.safetensors", + "model.layers.54.self_attn.v_proj.weight": "model-00037-of-00085.safetensors", + "model.layers.55.input_layernorm.weight": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.down_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.down_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.down_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.gate_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.gate_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.gate_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.up_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.up_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.mlp.up_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.55.post_attention_layernorm.weight": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.k_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.k_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.k_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.o_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.o_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.o_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.q_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.q_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.q_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.v_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.v_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.55.self_attn.v_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.56.input_layernorm.weight": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.down_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.down_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.down_proj.weight": "model-00039-of-00085.safetensors", + 
"model.layers.56.mlp.gate_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.gate_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.gate_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.up_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.up_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.56.mlp.up_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.56.post_attention_layernorm.weight": "model-00039-of-00085.safetensors", + "model.layers.56.self_attn.k_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.k_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.k_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.o_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.o_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.o_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.q_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.q_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.q_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.v_proj.biases": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.v_proj.scales": "model-00038-of-00085.safetensors", + "model.layers.56.self_attn.v_proj.weight": "model-00038-of-00085.safetensors", + "model.layers.57.input_layernorm.weight": "model-00040-of-00085.safetensors", + "model.layers.57.mlp.down_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.57.mlp.down_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.57.mlp.down_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.57.mlp.gate_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.57.mlp.gate_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.57.mlp.gate_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.57.mlp.up_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.57.mlp.up_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.57.mlp.up_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.57.post_attention_layernorm.weight": "model-00040-of-00085.safetensors", + "model.layers.57.self_attn.k_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.k_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.k_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.o_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.o_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.o_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.q_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.q_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.q_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.v_proj.biases": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.v_proj.scales": "model-00039-of-00085.safetensors", + "model.layers.57.self_attn.v_proj.weight": "model-00039-of-00085.safetensors", + "model.layers.58.input_layernorm.weight": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.down_proj.biases": "model-00040-of-00085.safetensors", + 
"model.layers.58.mlp.down_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.down_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.gate_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.gate_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.gate_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.up_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.up_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.mlp.up_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.58.post_attention_layernorm.weight": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.k_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.k_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.k_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.o_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.o_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.o_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.q_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.q_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.q_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.v_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.v_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.58.self_attn.v_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.59.input_layernorm.weight": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.down_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.down_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.down_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.gate_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.gate_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.gate_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.up_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.up_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.59.mlp.up_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.59.post_attention_layernorm.weight": "model-00041-of-00085.safetensors", + "model.layers.59.self_attn.k_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.k_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.k_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.o_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.o_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.o_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.q_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.q_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.q_proj.weight": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.v_proj.biases": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.v_proj.scales": "model-00040-of-00085.safetensors", + "model.layers.59.self_attn.v_proj.weight": "model-00040-of-00085.safetensors", + 
"model.layers.6.input_layernorm.weight": "model-00006-of-00085.safetensors", + "model.layers.6.mlp.down_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.6.mlp.down_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.6.mlp.gate_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.6.mlp.gate_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.6.mlp.up_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.6.mlp.up_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00006-of-00085.safetensors", + "model.layers.6.self_attn.k_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.k_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.o_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.o_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.q_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.q_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.v_proj.biases": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.v_proj.scales": "model-00005-of-00085.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00005-of-00085.safetensors", + "model.layers.60.input_layernorm.weight": "model-00042-of-00085.safetensors", + "model.layers.60.mlp.down_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.60.mlp.down_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.60.mlp.down_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.60.mlp.gate_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.60.mlp.gate_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.60.mlp.gate_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.60.mlp.up_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.60.mlp.up_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.60.mlp.up_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.60.post_attention_layernorm.weight": "model-00042-of-00085.safetensors", + "model.layers.60.self_attn.k_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.k_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.k_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.o_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.o_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.o_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.q_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.q_proj.scales": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.q_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.v_proj.biases": "model-00041-of-00085.safetensors", + "model.layers.60.self_attn.v_proj.scales": 
"model-00041-of-00085.safetensors", + "model.layers.60.self_attn.v_proj.weight": "model-00041-of-00085.safetensors", + "model.layers.61.input_layernorm.weight": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.down_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.down_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.down_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.gate_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.gate_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.gate_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.up_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.up_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.mlp.up_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.61.post_attention_layernorm.weight": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.k_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.k_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.k_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.o_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.o_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.o_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.q_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.q_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.q_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.v_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.v_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.61.self_attn.v_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.62.input_layernorm.weight": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.down_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.down_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.down_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.gate_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.gate_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.gate_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.up_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.up_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.62.mlp.up_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.62.post_attention_layernorm.weight": "model-00043-of-00085.safetensors", + "model.layers.62.self_attn.k_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.k_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.k_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.o_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.o_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.o_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.q_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.q_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.q_proj.weight": "model-00042-of-00085.safetensors", + 
"model.layers.62.self_attn.v_proj.biases": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.v_proj.scales": "model-00042-of-00085.safetensors", + "model.layers.62.self_attn.v_proj.weight": "model-00042-of-00085.safetensors", + "model.layers.63.input_layernorm.weight": "model-00044-of-00085.safetensors", + "model.layers.63.mlp.down_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.63.mlp.down_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.63.mlp.down_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.63.mlp.gate_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.63.mlp.gate_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.63.mlp.gate_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.63.mlp.up_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.63.mlp.up_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.63.mlp.up_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.63.post_attention_layernorm.weight": "model-00044-of-00085.safetensors", + "model.layers.63.self_attn.k_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.k_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.k_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.o_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.o_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.o_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.q_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.q_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.q_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.v_proj.biases": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.v_proj.scales": "model-00043-of-00085.safetensors", + "model.layers.63.self_attn.v_proj.weight": "model-00043-of-00085.safetensors", + "model.layers.64.input_layernorm.weight": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.down_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.down_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.down_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.gate_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.gate_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.gate_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.up_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.up_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.64.mlp.up_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.64.post_attention_layernorm.weight": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.k_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.k_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.k_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.o_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.o_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.o_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.q_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.q_proj.scales": 
"model-00044-of-00085.safetensors", + "model.layers.64.self_attn.q_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.v_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.v_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.64.self_attn.v_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.65.input_layernorm.weight": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.down_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.down_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.down_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.gate_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.gate_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.gate_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.up_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.up_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.65.mlp.up_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.65.post_attention_layernorm.weight": "model-00045-of-00085.safetensors", + "model.layers.65.self_attn.k_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.k_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.k_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.o_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.o_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.o_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.q_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.q_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.q_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.v_proj.biases": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.v_proj.scales": "model-00044-of-00085.safetensors", + "model.layers.65.self_attn.v_proj.weight": "model-00044-of-00085.safetensors", + "model.layers.66.input_layernorm.weight": "model-00046-of-00085.safetensors", + "model.layers.66.mlp.down_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.66.mlp.down_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.66.mlp.down_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.66.mlp.gate_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.66.mlp.gate_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.66.mlp.gate_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.66.mlp.up_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.66.mlp.up_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.66.mlp.up_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.66.post_attention_layernorm.weight": "model-00046-of-00085.safetensors", + "model.layers.66.self_attn.k_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.k_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.k_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.o_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.o_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.o_proj.weight": "model-00045-of-00085.safetensors", + 
"model.layers.66.self_attn.q_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.q_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.q_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.v_proj.biases": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.v_proj.scales": "model-00045-of-00085.safetensors", + "model.layers.66.self_attn.v_proj.weight": "model-00045-of-00085.safetensors", + "model.layers.67.input_layernorm.weight": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.down_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.down_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.down_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.gate_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.gate_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.gate_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.up_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.up_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.mlp.up_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.67.post_attention_layernorm.weight": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.k_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.k_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.k_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.o_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.o_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.o_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.q_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.q_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.q_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.v_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.v_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.67.self_attn.v_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.68.input_layernorm.weight": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.down_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.down_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.down_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.gate_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.gate_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.gate_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.up_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.up_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.68.mlp.up_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.68.post_attention_layernorm.weight": "model-00047-of-00085.safetensors", + "model.layers.68.self_attn.k_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.k_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.k_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.o_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.o_proj.scales": 
"model-00046-of-00085.safetensors", + "model.layers.68.self_attn.o_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.q_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.q_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.q_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.v_proj.biases": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.v_proj.scales": "model-00046-of-00085.safetensors", + "model.layers.68.self_attn.v_proj.weight": "model-00046-of-00085.safetensors", + "model.layers.69.input_layernorm.weight": "model-00048-of-00085.safetensors", + "model.layers.69.mlp.down_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.69.mlp.down_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.69.mlp.down_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.69.mlp.gate_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.69.mlp.gate_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.69.mlp.gate_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.69.mlp.up_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.69.mlp.up_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.69.mlp.up_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.69.post_attention_layernorm.weight": "model-00048-of-00085.safetensors", + "model.layers.69.self_attn.k_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.k_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.k_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.o_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.o_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.o_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.q_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.q_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.q_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.v_proj.biases": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.v_proj.scales": "model-00047-of-00085.safetensors", + "model.layers.69.self_attn.v_proj.weight": "model-00047-of-00085.safetensors", + "model.layers.7.input_layernorm.weight": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.down_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.down_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.gate_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.gate_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.up_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.up_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.k_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.k_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00006-of-00085.safetensors", + 
"model.layers.7.self_attn.o_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.o_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.q_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.q_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.v_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.v_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.70.input_layernorm.weight": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.down_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.down_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.down_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.gate_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.gate_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.gate_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.up_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.up_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.mlp.up_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.70.post_attention_layernorm.weight": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.k_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.k_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.k_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.o_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.o_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.o_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.q_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.q_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.q_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.v_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.v_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.70.self_attn.v_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.71.input_layernorm.weight": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.down_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.down_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.down_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.gate_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.gate_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.gate_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.up_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.up_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.71.mlp.up_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.71.post_attention_layernorm.weight": "model-00049-of-00085.safetensors", + "model.layers.71.self_attn.k_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.k_proj.scales": 
"model-00048-of-00085.safetensors", + "model.layers.71.self_attn.k_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.o_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.o_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.o_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.q_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.q_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.q_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.v_proj.biases": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.v_proj.scales": "model-00048-of-00085.safetensors", + "model.layers.71.self_attn.v_proj.weight": "model-00048-of-00085.safetensors", + "model.layers.72.input_layernorm.weight": "model-00050-of-00085.safetensors", + "model.layers.72.mlp.down_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.72.mlp.down_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.72.mlp.down_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.72.mlp.gate_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.72.mlp.gate_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.72.mlp.gate_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.72.mlp.up_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.72.mlp.up_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.72.mlp.up_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.72.post_attention_layernorm.weight": "model-00050-of-00085.safetensors", + "model.layers.72.self_attn.k_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.k_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.k_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.o_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.o_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.o_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.q_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.q_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.q_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.v_proj.biases": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.v_proj.scales": "model-00049-of-00085.safetensors", + "model.layers.72.self_attn.v_proj.weight": "model-00049-of-00085.safetensors", + "model.layers.73.input_layernorm.weight": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.down_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.down_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.down_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.gate_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.gate_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.gate_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.up_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.up_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.mlp.up_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.73.post_attention_layernorm.weight": "model-00050-of-00085.safetensors", + 
"model.layers.73.self_attn.k_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.k_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.k_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.o_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.o_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.o_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.q_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.q_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.q_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.v_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.v_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.73.self_attn.v_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.74.input_layernorm.weight": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.down_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.down_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.down_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.gate_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.gate_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.gate_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.up_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.up_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.74.mlp.up_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.74.post_attention_layernorm.weight": "model-00051-of-00085.safetensors", + "model.layers.74.self_attn.k_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.k_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.k_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.o_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.o_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.o_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.q_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.q_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.q_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.v_proj.biases": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.v_proj.scales": "model-00050-of-00085.safetensors", + "model.layers.74.self_attn.v_proj.weight": "model-00050-of-00085.safetensors", + "model.layers.75.input_layernorm.weight": "model-00052-of-00085.safetensors", + "model.layers.75.mlp.down_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.75.mlp.down_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.75.mlp.down_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.75.mlp.gate_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.75.mlp.gate_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.75.mlp.gate_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.75.mlp.up_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.75.mlp.up_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.75.mlp.up_proj.weight": 
"model-00052-of-00085.safetensors", + "model.layers.75.post_attention_layernorm.weight": "model-00052-of-00085.safetensors", + "model.layers.75.self_attn.k_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.k_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.k_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.o_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.o_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.o_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.q_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.q_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.q_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.v_proj.biases": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.v_proj.scales": "model-00051-of-00085.safetensors", + "model.layers.75.self_attn.v_proj.weight": "model-00051-of-00085.safetensors", + "model.layers.76.input_layernorm.weight": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.down_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.down_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.down_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.gate_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.gate_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.gate_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.up_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.up_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.mlp.up_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.76.post_attention_layernorm.weight": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.k_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.k_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.k_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.o_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.o_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.o_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.q_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.q_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.q_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.v_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.v_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.76.self_attn.v_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.77.input_layernorm.weight": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.down_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.down_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.down_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.gate_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.gate_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.gate_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.up_proj.biases": 
"model-00053-of-00085.safetensors", + "model.layers.77.mlp.up_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.77.mlp.up_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.77.post_attention_layernorm.weight": "model-00053-of-00085.safetensors", + "model.layers.77.self_attn.k_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.k_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.k_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.o_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.o_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.o_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.q_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.q_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.q_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.v_proj.biases": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.v_proj.scales": "model-00052-of-00085.safetensors", + "model.layers.77.self_attn.v_proj.weight": "model-00052-of-00085.safetensors", + "model.layers.78.input_layernorm.weight": "model-00054-of-00085.safetensors", + "model.layers.78.mlp.down_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.78.mlp.down_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.78.mlp.down_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.78.mlp.gate_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.78.mlp.gate_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.78.mlp.gate_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.78.mlp.up_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.78.mlp.up_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.78.mlp.up_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.78.post_attention_layernorm.weight": "model-00054-of-00085.safetensors", + "model.layers.78.self_attn.k_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.k_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.k_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.o_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.o_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.o_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.q_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.q_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.q_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.v_proj.biases": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.v_proj.scales": "model-00053-of-00085.safetensors", + "model.layers.78.self_attn.v_proj.weight": "model-00053-of-00085.safetensors", + "model.layers.79.input_layernorm.weight": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.down_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.down_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.down_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.gate_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.gate_proj.scales": 
"model-00054-of-00085.safetensors", + "model.layers.79.mlp.gate_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.up_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.up_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.79.mlp.up_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.79.post_attention_layernorm.weight": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.k_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.k_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.k_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.o_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.o_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.o_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.q_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.q_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.q_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.v_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.v_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.79.self_attn.v_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.8.input_layernorm.weight": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.down_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.down_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.gate_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.gate_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.up_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.up_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00007-of-00085.safetensors", + "model.layers.8.self_attn.k_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.k_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.o_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.o_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.q_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.q_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.v_proj.biases": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.v_proj.scales": "model-00006-of-00085.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00006-of-00085.safetensors", + "model.layers.80.input_layernorm.weight": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.down_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.down_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.down_proj.weight": "model-00055-of-00085.safetensors", + 
"model.layers.80.mlp.gate_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.gate_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.gate_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.up_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.up_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.80.mlp.up_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.80.post_attention_layernorm.weight": "model-00055-of-00085.safetensors", + "model.layers.80.self_attn.k_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.k_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.k_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.o_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.o_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.o_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.q_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.q_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.q_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.v_proj.biases": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.v_proj.scales": "model-00054-of-00085.safetensors", + "model.layers.80.self_attn.v_proj.weight": "model-00054-of-00085.safetensors", + "model.layers.81.input_layernorm.weight": "model-00056-of-00085.safetensors", + "model.layers.81.mlp.down_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.81.mlp.down_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.81.mlp.down_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.81.mlp.gate_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.81.mlp.gate_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.81.mlp.gate_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.81.mlp.up_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.81.mlp.up_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.81.mlp.up_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.81.post_attention_layernorm.weight": "model-00056-of-00085.safetensors", + "model.layers.81.self_attn.k_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.k_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.k_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.o_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.o_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.o_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.q_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.q_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.q_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.v_proj.biases": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.v_proj.scales": "model-00055-of-00085.safetensors", + "model.layers.81.self_attn.v_proj.weight": "model-00055-of-00085.safetensors", + "model.layers.82.input_layernorm.weight": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.down_proj.biases": "model-00056-of-00085.safetensors", + 
"model.layers.82.mlp.down_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.down_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.gate_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.gate_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.gate_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.up_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.up_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.mlp.up_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.82.post_attention_layernorm.weight": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.k_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.k_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.k_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.o_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.o_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.o_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.q_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.q_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.q_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.v_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.v_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.82.self_attn.v_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.83.input_layernorm.weight": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.down_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.down_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.down_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.gate_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.gate_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.gate_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.up_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.up_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.83.mlp.up_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.83.post_attention_layernorm.weight": "model-00057-of-00085.safetensors", + "model.layers.83.self_attn.k_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.k_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.k_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.o_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.o_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.o_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.q_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.q_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.q_proj.weight": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.v_proj.biases": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.v_proj.scales": "model-00056-of-00085.safetensors", + "model.layers.83.self_attn.v_proj.weight": "model-00056-of-00085.safetensors", + 
"model.layers.84.input_layernorm.weight": "model-00058-of-00085.safetensors", + "model.layers.84.mlp.down_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.84.mlp.down_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.84.mlp.down_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.84.mlp.gate_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.84.mlp.gate_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.84.mlp.gate_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.84.mlp.up_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.84.mlp.up_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.84.mlp.up_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.84.post_attention_layernorm.weight": "model-00058-of-00085.safetensors", + "model.layers.84.self_attn.k_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.k_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.k_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.o_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.o_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.o_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.q_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.q_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.q_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.v_proj.biases": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.v_proj.scales": "model-00057-of-00085.safetensors", + "model.layers.84.self_attn.v_proj.weight": "model-00057-of-00085.safetensors", + "model.layers.85.input_layernorm.weight": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.down_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.down_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.down_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.gate_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.gate_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.gate_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.up_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.up_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.85.mlp.up_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.85.post_attention_layernorm.weight": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.k_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.k_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.k_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.o_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.o_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.o_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.q_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.q_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.q_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.v_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.85.self_attn.v_proj.scales": 
"model-00058-of-00085.safetensors", + "model.layers.85.self_attn.v_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.86.input_layernorm.weight": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.down_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.down_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.down_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.gate_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.gate_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.gate_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.up_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.up_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.86.mlp.up_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.86.post_attention_layernorm.weight": "model-00059-of-00085.safetensors", + "model.layers.86.self_attn.k_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.k_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.k_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.o_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.o_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.o_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.q_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.q_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.q_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.v_proj.biases": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.v_proj.scales": "model-00058-of-00085.safetensors", + "model.layers.86.self_attn.v_proj.weight": "model-00058-of-00085.safetensors", + "model.layers.87.input_layernorm.weight": "model-00060-of-00085.safetensors", + "model.layers.87.mlp.down_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.87.mlp.down_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.87.mlp.down_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.87.mlp.gate_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.87.mlp.gate_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.87.mlp.gate_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.87.mlp.up_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.87.mlp.up_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.87.mlp.up_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.87.post_attention_layernorm.weight": "model-00060-of-00085.safetensors", + "model.layers.87.self_attn.k_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.k_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.k_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.o_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.o_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.o_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.q_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.q_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.q_proj.weight": "model-00059-of-00085.safetensors", + 
"model.layers.87.self_attn.v_proj.biases": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.v_proj.scales": "model-00059-of-00085.safetensors", + "model.layers.87.self_attn.v_proj.weight": "model-00059-of-00085.safetensors", + "model.layers.88.input_layernorm.weight": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.down_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.down_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.down_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.gate_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.gate_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.gate_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.up_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.up_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.mlp.up_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.88.post_attention_layernorm.weight": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.k_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.k_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.k_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.o_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.o_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.o_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.q_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.q_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.q_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.v_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.v_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.88.self_attn.v_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.89.input_layernorm.weight": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.down_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.down_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.down_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.gate_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.gate_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.gate_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.up_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.up_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.89.mlp.up_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.89.post_attention_layernorm.weight": "model-00061-of-00085.safetensors", + "model.layers.89.self_attn.k_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.k_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.k_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.o_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.o_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.o_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.q_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.q_proj.scales": 
"model-00060-of-00085.safetensors", + "model.layers.89.self_attn.q_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.v_proj.biases": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.v_proj.scales": "model-00060-of-00085.safetensors", + "model.layers.89.self_attn.v_proj.weight": "model-00060-of-00085.safetensors", + "model.layers.9.input_layernorm.weight": "model-00008-of-00085.safetensors", + "model.layers.9.mlp.down_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.9.mlp.down_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.9.mlp.gate_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.9.mlp.gate_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.9.mlp.up_proj.biases": "model-00008-of-00085.safetensors", + "model.layers.9.mlp.up_proj.scales": "model-00008-of-00085.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00008-of-00085.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00008-of-00085.safetensors", + "model.layers.9.self_attn.k_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.k_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.o_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.o_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.q_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.q_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.v_proj.biases": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.v_proj.scales": "model-00007-of-00085.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00007-of-00085.safetensors", + "model.layers.90.input_layernorm.weight": "model-00062-of-00085.safetensors", + "model.layers.90.mlp.down_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.90.mlp.down_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.90.mlp.down_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.90.mlp.gate_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.90.mlp.gate_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.90.mlp.gate_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.90.mlp.up_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.90.mlp.up_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.90.mlp.up_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.90.post_attention_layernorm.weight": "model-00062-of-00085.safetensors", + "model.layers.90.self_attn.k_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.k_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.k_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.o_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.o_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.o_proj.weight": "model-00061-of-00085.safetensors", + 
"model.layers.90.self_attn.q_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.q_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.q_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.v_proj.biases": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.v_proj.scales": "model-00061-of-00085.safetensors", + "model.layers.90.self_attn.v_proj.weight": "model-00061-of-00085.safetensors", + "model.layers.91.input_layernorm.weight": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.down_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.down_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.down_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.gate_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.gate_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.gate_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.up_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.up_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.mlp.up_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.91.post_attention_layernorm.weight": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.k_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.k_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.k_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.o_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.o_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.o_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.q_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.q_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.q_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.v_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.v_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.91.self_attn.v_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.92.input_layernorm.weight": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.down_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.down_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.down_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.gate_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.gate_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.gate_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.up_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.up_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.92.mlp.up_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.92.post_attention_layernorm.weight": "model-00063-of-00085.safetensors", + "model.layers.92.self_attn.k_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.k_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.k_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.o_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.o_proj.scales": 
"model-00062-of-00085.safetensors", + "model.layers.92.self_attn.o_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.q_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.q_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.q_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.v_proj.biases": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.v_proj.scales": "model-00062-of-00085.safetensors", + "model.layers.92.self_attn.v_proj.weight": "model-00062-of-00085.safetensors", + "model.layers.93.input_layernorm.weight": "model-00064-of-00085.safetensors", + "model.layers.93.mlp.down_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.93.mlp.down_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.93.mlp.down_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.93.mlp.gate_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.93.mlp.gate_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.93.mlp.gate_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.93.mlp.up_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.93.mlp.up_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.93.mlp.up_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.93.post_attention_layernorm.weight": "model-00064-of-00085.safetensors", + "model.layers.93.self_attn.k_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.k_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.k_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.o_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.o_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.o_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.q_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.q_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.q_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.v_proj.biases": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.v_proj.scales": "model-00063-of-00085.safetensors", + "model.layers.93.self_attn.v_proj.weight": "model-00063-of-00085.safetensors", + "model.layers.94.input_layernorm.weight": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.down_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.down_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.down_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.gate_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.gate_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.gate_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.up_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.up_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.mlp.up_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.94.post_attention_layernorm.weight": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.k_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.k_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.k_proj.weight": "model-00064-of-00085.safetensors", + 
"model.layers.94.self_attn.o_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.o_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.o_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.q_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.q_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.q_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.v_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.v_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.94.self_attn.v_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.95.input_layernorm.weight": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.down_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.down_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.down_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.gate_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.gate_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.gate_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.up_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.up_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.95.mlp.up_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.95.post_attention_layernorm.weight": "model-00065-of-00085.safetensors", + "model.layers.95.self_attn.k_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.k_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.k_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.o_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.o_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.o_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.q_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.q_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.q_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.v_proj.biases": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.v_proj.scales": "model-00064-of-00085.safetensors", + "model.layers.95.self_attn.v_proj.weight": "model-00064-of-00085.safetensors", + "model.layers.96.input_layernorm.weight": "model-00066-of-00085.safetensors", + "model.layers.96.mlp.down_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.96.mlp.down_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.96.mlp.down_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.96.mlp.gate_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.96.mlp.gate_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.96.mlp.gate_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.96.mlp.up_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.96.mlp.up_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.96.mlp.up_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.96.post_attention_layernorm.weight": "model-00066-of-00085.safetensors", + "model.layers.96.self_attn.k_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.k_proj.scales": 
"model-00065-of-00085.safetensors", + "model.layers.96.self_attn.k_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.o_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.o_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.o_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.q_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.q_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.q_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.v_proj.biases": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.v_proj.scales": "model-00065-of-00085.safetensors", + "model.layers.96.self_attn.v_proj.weight": "model-00065-of-00085.safetensors", + "model.layers.97.input_layernorm.weight": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.down_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.down_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.down_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.gate_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.gate_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.gate_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.up_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.up_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.mlp.up_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.97.post_attention_layernorm.weight": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.k_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.k_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.k_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.o_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.o_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.o_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.q_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.q_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.q_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.v_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.v_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.97.self_attn.v_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.98.input_layernorm.weight": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.down_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.down_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.down_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.gate_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.gate_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.gate_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.up_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.up_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.98.mlp.up_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.98.post_attention_layernorm.weight": "model-00067-of-00085.safetensors", + 
"model.layers.98.self_attn.k_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.k_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.k_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.o_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.o_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.o_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.q_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.q_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.q_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.v_proj.biases": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.v_proj.scales": "model-00066-of-00085.safetensors", + "model.layers.98.self_attn.v_proj.weight": "model-00066-of-00085.safetensors", + "model.layers.99.input_layernorm.weight": "model-00068-of-00085.safetensors", + "model.layers.99.mlp.down_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.99.mlp.down_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.99.mlp.down_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.99.mlp.gate_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.99.mlp.gate_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.99.mlp.gate_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.99.mlp.up_proj.biases": "model-00068-of-00085.safetensors", + "model.layers.99.mlp.up_proj.scales": "model-00068-of-00085.safetensors", + "model.layers.99.mlp.up_proj.weight": "model-00068-of-00085.safetensors", + "model.layers.99.post_attention_layernorm.weight": "model-00068-of-00085.safetensors", + "model.layers.99.self_attn.k_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.k_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.k_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.o_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.o_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.o_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.q_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.q_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.q_proj.weight": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.v_proj.biases": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.v_proj.scales": "model-00067-of-00085.safetensors", + "model.layers.99.self_attn.v_proj.weight": "model-00067-of-00085.safetensors", + "model.norm.weight": "model-00085-of-00085.safetensors" + } +} \ No newline at end of file diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..02ee80b6196926a5ad790a004d9efd6ab1ba6542 --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,16 @@ +{ + "bos_token": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/tokenizer.json b/tokenizer.json new file mode 100644 index 
0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2 --- /dev/null +++ b/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b +size 17209920 diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..db88166e2bc4c799fd5d1ae643b75e84d03ee70e --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,2062 @@ +{ + "added_tokens_decoder": { + "128000": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128001": { + "content": "<|end_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128002": { + "content": "<|reserved_special_token_0|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128003": { + "content": "<|reserved_special_token_1|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128004": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128005": { + "content": "<|reserved_special_token_2|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128006": { + "content": "<|start_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128007": { + "content": "<|end_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128008": { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128009": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128010": { + "content": "<|python_tag|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128011": { + "content": "<|reserved_special_token_3|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128012": { + "content": "<|reserved_special_token_4|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128013": { + "content": "<|reserved_special_token_5|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128014": { + "content": "<|reserved_special_token_6|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128015": { + "content": "<|reserved_special_token_7|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128016": { + "content": "<|reserved_special_token_8|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128017": { + "content": "<|reserved_special_token_9|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128018": { + "content": "<|reserved_special_token_10|>", + "lstrip": false, + "normalized": false, + "rstrip": 
false, + "single_word": false, + "special": true + }, + "128019": { + "content": "<|reserved_special_token_11|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128020": { + "content": "<|reserved_special_token_12|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128021": { + "content": "<|reserved_special_token_13|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128022": { + "content": "<|reserved_special_token_14|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128023": { + "content": "<|reserved_special_token_15|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128024": { + "content": "<|reserved_special_token_16|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128025": { + "content": "<|reserved_special_token_17|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128026": { + "content": "<|reserved_special_token_18|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128027": { + "content": "<|reserved_special_token_19|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128028": { + "content": "<|reserved_special_token_20|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128029": { + "content": "<|reserved_special_token_21|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128030": { + "content": "<|reserved_special_token_22|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128031": { + "content": "<|reserved_special_token_23|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128032": { + "content": "<|reserved_special_token_24|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128033": { + "content": "<|reserved_special_token_25|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128034": { + "content": "<|reserved_special_token_26|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128035": { + "content": "<|reserved_special_token_27|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128036": { + "content": "<|reserved_special_token_28|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128037": { + "content": "<|reserved_special_token_29|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128038": { + "content": "<|reserved_special_token_30|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128039": { + "content": "<|reserved_special_token_31|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": 
false, + "special": true + }, + "128040": { + "content": "<|reserved_special_token_32|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128041": { + "content": "<|reserved_special_token_33|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128042": { + "content": "<|reserved_special_token_34|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128043": { + "content": "<|reserved_special_token_35|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128044": { + "content": "<|reserved_special_token_36|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128045": { + "content": "<|reserved_special_token_37|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128046": { + "content": "<|reserved_special_token_38|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128047": { + "content": "<|reserved_special_token_39|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128048": { + "content": "<|reserved_special_token_40|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128049": { + "content": "<|reserved_special_token_41|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128050": { + "content": "<|reserved_special_token_42|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128051": { + "content": "<|reserved_special_token_43|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128052": { + "content": "<|reserved_special_token_44|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128053": { + "content": "<|reserved_special_token_45|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128054": { + "content": "<|reserved_special_token_46|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128055": { + "content": "<|reserved_special_token_47|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128056": { + "content": "<|reserved_special_token_48|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128057": { + "content": "<|reserved_special_token_49|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128058": { + "content": "<|reserved_special_token_50|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128059": { + "content": "<|reserved_special_token_51|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128060": { + "content": "<|reserved_special_token_52|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128061": { + "content": "<|reserved_special_token_53|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128062": { + "content": "<|reserved_special_token_54|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128063": { + "content": "<|reserved_special_token_55|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128064": { + "content": "<|reserved_special_token_56|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128065": { + "content": "<|reserved_special_token_57|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128066": { + "content": "<|reserved_special_token_58|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128067": { + "content": "<|reserved_special_token_59|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128068": { + "content": "<|reserved_special_token_60|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128069": { + "content": "<|reserved_special_token_61|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128070": { + "content": "<|reserved_special_token_62|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128071": { + "content": "<|reserved_special_token_63|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128072": { + "content": "<|reserved_special_token_64|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128073": { + "content": "<|reserved_special_token_65|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128074": { + "content": "<|reserved_special_token_66|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128075": { + "content": "<|reserved_special_token_67|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128076": { + "content": "<|reserved_special_token_68|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128077": { + "content": "<|reserved_special_token_69|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128078": { + "content": "<|reserved_special_token_70|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128079": { + "content": "<|reserved_special_token_71|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128080": { + "content": "<|reserved_special_token_72|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128081": { + "content": "<|reserved_special_token_73|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128082": { + 
"content": "<|reserved_special_token_74|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128083": { + "content": "<|reserved_special_token_75|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128084": { + "content": "<|reserved_special_token_76|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128085": { + "content": "<|reserved_special_token_77|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128086": { + "content": "<|reserved_special_token_78|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128087": { + "content": "<|reserved_special_token_79|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128088": { + "content": "<|reserved_special_token_80|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128089": { + "content": "<|reserved_special_token_81|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128090": { + "content": "<|reserved_special_token_82|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128091": { + "content": "<|reserved_special_token_83|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128092": { + "content": "<|reserved_special_token_84|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128093": { + "content": "<|reserved_special_token_85|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128094": { + "content": "<|reserved_special_token_86|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128095": { + "content": "<|reserved_special_token_87|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128096": { + "content": "<|reserved_special_token_88|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128097": { + "content": "<|reserved_special_token_89|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128098": { + "content": "<|reserved_special_token_90|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128099": { + "content": "<|reserved_special_token_91|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128100": { + "content": "<|reserved_special_token_92|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128101": { + "content": "<|reserved_special_token_93|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128102": { + "content": "<|reserved_special_token_94|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128103": { + "content": 
"<|reserved_special_token_95|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128104": { + "content": "<|reserved_special_token_96|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128105": { + "content": "<|reserved_special_token_97|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128106": { + "content": "<|reserved_special_token_98|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128107": { + "content": "<|reserved_special_token_99|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128108": { + "content": "<|reserved_special_token_100|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128109": { + "content": "<|reserved_special_token_101|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128110": { + "content": "<|reserved_special_token_102|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128111": { + "content": "<|reserved_special_token_103|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128112": { + "content": "<|reserved_special_token_104|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128113": { + "content": "<|reserved_special_token_105|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128114": { + "content": "<|reserved_special_token_106|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128115": { + "content": "<|reserved_special_token_107|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128116": { + "content": "<|reserved_special_token_108|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128117": { + "content": "<|reserved_special_token_109|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128118": { + "content": "<|reserved_special_token_110|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128119": { + "content": "<|reserved_special_token_111|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128120": { + "content": "<|reserved_special_token_112|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128121": { + "content": "<|reserved_special_token_113|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128122": { + "content": "<|reserved_special_token_114|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128123": { + "content": "<|reserved_special_token_115|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128124": { + "content": 
"<|reserved_special_token_116|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128125": { + "content": "<|reserved_special_token_117|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128126": { + "content": "<|reserved_special_token_118|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128127": { + "content": "<|reserved_special_token_119|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128128": { + "content": "<|reserved_special_token_120|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128129": { + "content": "<|reserved_special_token_121|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128130": { + "content": "<|reserved_special_token_122|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128131": { + "content": "<|reserved_special_token_123|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128132": { + "content": "<|reserved_special_token_124|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128133": { + "content": "<|reserved_special_token_125|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128134": { + "content": "<|reserved_special_token_126|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128135": { + "content": "<|reserved_special_token_127|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128136": { + "content": "<|reserved_special_token_128|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128137": { + "content": "<|reserved_special_token_129|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128138": { + "content": "<|reserved_special_token_130|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128139": { + "content": "<|reserved_special_token_131|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128140": { + "content": "<|reserved_special_token_132|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128141": { + "content": "<|reserved_special_token_133|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128142": { + "content": "<|reserved_special_token_134|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128143": { + "content": "<|reserved_special_token_135|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128144": { + "content": "<|reserved_special_token_136|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128145": { + "content": 
"<|reserved_special_token_137|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128146": { + "content": "<|reserved_special_token_138|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128147": { + "content": "<|reserved_special_token_139|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128148": { + "content": "<|reserved_special_token_140|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128149": { + "content": "<|reserved_special_token_141|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128150": { + "content": "<|reserved_special_token_142|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128151": { + "content": "<|reserved_special_token_143|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128152": { + "content": "<|reserved_special_token_144|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128153": { + "content": "<|reserved_special_token_145|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128154": { + "content": "<|reserved_special_token_146|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128155": { + "content": "<|reserved_special_token_147|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128156": { + "content": "<|reserved_special_token_148|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128157": { + "content": "<|reserved_special_token_149|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128158": { + "content": "<|reserved_special_token_150|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128159": { + "content": "<|reserved_special_token_151|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128160": { + "content": "<|reserved_special_token_152|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128161": { + "content": "<|reserved_special_token_153|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128162": { + "content": "<|reserved_special_token_154|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128163": { + "content": "<|reserved_special_token_155|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128164": { + "content": "<|reserved_special_token_156|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128165": { + "content": "<|reserved_special_token_157|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128166": { + "content": 
"<|reserved_special_token_158|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128167": { + "content": "<|reserved_special_token_159|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128168": { + "content": "<|reserved_special_token_160|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128169": { + "content": "<|reserved_special_token_161|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128170": { + "content": "<|reserved_special_token_162|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128171": { + "content": "<|reserved_special_token_163|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128172": { + "content": "<|reserved_special_token_164|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128173": { + "content": "<|reserved_special_token_165|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128174": { + "content": "<|reserved_special_token_166|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128175": { + "content": "<|reserved_special_token_167|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128176": { + "content": "<|reserved_special_token_168|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128177": { + "content": "<|reserved_special_token_169|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128178": { + "content": "<|reserved_special_token_170|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128179": { + "content": "<|reserved_special_token_171|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128180": { + "content": "<|reserved_special_token_172|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128181": { + "content": "<|reserved_special_token_173|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128182": { + "content": "<|reserved_special_token_174|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128183": { + "content": "<|reserved_special_token_175|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128184": { + "content": "<|reserved_special_token_176|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128185": { + "content": "<|reserved_special_token_177|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128186": { + "content": "<|reserved_special_token_178|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128187": { + "content": 
"<|reserved_special_token_179|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128188": { + "content": "<|reserved_special_token_180|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128189": { + "content": "<|reserved_special_token_181|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128190": { + "content": "<|reserved_special_token_182|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128191": { + "content": "<|reserved_special_token_183|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128192": { + "content": "<|reserved_special_token_184|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128193": { + "content": "<|reserved_special_token_185|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128194": { + "content": "<|reserved_special_token_186|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128195": { + "content": "<|reserved_special_token_187|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128196": { + "content": "<|reserved_special_token_188|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128197": { + "content": "<|reserved_special_token_189|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128198": { + "content": "<|reserved_special_token_190|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128199": { + "content": "<|reserved_special_token_191|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128200": { + "content": "<|reserved_special_token_192|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128201": { + "content": "<|reserved_special_token_193|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128202": { + "content": "<|reserved_special_token_194|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128203": { + "content": "<|reserved_special_token_195|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128204": { + "content": "<|reserved_special_token_196|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128205": { + "content": "<|reserved_special_token_197|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128206": { + "content": "<|reserved_special_token_198|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128207": { + "content": "<|reserved_special_token_199|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128208": { + "content": 
"<|reserved_special_token_200|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128209": { + "content": "<|reserved_special_token_201|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128210": { + "content": "<|reserved_special_token_202|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128211": { + "content": "<|reserved_special_token_203|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128212": { + "content": "<|reserved_special_token_204|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128213": { + "content": "<|reserved_special_token_205|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128214": { + "content": "<|reserved_special_token_206|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128215": { + "content": "<|reserved_special_token_207|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128216": { + "content": "<|reserved_special_token_208|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128217": { + "content": "<|reserved_special_token_209|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128218": { + "content": "<|reserved_special_token_210|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128219": { + "content": "<|reserved_special_token_211|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128220": { + "content": "<|reserved_special_token_212|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128221": { + "content": "<|reserved_special_token_213|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128222": { + "content": "<|reserved_special_token_214|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128223": { + "content": "<|reserved_special_token_215|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128224": { + "content": "<|reserved_special_token_216|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128225": { + "content": "<|reserved_special_token_217|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128226": { + "content": "<|reserved_special_token_218|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128227": { + "content": "<|reserved_special_token_219|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128228": { + "content": "<|reserved_special_token_220|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128229": { + "content": 
"<|reserved_special_token_221|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128230": { + "content": "<|reserved_special_token_222|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128231": { + "content": "<|reserved_special_token_223|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128232": { + "content": "<|reserved_special_token_224|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128233": { + "content": "<|reserved_special_token_225|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128234": { + "content": "<|reserved_special_token_226|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128235": { + "content": "<|reserved_special_token_227|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128236": { + "content": "<|reserved_special_token_228|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128237": { + "content": "<|reserved_special_token_229|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128238": { + "content": "<|reserved_special_token_230|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128239": { + "content": "<|reserved_special_token_231|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128240": { + "content": "<|reserved_special_token_232|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128241": { + "content": "<|reserved_special_token_233|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128242": { + "content": "<|reserved_special_token_234|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128243": { + "content": "<|reserved_special_token_235|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128244": { + "content": "<|reserved_special_token_236|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128245": { + "content": "<|reserved_special_token_237|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128246": { + "content": "<|reserved_special_token_238|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128247": { + "content": "<|reserved_special_token_239|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128248": { + "content": "<|reserved_special_token_240|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128249": { + "content": "<|reserved_special_token_241|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128250": { + "content": 
"<|reserved_special_token_242|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128251": { + "content": "<|reserved_special_token_243|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128252": { + "content": "<|reserved_special_token_244|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128253": { + "content": "<|reserved_special_token_245|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128254": { + "content": "<|reserved_special_token_246|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128255": { + "content": "<|reserved_special_token_247|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "bos_token": "<|begin_of_text|>", + "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", + "clean_up_tokenization_spaces": true, + "eos_token": "<|eot_id|>", + "model_input_names": [ + "input_ids", + "attention_mask" + ], + "model_max_length": 131072, + "tokenizer_class": "PreTrainedTokenizerFast" +}