{
  "model_type": "nanogpt_compressed",
  "vocab_size": 6060,
  "block_size": 1024,
  "n_layer": 8,
  "n_head": 8,
  "n_embd": 512,
  "dropout": 0.1,
  "bias": false,
  "compression_method": "fixed_low_rank_mlp",
  "compression_rank": 128,
  "compressed_layers": [
    1
  ],
  "architectures": [
    "NanoGPTCompressedModel"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.35.0",
  "auto_map": {
    "AutoConfig": "modeling_nanogpt.NanoGPTCompressedConfig",
    "AutoModel": "modeling_nanogpt.NanoGPTCompressedModel",
    "AutoModelForCausalLM": "modeling_nanogpt.NanoGPTCompressedModel"
  }
}
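
Because `auto_map` routes the `Auto*` classes to custom code in `modeling_nanogpt.py`, loading this checkpoint requires `trust_remote_code=True`. A minimal loading sketch follows; the local path `./nanogpt-compressed` is a hypothetical placeholder for wherever this `config.json` and `modeling_nanogpt.py` live.

```python
# Minimal loading sketch. Assumes this config is saved as config.json next to
# modeling_nanogpt.py in a Hugging Face-style model directory; the path below
# is a hypothetical placeholder.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("./nanogpt-compressed", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "./nanogpt-compressed",
    trust_remote_code=True,  # required: auto_map points at custom model code
)

print(config.compression_method, config.compression_rank)  # fixed_low_rank_mlp 128
```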
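The actual compression logic lives in `modeling_nanogpt.py`, which is not shown here. Under the assumption that `fixed_low_rank_mlp` means replacing an MLP projection with a fixed rank-128 factorization (and that, per `compressed_layers`, only the block at index 1 is compressed), a sketch of the idea looks like this; `factorize_linear` and the use of truncated SVD are illustrative assumptions, not the repository's confirmed method.

```python
# Hedged sketch of a fixed-rank low-rank MLP factorization at rank=128.
# The helper name and the SVD-based approach are assumptions; the real
# implementation in modeling_nanogpt.py may differ.
import torch

def factorize_linear(weight: torch.Tensor, rank: int = 128):
    """Return factors (A, B) with A @ B approximating `weight`,
    keeping only the top `rank` singular values."""
    U, S, Vh = torch.linalg.svd(weight, full_matrices=False)
    A = U[:, :rank] * S[:rank]  # (out_features, rank)
    B = Vh[:rank, :]            # (rank, in_features)
    return A, B

# Using this config's dimensions: n_embd=512, MLP hidden = 4 * 512 = 2048.
W = torch.randn(2048, 512)      # one MLP up-projection weight (out, in)
A, B = factorize_linear(W, rank=128)
# Parameter count drops from 2048*512 = 1,048,576
# to (2048 + 512) * 128 = 327,680 for this matrix.
```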