{ "architectures": [ "ArcticLSTMSpeculatorPreTrainedModel" ], "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct", "input_hidden_dim": 8192, "inner_dim": "6144", "proj_dim": "6144", "emb_dim": "6144", "model_type": "mlp_speculator", "n_candidates": 3, "n_predict": 3, "scale_input": true, "tie_weights": true, "tie_lstm_embs": true, "top_k_tokens_per_head": [ 1, 1, 1 ], "torch_dtype": "bfloat16", "transformers_version": "4.47.0", "vocab_size": 128256, "method": "sum_lstm" }