{
  "_attn_implementation_autoset": true,
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "hidden_size": 1024,
  "n_embd": 1024,
  "n_head": 16,
  "n_layer": 24,
  "n_inner": 4096,
  "n_ctx": 1024,
  "n_positions": 1024,
"n_special": 0,
|
|
"num_moe_experts": 16,
|
|
"moe_load_balancing_weight": 0.01,
|
|
"moe_router_z_loss_weight": 0.001,
|
|
"moe_top_k": 4,
|
|
"max_neurons": 8192,
|
|
"max_position_embeddings": 1024,
|
|
"pad_token_id": 50256,
|
|
"predict_special_tokens": true,
|
|
"reward_loss_weight": 0.1,
|
|
"summary_first_dropout": 0.1,
|
|
"summary_proj_to_labels": true,
|
|
"summary_type": "cls_index",
|
|
"summary_use_proj": true,
|
|
"task_specific_params": {
|
|
"text-generation": {
|
|
"do_sample": true,
|
|
"max_length": 50
|
|
}
|
|
},
|
|
"torch_dtype": "float32",
|
|
"vocab_size": 50308,
|
|
"model_type": "gpt2"
|
|
}
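
A minimal loading sketch, assuming the JSON above is saved as `config.json` and read with the Hugging Face `transformers` library. Note that `num_moe_experts`, `moe_load_balancing_weight`, `moe_router_z_loss_weight`, `moe_top_k`, `max_neurons`, and `reward_loss_weight` are not part of the stock `GPT2Config`; `PretrainedConfig` keeps unrecognized keys as plain attributes, so a custom model class would have to consume them itself.

```python
from transformers import GPT2Config

# Standard GPT-2 fields are validated by GPT2Config; the custom keys
# (num_moe_experts, moe_top_k, ...) are stored as extra attributes.
config = GPT2Config.from_json_file("config.json")

print(config.n_layer, config.n_head, config.n_embd)  # 24 16 1024
print(config.num_moe_experts, config.moe_top_k)      # 16 4  (custom keys)
```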
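
The four `moe_*` entries only name mixture-of-experts hyperparameters; the routing code itself is not part of the config. Below is a hypothetical top-k softmax router showing where each value would typically plug in, assuming a Switch-Transformer-style load-balancing loss and a router z-loss. The function `route` and all tensor shapes are illustrative, not the model's actual implementation.

```python
import torch
import torch.nn.functional as F

num_experts, top_k = 16, 4     # num_moe_experts, moe_top_k
load_balancing_weight = 0.01   # moe_load_balancing_weight
router_z_loss_weight = 0.001   # moe_router_z_loss_weight

def route(hidden, router_weight):
    # hidden: (tokens, n_embd); router_weight: (n_embd, num_experts)
    logits = hidden @ router_weight               # (tokens, num_experts)
    probs = F.softmax(logits, dim=-1)
    top_p, top_idx = probs.topk(top_k, dim=-1)    # each token picks 4 experts

    # Switch-style load-balancing loss: fraction of tokens assigned to each
    # expert times the mean router probability for that expert.
    assigned = F.one_hot(top_idx, num_experts).sum(1).float()  # (tokens, num_experts)
    frac_tokens = assigned.mean(0)
    mean_probs = probs.mean(0)
    lb_loss = load_balancing_weight * num_experts * (frac_tokens * mean_probs).sum()

    # Router z-loss keeps the router logits small for numerical stability.
    z_loss = router_z_loss_weight * torch.logsumexp(logits, dim=-1).pow(2).mean()

    return top_idx, top_p, lb_loss + z_loss

# Usage with illustrative shapes (n_embd = 1024 from the config):
h = torch.randn(8, 1024)
w = torch.randn(1024, num_experts)
idx, gate, aux_loss = route(h, w)
```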