{
"vocab_size": 16000,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 16,
"intermediate_size": 3072,
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"layer_norm_eps": 1e-12,
"pad_token_id": 0,
"bos_token_id": 1,
"eos_token_id": 2,
"tie_word_embeddings": true,
"hidden_act": "silu",
"normalization_layer": "rmsnorm",
"positional_encoding_type": "rope",
"unk_token_id": 3,
"architectures": [
"HindiCausalLM"
],
"model_type": "hindi_causal_lm"
}
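
For reference, a minimal sketch of reading this config and deriving a few of the quantities it implies. The file path and printed labels are illustrative; the snippet assumes the JSON above is saved locally as config.json.

    import json

    # Load the config shown above (assumed saved locally as config.json).
    with open("config.json") as f:
        cfg = json.load(f)

    # Quantities implied by the hyperparameters.
    head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]   # 768 / 16 = 48
    ffn_ratio = cfg["intermediate_size"] / cfg["hidden_size"]     # 3072 / 768 = 4.0

    print(f"per-head dimension:   {head_dim}")
    print(f"FFN expansion factor: {ffn_ratio:.1f}x")
    print(f"context length:       {cfg['max_position_embeddings']}")  # 512, encoded with RoPE

Note that because "model_type" is the custom value "hindi_causal_lm" rather than a built-in transformers architecture, loading this config through transformers.AutoConfig would presumably require the repository's own modeling code (e.g. passing trust_remote_code=True).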