{
  "model_type": "gpt2",
  "vocab_size": 50257,
  "context_length": 256,
  "embedding_dim": 512,
  "num_heads": 16,
  "n_layers": 12,
  "dropout": 0.1,
  "qkv_bias": false
}
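To show how these settings fit together, here is a minimal sketch that parses the configuration and derives the per-head dimension. The JSON is embedded as a string purely to keep the example self-contained; in practice the configuration would be read from a file, and the variable names (`CONFIG_JSON`, `cfg`, `head_dim`) are illustrative assumptions rather than names from this document.

```python
import json

# The configuration shown above, embedded here for a self-contained example.
CONFIG_JSON = """
{
  "model_type": "gpt2",
  "vocab_size": 50257,
  "context_length": 256,
  "embedding_dim": 512,
  "num_heads": 16,
  "n_layers": 12,
  "dropout": 0.1,
  "qkv_bias": false
}
"""

cfg = json.loads(CONFIG_JSON)

# The embedding dimension must split evenly across the attention heads.
assert cfg["embedding_dim"] % cfg["num_heads"] == 0
head_dim = cfg["embedding_dim"] // cfg["num_heads"]  # 512 / 16 = 32

print(f"{cfg['n_layers']} transformer blocks, {cfg['num_heads']} heads "
      f"of size {head_dim}, context length {cfg['context_length']}, "
      f"vocabulary size {cfg['vocab_size']}")
```

Note that with `embedding_dim` of 512 and 16 heads, each head works with 32-dimensional queries, keys, and values, and `qkv_bias: false` means the query/key/value projections are created without bias terms.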