{
  "attn": null,
  "attn_mode": "chunk",
  "bos_token_id": 1,
  "eos_token_id": 2,
  "expand_k": 0.5,
  "expand_v": 1,
  "fuse_cross_entropy": true,
  "fuse_norm": true,
  "hidden_act": "swish",
  "hidden_ratio": 4,
  "hidden_size": 4096,
  "initializer_range": 0.006,
  "intermediate_size": 11008,
  "model_type": "gla",
  "norm_eps": 1e-06,
  "num_heads": 16,
  "num_hidden_layers": 32,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_gk": true,
  "use_gv": false,
  "use_output_gate": true,
  "use_short_conv": false
}