{ "_name_or_path": "google/flan-t5-large", "adapters": { "adapters": { "paragraph_segmentation": "2e3d1fb61268c5a4" }, "config_map": { "2e3d1fb61268c5a4": { "alpha": 16, "architecture": "lora", "attn_matrices": [ "q", "v" ], "composition_mode": "add", "dropout": 0.0, "init_weights": "lora", "intermediate_lora": true, "output_lora": true, "r": 8, "selfattn_lora": true, "use_gating": false } }, "fusion_config_map": {}, "fusions": {} }, "architectures": [ "T5ForConditionalGeneration" ], "d_ff": 2816, "d_kv": 64, "d_model": 1024, "decoder_start_token_id": 0, "dense_act_fn": "gelu_new", "dropout_rate": 0.1, "eos_token_id": 1, "feed_forward_proj": "gated-gelu", "initializer_factor": 1.0, "is_encoder_decoder": true, "is_gated_act": true, "layer_norm_epsilon": 1e-06, "model_type": "t5", "n_positions": 512, "num_decoder_layers": 24, "num_heads": 16, "num_layers": 24, "output_past": true, "pad_token_id": 0, "relative_attention_max_distance": 128, "relative_attention_num_buckets": 32, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.26.1", "use_cache": true, "vocab_size": 32128 }