{ "name_or_path": "tclf90/GLM-4.1V-9B-Thinking-GPTQ-Int4-Int8Mix", "architectures": [ "Glm4vForConditionalGeneration" ], "model_type": "glm4v", "attention_bias": true, "attention_dropout": 0.0, "pad_token_id": 151329, "eos_token_id": [ 151329, 151336, 151338, 151348 ], "image_start_token_id": 151339, "image_end_token_id": 151340, "video_start_token_id": 151341, "video_end_token_id": 151342, "image_token_id": 151343, "video_token_id": 151344, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 13824, "max_position_embeddings": 65536, "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.53.0dev", "use_cache": true, "vocab_size": 151552, "partial_rotary_factor": 0.5, "vision_config": { "hidden_size": 1536, "depth": 24, "num_heads": 12, "attention_bias": false, "intermediate_size": 13824, "hidden_act": "silu", "hidden_dropout_prob": 0.0, "initializer_range": 0.02, "image_size": 336, "patch_size": 14, "out_hidden_size": 4096, "rms_norm_eps": 1e-05, "spatial_merge_size": 2, "temporal_patch_size": 2 }, "rope_scaling": { "type": "default", "mrope_section": [ 8, 12, 12 ] }, "quantization_config": { "quant_method": "gptq", "bits": 4, "group_size": 128, "sym": false, "desc_act": false, "dynamic": { "+:model.layers\\.([0-4]|3[6-9])\\..*": { "bits": 8 }, "+:model.layers.*.self_attn.o_proj.*": { "bits": 8 }, "+:model.layers.*.mlp.down_proj.*": { "bits": 8 }, "+:visual.blocks.*.attn.proj.*": { "bits": 8 }, "+:visual.blocks.*.mlp.down_proj.*": { "bits": 8 } } } }