{
"_name_or_path": "MCG-NJU/videomae-base",
"architectures": [
"VideoMAEForVideoClassification"
],
"attention_probs_dropout_prob": 0.0,
"decoder_hidden_size": 384,
"decoder_intermediate_size": 1536,
"decoder_num_attention_heads": 6,
"decoder_num_hidden_layers": 4,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "\uac00\ub2e4",
"1": "\uac00\ubc29",
"2": "\uac10\uc0ac\ud569\ub2c8\ub2e4",
"3": "\uacbd\ucc30",
"4": "\uae30\ub2e4\ub9ac\ub2e4",
"5": "\ub2e4\uc74c",
"6": "\ub3c4\ucc29",
"7": "\ub9cc\ub098\ub2e4",
"8": "\ubc18\uac11\ub2e4",
"9": "\ubcd1\uc6d0",
"10": "\ubd80\ub974\ub2e4",
"11": "\uc2e4\uc885",
"12": "\uc544\ud504\ub2e4",
"13": "\uc548\ub155\ud558\uc138\uc694",
"14": "\uc54c\ub824\uc8fc\ub2e4",
"15": "\uc5b4\ub514",
"16": "\uc5b4\ub5bb\uac8c",
"17": "\uc5ec\uae30",
"18": "\uc7a0\uae50",
"19": "\uc88b\ub2e4",
"20": "\ucc28\ub0b4\ub9ac\ub2e4"
},
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"\uac00\ub2e4": 0,
"\uac00\ubc29": 1,
"\uac10\uc0ac\ud569\ub2c8\ub2e4": 2,
"\uacbd\ucc30": 3,
"\uae30\ub2e4\ub9ac\ub2e4": 4,
"\ub2e4\uc74c": 5,
"\ub3c4\ucc29": 6,
"\ub9cc\ub098\ub2e4": 7,
"\ubc18\uac11\ub2e4": 8,
"\ubcd1\uc6d0": 9,
"\ubd80\ub974\ub2e4": 10,
"\uc2e4\uc885": 11,
"\uc544\ud504\ub2e4": 12,
"\uc548\ub155\ud558\uc138\uc694": 13,
"\uc54c\ub824\uc8fc\ub2e4": 14,
"\uc5b4\ub514": 15,
"\uc5b4\ub5bb\uac8c": 16,
"\uc5ec\uae30": 17,
"\uc7a0\uae50": 18,
"\uc88b\ub2e4": 19,
"\ucc28\ub0b4\ub9ac\ub2e4": 20
},
"layer_norm_eps": 1e-12,
"model_type": "videomae",
"norm_pix_loss": true,
"num_attention_heads": 12,
"num_channels": 3,
"num_frames": 16,
"num_hidden_layers": 12,
"patch_size": 16,
"problem_type": "single_label_classification",
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.48.0",
"tubelet_size": 2,
"use_mean_pooling": false
}
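
The configuration above describes a VideoMAE encoder (12 layers, hidden size 768, 16 frames of 224x224 RGB input) fine-tuned with a 21-way classification head over Korean word labels. Below is a minimal sketch of how a checkpoint carrying this config is typically loaded with the transformers library; the checkpoint path is a placeholder for illustration, not a confirmed repo id or directory name.

from transformers import VideoMAEConfig, VideoMAEForVideoClassification

# Placeholder path: point this at the actual checkpoint directory or Hub repo id.
checkpoint = "./videomae-ksl-checkpoint"

config = VideoMAEConfig.from_pretrained(checkpoint)
model = VideoMAEForVideoClassification.from_pretrained(checkpoint, config=config)

# The classification head is sized from id2label above: 21 classes.
print(model.config.num_labels)    # 21
print(model.config.id2label[13])  # 안녕하세요 ("hello")

# VideoMAE expects pixel_values of shape
# (batch_size, num_frames, num_channels, height, width) = (B, 16, 3, 224, 224),
# matching num_frames, num_channels, and image_size in this config.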