Archisman Karmakar committed
Commit 86e8733 · 1 Parent(s): 763f551
emotionMoodtag_analysis/hmv_cfg_base_stage2/model1.py CHANGED
@@ -19,7 +19,7 @@ MODEL_OPTIONS = {
  "name": "DeBERTa v3 Base for Sequence Classification",
  "type": "hf_automodel_finetuned_dbt3",
  "module_path": "hmv_cfg_base_stage2.model1",
- "hf_location": "tachygraphy-microtext-normalization-iemk/DeBERTa-v3-seqClassfication-LV2-EmotionMoodtags-Batch8",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/DeBERTa-v3-seqClassfication-LV2-EmotionMoodtags-Batch8",
  "tokenizer_class": "DebertaV2Tokenizer",
  "model_class": "DebertaV2ForSequenceClassification",
  "problem_type": "regression",
emotionMoodtag_analysis/hmv_cfg_base_stage2/model2.py CHANGED
@@ -25,7 +25,7 @@ MODEL_OPTIONS = {
  "name": "DeBERTa v3 Base Custom Model with minimal Regularized Loss",
  "type": "db3_base_custom",
  "module_path": "hmv_cfg_base_stage2.model2",
- "hf_location": "tachygraphy-microtext-normalization-iemk/DeBERTa-v3-Base-Cust-LV2-EmotionMoodtags-minRegLoss",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/DeBERTa-v3-Base-Cust-LV2-EmotionMoodtags-minRegLoss",
  "tokenizer_class": "DebertaV2Tokenizer",
  "model_class": "EmotionModel",
  "problem_type": "regression",
sentimentPolarity_analysis/hmv_cfg_base_stage1/model1.py CHANGED
@@ -19,7 +19,7 @@ MODEL_OPTIONS = {
  "name": "DeBERTa v3 Base for Sequence Classification",
  "type": "hf_automodel_finetuned_dbt3",
  "module_path": "hmv_cfg_base_stage1.model1",
- "hf_location": "tachygraphy-microtext-normalization-iemk/DeBERTa-v3-seqClassfication-LV1-SentimentPolarities-Batch8",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/DeBERTa-v3-seqClassfication-LV1-SentimentPolarities-Batch8",
  "tokenizer_class": "DebertaV2Tokenizer",
  "model_class": "DebertaV2ForSequenceClassification",
  "problem_type": "multi_label_classification",
sentimentPolarity_analysis/hmv_cfg_base_stage1/model2.py CHANGED
@@ -27,7 +27,7 @@ MODEL_OPTIONS = {
  "name": "DeBERTa v3 Base Custom Model with minimal Regularized Loss",
  "type": "db3_base_custom",
  "module_path": "hmv_cfg_base_stage1.model2",
- "hf_location": "tachygraphy-microtext-normalization-iemk/DeBERTa-v3-Base-Cust-LV1-SentimentPolarities-minRegLoss",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/DeBERTa-v3-Base-Cust-LV1-SentimentPolarities-minRegLoss",
  "tokenizer_class": "DebertaV2Tokenizer",
  "model_class": "SentimentModel",
  "problem_type": "multi_label_classification",
sentimentPolarity_analysis/hmv_cfg_base_stage1/model3.py CHANGED
@@ -29,7 +29,7 @@ MODEL_OPTIONS = {
  "name": "BERT Base Uncased Custom Model",
  "type": "bert_base_uncased_custom",
  "module_path": "hmv_cfg_base_stage1.model3",
- "hf_location": "https://huggingface.co/tachygraphy-microtext-normalization-iemk/BERT-LV1-SentimentPolarities/resolve/main/saved_weights.pt",
+ "hf_location": "https://huggingface.co/Tachygraphy-Microtext-Normalization-IEMK25/BERT-LV1-SentimentPolarities/resolve/main/saved_weights.pt",
  "tokenizer_class": "AutoTokenizer",
  "model_class": "BERT_architecture",
  "problem_type": "multi_label_classification",
sentimentPolarity_analysis/hmv_cfg_base_stage1/model4.py CHANGED
@@ -37,7 +37,7 @@ MODEL_OPTIONS = {
  "name": "LSTM Custom Model",
  "type": "lstm_uncased_custom",
  "module_path": "hmv_cfg_base_stage1.model4",
- "hf_location": "tachygraphy-microtext-normalization-iemk/LSTM-LV1-SentimentPolarities",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/LSTM-LV1-SentimentPolarities",
  "tokenizer_class": "",
  "model_class": "",
  "problem_type": "multi_label_classification",
transformation_and_Normalization/hmv_cfg_base_stage3/model1.py CHANGED
@@ -17,7 +17,7 @@ MODEL_OPTIONS = {
  "name": "Facebook BART Base for Conditional Text Generation",
  "type": "hf_automodel_finetuned_fbtctg",
  "module_path": "hmv_cfg_base_stage3.model1",
- "hf_location": "tachygraphy-microtext-normalization-iemk/BART-base-HF-Seq2Seq-Trainer-Batch4",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/BART-base-HF-Seq2Seq-Trainer-Batch4",
  "tokenizer_class": "BartTokenizer",
  "model_class": "BartForConditionalGeneration",
  "problem_type": "text_transformamtion_and_normalization",
transformation_and_Normalization/hmv_cfg_base_stage3/model2.py CHANGED
@@ -17,7 +17,7 @@ MODEL_OPTIONS = {
  "name": "Microsoft Prophet Net Uncased Large for Conditional Text Generation",
  "type": "hf_automodel_finetuned_mstctg",
  "module_path": "hmv_cfg_base_stage3.model2",
- "hf_location": "tachygraphy-microtext-normalization-iemk/ProphetNet_ForCondGen_Uncased_Large_HFTSeq2Seq_Batch4_ngram3",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/ProphetNet_ForCondGen_Uncased_Large_HFTSeq2Seq_Batch4_ngram3",
  "tokenizer_class": "ProphetNetTokenizer",
  "model_class": "ProphetNetForConditionalGeneration",
  "problem_type": "text_transformamtion_and_normalization",
transformation_and_Normalization/hmv_cfg_base_stage3/model3.py CHANGED
@@ -17,7 +17,7 @@ MODEL_OPTIONS = {
  "name": "Google T5 v1.1 Base for Conditional Text Generation",
  "type": "hf_automodel_finetuned_gt5tctg",
  "module_path": "hmv_cfg_base_stage3.model3",
- "hf_location": "tachygraphy-microtext-normalization-iemk/T5-1.1-HF-seq2seq-Trainer-Batch4",
+ "hf_location": "Tachygraphy-Microtext-Normalization-IEMK25/T5-1.1-HF-seq2seq-Trainer-Batch4",
  "tokenizer_class": "T5Tokenizer",
  "model_class": "T5ForConditionalGeneration",
  "problem_type": "text_transformamtion_and_normalization",