Commit 427163b · 1 Parent(s): 2c3d9da

lets try to change the pipeline

modeling_stacked.py +8 -2

modeling_stacked.py CHANGED
@@ -40,7 +40,7 @@ class ExtendedMultitaskModelForTokenClassification(PreTrainedModel):
         # self.bert = AutoModel.from_pretrained(
         #     config.pretrained_config["_name_or_path"], config=config.pretrained_config
         # )
-
+        self.model_floret = floret.load_model(self.config.filename)
         # print(f"Model loaded: {self.model_floret}")
         # if "classifier_dropout" not in config.__dict__:
         #     classifier_dropout = 0.1
@@ -77,7 +77,13 @@ class ExtendedMultitaskModelForTokenClassification(PreTrainedModel):
     @classmethod
     def from_pretrained(cls, *args, **kwargs):
         print("Ignoring weights and using custom initialization.")
-
+
+        # Manually create the config
+        config = ImpressoConfig()
+
+        # Pass the manually created config to the class
+        model = cls(config)
+        return model
 
     # def forward(
     #     self,
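For readers following the change, here is a minimal sketch of how the two edited pieces could fit together in modeling_stacked.py, assuming only what this diff shows: an __init__ that loads a floret model from the path stored on the config, and a from_pretrained override that discards the passed-in checkpoint. The ImpressoConfig fields other than filename, the model_type value, and the placeholder model path are assumptions for illustration, not the repository's actual definitions.

import floret
from transformers import PreTrainedModel, PretrainedConfig

class ImpressoConfig(PretrainedConfig):
    # Assumed shape: only the `filename` attribute is known from this diff.
    model_type = "stacked_bert"  # hypothetical value

    def __init__(self, filename="floret_model.bin", **kwargs):
        super().__init__(**kwargs)
        self.filename = filename  # path to the floret .bin file (placeholder default)

class ExtendedMultitaskModelForTokenClassification(PreTrainedModel):
    config_class = ImpressoConfig

    def __init__(self, config):
        super().__init__(config)
        # Added in this commit: load the floret model from the configured path.
        self.model_floret = floret.load_model(self.config.filename)

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        # Added in this commit: ignore whatever checkpoint the caller passed and
        # rebuild the model from a manually created ImpressoConfig instead.
        print("Ignoring weights and using custom initialization.")

        # Manually create the config
        config = ImpressoConfig()

        # Pass the manually created config to the class
        model = cls(config)
        return model

With this override, a call such as ExtendedMultitaskModelForTokenClassification.from_pretrained("any/checkpoint") no longer reads Transformers weights at all; every argument is dropped, a fresh ImpressoConfig drives initialization, and the floret model referenced by config.filename becomes the only state that gets loaded.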