Update precious3_gpt_multi_modal.py
Browse files
precious3_gpt_multi_modal.py
CHANGED
@@ -23,6 +23,12 @@ from mpt_7b.attention import ATTN_CLASS_REGISTRY, attn_bias_shape, build_attn_bi
|
|
23 |
import logging
|
24 |
log = logging.getLogger(__name__)
|
25 |
|
|
|
|
|
|
|
|
|
|
|
|
|
26 |
class Custom_MptModel(MPTModel): # MptModel
|
27 |
def __init__(self, config: MPTConfig, modality0_dim=128, modality2_dim=1536):
|
28 |
config._validate_config()
|
|
|
23 |
import logging
|
24 |
log = logging.getLogger(__name__)
|
25 |
|
26 |
+
|
27 |
+
class CustomTokenizer(PreTrainedTokenizerFast):
    """Thin wrapper around ``PreTrainedTokenizerFast``.

    Exists only to accept an explicit ``vocab_file`` argument alongside the
    usual ``tokenizer_file`` and forward both (plus any extra keyword
    arguments) to the fast-tokenizer base class unchanged.
    """

    def __init__(self, vocab_file, tokenizer_file=None, **kwargs):
        # Pure delegation — keyword argument order is irrelevant here.
        super().__init__(tokenizer_file=tokenizer_file, vocab_file=vocab_file, **kwargs)
|
30 |
+
|
31 |
+
|
32 |
class Custom_MptModel(MPTModel): # MptModel
|
33 |
def __init__(self, config: MPTConfig, modality0_dim=128, modality2_dim=1536):
|
34 |
config._validate_config()
|