Cicciokr committed on
Commit
a11940a
·
verified ·
1 Parent(s): 4f01f4d

Upload 9 files

Browse files
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "activation_dropout": 0.0,
3
  "activation_function": "gelu",
4
  "architectures": [
@@ -38,7 +39,7 @@
38
  "pad_token_id": 1,
39
  "scale_embedding": false,
40
  "torch_dtype": "float32",
41
- "transformers_version": "4.47.0",
42
  "use_cache": true,
43
  "vocab_size": 52000
44
  }
 
1
  {
2
+ "_name_or_path": "Cicciokr/BART-CC100-la",
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
  "architectures": [
 
39
  "pad_token_id": 1,
40
  "scale_embedding": false,
41
  "torch_dtype": "float32",
42
+ "transformers_version": "4.48.3",
43
  "use_cache": true,
44
  "vocab_size": 52000
45
  }
generation_config.json CHANGED
@@ -5,5 +5,5 @@
5
  "eos_token_id": 2,
6
  "forced_eos_token_id": 2,
7
  "pad_token_id": 1,
8
- "transformers_version": "4.47.0"
9
  }
 
5
  "eos_token_id": 2,
6
  "forced_eos_token_id": 2,
7
  "pad_token_id": 1,
8
+ "transformers_version": "4.48.3"
9
  }
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a363065ac9bd9eb731c7ed36e98157c3808a13018fbee56c4ef92db4b162e298
3
  size 563249480
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:37be88b4233f01b12591aaecf3046d97286292e67b8733be0bab8b3837279fa0
3
  size 563249480
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -49,9 +49,11 @@
49
  "errors": "replace",
50
  "extra_special_tokens": {},
51
  "mask_token": "<mask>",
52
- "model_max_length": 1000000000000000019884624838656,
53
  "pad_token": "<pad>",
54
  "sep_token": "</s>",
55
  "tokenizer_class": "BartTokenizer",
 
 
56
  "unk_token": "<unk>"
57
  }
 
49
  "errors": "replace",
50
  "extra_special_tokens": {},
51
  "mask_token": "<mask>",
52
+ "model_max_length": 1024,
53
  "pad_token": "<pad>",
54
  "sep_token": "</s>",
55
  "tokenizer_class": "BartTokenizer",
56
+ "trim_offsets": true,
57
+ "truncation": true,
58
  "unk_token": "<unk>"
59
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8923cbf06cdbb65bc89b18e2f61aae0a8179b53d6833105c00924f8a5d9abe5c
3
- size 5304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:512765c39c7494d911cdbeeb3b9357c9c3f2d819e389b2d46d040210462f5686
3
+ size 5368
vocab.json CHANGED
The diff for this file is too large to render. See raw diff