TusharGoel committed
Commit 623d851 · 1 Parent(s): a60ca85

Upload 6 files

Files changed (3):
  1. config.json +1 -1
  2. pytorch_model.bin +1 -1
  3. tokenizer_config.json +7 -0
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "microsoft/layoutlmv2-base-uncased",
+  "_name_or_path": "TusharGoel/LayoutLMv2-finetuned-docvqa",
   "architectures": [
     "LayoutLMv2ForQuestionAnswering"
   ],
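The only change in config.json is "_name_or_path" now pointing at the fine-tuned repo instead of the base checkpoint. A minimal sketch of loading that checkpoint with the architecture listed in the config; the repo id is taken from the diff above, and that the weights resolve from the Hub is an assumption:

# Minimal sketch: load the fine-tuned checkpoint with the architecture
# named in config.json ("LayoutLMv2ForQuestionAnswering").
# Repo id comes from "_name_or_path"; Hub availability is assumed.
from transformers import LayoutLMv2ForQuestionAnswering

model = LayoutLMv2ForQuestionAnswering.from_pretrained(
    "TusharGoel/LayoutLMv2-finetuned-docvqa"
)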
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a2bd729a64b19ca0d2205c21dab3412347c0be7519479ab7bc827861918d30bf
+oid sha256:81071a98f459ad9441c803f2260d1dd18c2ded4a1c58772cb7399244338adabe
 size 802214001
tokenizer_config.json CHANGED
@@ -11,9 +11,11 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
+  "max_length": 512,
   "model_max_length": 512,
   "never_split": null,
   "only_label_first_subword": true,
+  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
   "pad_token_box": [
     0,
@@ -22,6 +24,8 @@
     0
   ],
   "pad_token_label": -100,
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "sep_token": "[SEP]",
   "sep_token_box": [
     1000,
@@ -29,8 +33,11 @@
     1000,
     1000
   ],
+  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "LayoutLMv2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
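The seven added tokenizer_config.json entries are saved tokenizer defaults (max_length, padding_side, truncation_side, stride, and so on). A small sketch of how a few of them surface as attributes after loading; it assumes the tokenizer files in this repo are used unchanged:

# Sketch: some of the added fields appear directly as tokenizer attributes
# once the config is loaded. Assumes this repo's tokenizer files load as-is.
from transformers import LayoutLMv2Tokenizer

tokenizer = LayoutLMv2Tokenizer.from_pretrained(
    "TusharGoel/LayoutLMv2-finetuned-docvqa"
)
print(tokenizer.model_max_length)  # 512
print(tokenizer.padding_side)      # "right"
print(tokenizer.truncation_side)   # "right"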