DavidLanz committed
Commit 38cea23 · 1 Parent(s): ba3441f

Upload folder using huggingface_hub

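The commit message suggests the files were pushed with the `huggingface_hub` library. A minimal sketch of how such a commit is typically created; the local folder path and repo id are assumptions, not taken from the commit itself:

```python
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./tcp2023",          # hypothetical local checkpoint folder
    repo_id="DavidLanz/tcp2023",      # assumed target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```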
config.json CHANGED
@@ -1,17 +1,15 @@
 {
-  "_name_or_path": "DavidLanz/tcp2023",
+  "_name_or_path": "gpt2",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 50256,
-  "do_sample": true,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "max_length": 50,
   "model_type": "gpt2",
   "n_ctx": 1024,
   "n_embd": 768,
@@ -35,7 +33,7 @@
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.34.1",
   "use_cache": true,
   "vocab_size": 50257
 }
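The change points `_name_or_path` back at the base `gpt2` checkpoint and drops the generation-time defaults (`do_sample`, `max_length`) from config.json, leaving only architecture fields. A small sketch for inspecting the updated config, assuming the repo id is `DavidLanz/tcp2023` (taken from the old `_name_or_path`):

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("DavidLanz/tcp2023")  # repo id assumed

print(config.model_type, config.n_ctx, config.n_embd)  # gpt2 1024 768
# With do_sample / max_length removed from config.json, these attributes fall
# back to the transformers defaults (do_sample=False, max_length=20).
print(config.do_sample, config.max_length)
```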
generation_config.json CHANGED
@@ -1,8 +1,6 @@
 {
   "_from_model_config": true,
   "bos_token_id": 50256,
-  "do_sample": true,
   "eos_token_id": 50256,
-  "max_length": 50,
-  "transformers_version": "4.34.0"
+  "transformers_version": "4.34.1"
 }
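Since `do_sample` and `max_length` are no longer stored in generation_config.json, sampling behaviour has to be requested explicitly at generation time. A hedged example of passing the old defaults per call (repo id assumed as above):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("DavidLanz/tcp2023")  # repo id assumed
model = AutoModelForCausalLM.from_pretrained("DavidLanz/tcp2023")

inputs = tokenizer("Hello, my name is", return_tensors="pt")
output_ids = model.generate(
    **inputs,
    do_sample=True,                       # previously a repo-level default
    max_length=50,                        # previously a repo-level default
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no dedicated pad token
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```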
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:53a44f0f76d45b341a61685efcede5ee2e61610a46145f07fcc73fd64abe897b
-size 497807197
+oid sha256:aa9b3c82bb6bf9f0b65d7230af2848eb99e72252fe2ea25f75ec15f502fe3661
+size 497807706
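The weights file is stored as a Git LFS pointer, so only its SHA-256 digest and size change here. A sketch for checking a downloaded copy against the new digest; the repo id is an assumption and the expected hash is copied from the pointer above:

```python
import hashlib
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="DavidLanz/tcp2023", filename="pytorch_model.bin")

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

expected = "aa9b3c82bb6bf9f0b65d7230af2848eb99e72252fe2ea25f75ec15f502fe3661"
print(sha256.hexdigest() == expected)
```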
special_tokens_map.json CHANGED
@@ -1,5 +1,23 @@
 {
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
+  "bos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
 }
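The new file serializes each special token as a full `AddedToken` entry (content plus `lstrip`/`rstrip`/`normalized`/`single_word` flags) rather than a bare string. A sketch of the equivalent object in code; the flag values come from the diff, while the repo id is an assumption:

```python
from transformers import AddedToken, AutoTokenizer

# Mirror of one expanded entry from special_tokens_map.json.
eot = AddedToken(
    "<|endoftext|>",
    lstrip=False,
    rstrip=False,
    normalized=True,
    single_word=False,
)
print(eot.content, eot.normalized)  # <|endoftext|> True

# Loading still exposes the tokens as plain strings.
tokenizer = AutoTokenizer.from_pretrained("DavidLanz/tcp2023")  # repo id assumed
print(tokenizer.special_tokens_map)  # bos/eos/unk all map to "<|endoftext|>"
```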
tokenizer_config.json CHANGED
@@ -5,13 +5,12 @@
     "50256": {
       "content": "<|endoftext|>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
vocab.json CHANGED
The diff for this file is too large to render. See raw diff