Set model_max_length to the maximum length of the model's context (131072 tokens)

#15
by x0wllaar - opened
Files changed (1) hide show
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -9011,7 +9011,7 @@
9011
  "eos_token": "</s>",
9012
  "extra_special_tokens": {},
9013
  "legacy": true,
9014
- "model_max_length": 1000000000000000019884624838656,
9015
  "pad_token": "<pad>",
9016
  "processor_class": "PixtralProcessor",
9017
  "tokenizer_class": "LlamaTokenizerFast",
 
9011
  "eos_token": "</s>",
9012
  "extra_special_tokens": {},
9013
  "legacy": true,
9014
+ "model_max_length": 131072,
9015
  "pad_token": "<pad>",
9016
  "processor_class": "PixtralProcessor",
9017
  "tokenizer_class": "LlamaTokenizerFast",