{
  "add_prefix_space": false,
  "backend": "tokenizers",
  "bos_token": "<|endoftext|>",
  "eos_token": "<|end|>",
  "errors": "replace",
  "extra_special_tokens": [
    "<|system|>",
    "<|user|>",
    "<|assistant|>",
    "<|end|>"
  ],
  "is_local": false,
  "local_files_only": false,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|end|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>",
  "vocab_size": 49152
}
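
The JSON above is a Hugging Face transformers-style tokenizer configuration (the kind conventionally saved as tokenizer_config.json next to the tokenizer data files for the declared GPT2Tokenizer class). As a minimal sketch of how such a config is consumed, assuming it sits in a local directory ./tokenizer together with the matching vocabulary files (a path and layout not shown in this file):

from transformers import AutoTokenizer

# Load from a local directory containing this tokenizer_config.json plus the
# GPT2Tokenizer data files; "./tokenizer" is an assumed path for illustration.
tok = AutoTokenizer.from_pretrained("./tokenizer")

print(tok.bos_token)        # "<|endoftext|>"
print(tok.eos_token)        # "<|end|>"
print(tok.pad_token)        # "<|end|>" (padding reuses the end token)
# The huge model_max_length equals int(1e30), the sentinel transformers
# writes when no maximum sequence length has been configured.
print(tok.model_max_length)

Note that "pad_token" is set to the same string as "eos_token", a common choice for GPT-2-style tokenizers, which ship without a dedicated padding token.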