{
  "added_tokens_decoder": {
    "27": {
      "content": "*",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "28": {
      "content": "#",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "29": {
      "content": "@",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "30": {
      "content": "!",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "31": {
      "content": "/",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "auto_map": {
    "AutoTokenizer": [
      "tokenizers.ProteinTokenizer",
      null
    ]
  },
  "bos_token": "@",
  "clean_up_tokenization_spaces": true,
  "eos_token": "*",
  "mask_token": "#",
  "model_max_length": 2048,
  "pad_token": "!",
  "sep_token": "/",
  "tokenizer_class": "ProteinTokenizer"
}