{
  "tokenizer_class": "CharacterLevelTokenizer",
  "vocab_size": 6060,
  "model_max_length": 1024,
  "clean_up_tokenization_spaces": false
}
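If this file serves as the `tokenizer_config.json` of a Hugging Face-style checkpoint, these are the fields read at load time. A minimal loading sketch, assuming a local directory `./my-char-model` containing this file (the path and the `trust_remote_code` flag are assumptions; `CharacterLevelTokenizer` is a custom class, not one bundled with `transformers`):

```python
from transformers import AutoTokenizer

# Hypothetical checkpoint directory holding tokenizer_config.json plus the
# custom tokenizer code; trust_remote_code=True lets transformers import
# the CharacterLevelTokenizer class shipped alongside the checkpoint.
tokenizer = AutoTokenizer.from_pretrained(
    "./my-char-model",
    trust_remote_code=True,
)

# Character-level tokenization: roughly one id per input character.
ids = tokenizer("hello")["input_ids"]
print(len(ids))

# model_max_length (1024 here) is taken directly from the config above.
print(tokenizer.model_max_length)
```

Note that `clean_up_tokenization_spaces: false` disables the whitespace normalization `transformers` otherwise applies when decoding, which matters for a character-level vocabulary where every space is a token in its own right.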