Upload tokenizer
{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [
    {
      "id": 0,
      "content": "<pad>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 1,
      "content": "<s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 2,
      "content": "</s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 3,
      "content": "<unk>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    }
  ],
  "normalizer": null,
  "pre_tokenizer": {
    "type": "Whitespace"
  },
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "WordLevel",
    "vocab": {
      "<pad>": 0,
      "<s>": 1,
      "</s>": 2,
      "<unk>": 3,
      "A": 4,
      "B": 5,
      "0": 6,
      "1": 7,
      "2": 8,
      "3": 9,
      "4": 10,
      "5": 11,
      "6": 12,
      "7": 13,
      "8": 14,
      "9": 15,
      "K1": 16,
      "K2": 17,
      "K3": 18,
      "K4": 19,
      "K5": 20,
      "K6": 21,
      "K7": 22,
      "EQUALS": 23,
      "END": 24,
      "K8": 25,
      "K9": 26,
      "K10": 27,
      "SELECT": 28,
      "SEP": 29
    },
    "unk_token": "<unk>"
  }
}
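
The file above is a complete Hugging Face `tokenizers` serialization: a WordLevel model over a 30-entry vocabulary, a Whitespace pre-tokenizer, and four special tokens (`<pad>`, `<s>`, `</s>`, `<unk>`). A minimal Python sketch of loading and using it, assuming the JSON is saved locally as tokenizer.json (the sample input strings are hypothetical, chosen only to exercise the vocabulary):

from tokenizers import Tokenizer

# Load the serialized tokenizer shown above. The path is an assumption;
# adjust it to wherever the JSON file is stored.
tokenizer = Tokenizer.from_file("tokenizer.json")

# The Whitespace pre-tokenizer splits the input on whitespace (and
# word/punctuation boundaries); the WordLevel model then maps each
# piece to its vocab id, falling back to "<unk>" (id 3) for anything
# outside the vocabulary.
encoding = tokenizer.encode("SELECT K1 EQUALS 7 END")
print(encoding.tokens)  # ['SELECT', 'K1', 'EQUALS', '7', 'END']
print(encoding.ids)     # [28, 16, 23, 13, 24]

# Out-of-vocabulary words map to <unk> rather than raising an error.
print(tokenizer.encode("SELECT FOO").ids)  # [28, 3]

Note that because "post_processor" is null, encode does not wrap sequences in `<s>` / `</s>`; if the downstream model expects BOS/EOS markers, the caller has to add them explicitly.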