{
  "tokenizer_class": "NeuralQuantumTokenizer",
  "auto_map": {
    "AutoTokenizer": "tokenization_nqlm.NeuralQuantumTokenizer"
  },
  "vocab_size": 50257,
  "model_max_length": 512,
  "padding_side": "right",
  "truncation_side": "right",
  "special_tokens": {
    "bos_token": "<|endoftext|>",
    "eos_token": "<|endoftext|>",
    "unk_token": "<|endoftext|>",
    "pad_token": "<|endoftext|>",
    "quantum_token": "<|quantum|>",
    "classical_token": "<|classical|>"
  },
  "clean_up_tokenization_spaces": true,
  "model_input_names": [
    "input_ids",
    "attention_mask"
  ],
  "quantum_enhanced": true,
  "quantum_token_processing": true
}