ford442 committed on
Commit
faf9562
·
verified ·
1 Parent(s): e399188

Upload 4 files

Browse files
tokenizer_3/tokenizer.json CHANGED
@@ -955,7 +955,7 @@
955
  "pre_tokenizer": {
956
  "type": "Metaspace",
957
  "replacement": "▁",
958
- "prepend_scheme": "always",
959
  "split": true
960
  },
961
  "post_processor": {
@@ -1015,7 +1015,7 @@
1015
  "decoder": {
1016
  "type": "Metaspace",
1017
  "replacement": "▁",
1018
- "prepend_scheme": "always",
1019
  "split": true
1020
  },
1021
  "model": {
 
955
  "pre_tokenizer": {
956
  "type": "Metaspace",
957
  "replacement": "▁",
958
+ "prepend_scheme": "never",
959
  "split": true
960
  },
961
  "post_processor": {
 
1015
  "decoder": {
1016
  "type": "Metaspace",
1017
  "replacement": "▁",
1018
+ "prepend_scheme": "never",
1019
  "split": true
1020
  },
1021
  "model": {
tokenizer_3/tokenizer_config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "add_prefix_space": true,
3
  "added_tokens_decoder": {
4
  "0": {
5
  "content": "<pad>",
@@ -931,10 +931,12 @@
931
  "clean_up_tokenization_spaces": true,
932
  "eos_token": "</s>",
933
  "extra_ids": 100,
 
934
  "legacy": true,
935
  "model_max_length": 512,
936
  "pad_token": "<pad>",
937
  "sp_model_kwargs": {},
938
  "tokenizer_class": "T5Tokenizer",
939
- "unk_token": "<unk>"
 
940
  }
 
1
  {
2
+ "add_prefix_space": false,
3
  "added_tokens_decoder": {
4
  "0": {
5
  "content": "<pad>",
 
931
  "clean_up_tokenization_spaces": true,
932
  "eos_token": "</s>",
933
  "extra_ids": 100,
934
+ "extra_special_tokens": {},
935
  "legacy": true,
936
  "model_max_length": 512,
937
  "pad_token": "<pad>",
938
  "sp_model_kwargs": {},
939
  "tokenizer_class": "T5Tokenizer",
940
+ "unk_token": "<unk>",
941
+ "use_fast": true
942
  }