Echo9Zulu committed (verified)
Commit 7037c2a · 1 parent: 66b748f

Upload 12 files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "_name_or_path": "/media/ecomm/c0889304-9e30-4f04-b290-c7db463872c6/Models/Pytorch/Dolphin3.0-Mistral-24B/",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": [
+     2,
+     131072
+   ],
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 32768,
+   "max_position_embeddings": 32768,
+   "model_type": "mistral",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 8,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 100000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.46.3",
+   "use_cache": false,
+   "vocab_size": 131074
+ }
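
The architecture fields above also fix the KV-cache footprint at inference time. A minimal back-of-the-envelope sketch (illustrative only, not part of the uploaded files, assuming an fp16/bf16 cache): with 40 layers, 8 KV heads, and head_dim 128, the cache costs about 160 KiB per token, or roughly 5 GiB at the full 32768-token context.

```python
# Illustrative KV-cache sizing derived from config.json values (assumes an fp16/bf16 cache).
num_hidden_layers = 40
num_key_value_heads = 8
head_dim = 128
bytes_per_element = 2  # fp16/bf16

# Keys + values, per token, summed over all layers.
kv_bytes_per_token = 2 * num_hidden_layers * num_key_value_heads * head_dim * bytes_per_element
print(kv_bytes_per_token / 1024)           # 160.0 KiB per token
print(kv_bytes_per_token * 32768 / 2**30)  # 5.0 GiB at max_position_embeddings
```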
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": [
+     2,
+     131072
+   ],
+   "transformers_version": "4.46.3"
+ }
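
These defaults (sampling enabled, and two EOS ids: 2 plus 131072, presumably the ChatML end-of-turn token referenced in the tokenizer rt_info further down) are picked up automatically by generate(). A minimal sketch for inspecting them; the path is a placeholder for a local clone of this repo:

```python
# Minimal sketch: read the shipped generation defaults ("path/to/this-repo" is a placeholder).
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("path/to/this-repo")
print(gen_cfg.do_sample)     # True
print(gen_cfg.eos_token_id)  # [2, 131072]
```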
openvino_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "compression": null,
+   "dtype": "int4",
+   "input_info": null,
+   "optimum_version": "1.24.0",
+   "quantization_config": {
+     "all_layers": true,
+     "backup_precision": null,
+     "bits": 4,
+     "dataset": "wikitext2",
+     "gptq": null,
+     "group_size": 128,
+     "ignored_scope": null,
+     "lora_correction": null,
+     "num_samples": null,
+     "processor": null,
+     "quant_method": "awq",
+     "ratio": 1.0,
+     "scale_estimation": true,
+     "sensitivity_metric": null,
+     "sym": false,
+     "tokenizer": null,
+     "trust_remote_code": false,
+     "weight_format": "int4"
+   },
+   "save_onnx_model": false,
+   "transformers_version": "4.46.3"
+ }
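
The quantization_config above mirrors the arguments of optimum-intel's OVWeightQuantizationConfig. A hedged sketch of how an equivalent int4 AWQ + scale-estimation export could be reproduced (assumes optimum-intel ~1.24 with NNCF installed; argument names are taken from the keys above and should be checked against the installed version; paths are placeholders):

```python
# Hedged sketch: reproduce an equivalent int4 AWQ + scale-estimation export with optimum-intel.
from optimum.intel import OVModelForCausalLM, OVWeightQuantizationConfig

quantization_config = OVWeightQuantizationConfig(
    bits=4,
    sym=False,            # asymmetric int4, as in the config above
    group_size=128,
    ratio=1.0,
    all_layers=True,
    dataset="wikitext2",  # calibration data for AWQ / scale estimation
    quant_method="awq",
    scale_estimation=True,
)

model = OVModelForCausalLM.from_pretrained(
    "path/to/Dolphin3.0-Mistral-24B",  # placeholder: original PyTorch checkpoint
    export=True,
    quantization_config=quantization_config,
)
model.save_pretrained("path/to/output-int4-ov")
```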
openvino_detokenizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4363759773f9ac7efe6e3f45f85aa82fc5eaa2f61fc8e855b22d5eea210f2c32
+ size 1415444
openvino_detokenizer.xml ADDED
@@ -0,0 +1,184 @@
1
+ <?xml version="1.0"?>
2
+ <net name="detokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_226008" type="Parameter" version="opset1">
5
+ <data shape="?,?" element_type="i64" />
6
+ <output>
7
+ <port id="0" precision="I64" names="Parameter_226008">
8
+ <dim>-1</dim>
9
+ <dim>-1</dim>
10
+ </port>
11
+ </output>
12
+ </layer>
13
+ <layer id="1" name="Convert_226019" type="Convert" version="opset1">
14
+ <data destination_type="i32" />
15
+ <input>
16
+ <port id="0" precision="I64">
17
+ <dim>-1</dim>
18
+ <dim>-1</dim>
19
+ </port>
20
+ </input>
21
+ <output>
22
+ <port id="1" precision="I32">
23
+ <dim>-1</dim>
24
+ <dim>-1</dim>
25
+ </port>
26
+ </output>
27
+ </layer>
28
+ <layer id="2" name="Constant_225975" type="Const" version="opset1">
29
+ <data element_type="u8" shape="1415444" offset="0" size="1415444" />
30
+ <output>
31
+ <port id="0" precision="U8">
32
+ <dim>1415444</dim>
33
+ </port>
34
+ </output>
35
+ </layer>
36
+ <layer id="3" name="StringTensorUnpack_225976" type="StringTensorUnpack" version="extension">
37
+ <data mode="begins_ends" />
38
+ <input>
39
+ <port id="0" precision="U8">
40
+ <dim>1415444</dim>
41
+ </port>
42
+ </input>
43
+ <output>
44
+ <port id="1" precision="I32">
45
+ <dim>-1</dim>
46
+ </port>
47
+ <port id="2" precision="I32">
48
+ <dim>-1</dim>
49
+ </port>
50
+ <port id="3" precision="U8">
51
+ <dim>-1</dim>
52
+ </port>
53
+ </output>
54
+ </layer>
55
+ <layer id="4" name="VocabDecoder_226009" type="VocabDecoder" version="extension">
56
+ <data skip_tokens="0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 
728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 869, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 930, 931, 932, 933, 934, 935, 936, 937, 938, 939, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 999, 131072" />
57
+ <input>
58
+ <port id="0" precision="I32">
59
+ <dim>-1</dim>
60
+ <dim>-1</dim>
61
+ </port>
62
+ <port id="1" precision="I32">
63
+ <dim>-1</dim>
64
+ </port>
65
+ <port id="2" precision="I32">
66
+ <dim>-1</dim>
67
+ </port>
68
+ <port id="3" precision="U8">
69
+ <dim>-1</dim>
70
+ </port>
71
+ </input>
72
+ <output>
73
+ <port id="4" precision="I32">
74
+ <dim>-1</dim>
75
+ </port>
76
+ <port id="5" precision="I32">
77
+ <dim>-1</dim>
78
+ </port>
79
+ <port id="6" precision="I32">
80
+ <dim>-1</dim>
81
+ </port>
82
+ <port id="7" precision="I32">
83
+ <dim>-1</dim>
84
+ </port>
85
+ <port id="8" precision="U8">
86
+ <dim>-1</dim>
87
+ </port>
88
+ </output>
89
+ </layer>
90
+ <layer id="5" name="FuzeRagged_226010" type="FuzeRagged" version="extension">
91
+ <input>
92
+ <port id="0" precision="I32">
93
+ <dim>-1</dim>
94
+ </port>
95
+ <port id="1" precision="I32">
96
+ <dim>-1</dim>
97
+ </port>
98
+ <port id="2" precision="I32">
99
+ <dim>-1</dim>
100
+ </port>
101
+ <port id="3" precision="I32">
102
+ <dim>-1</dim>
103
+ </port>
104
+ </input>
105
+ <output>
106
+ <port id="4" precision="I32">
107
+ <dim>-1</dim>
108
+ </port>
109
+ <port id="5" precision="I32">
110
+ <dim>-1</dim>
111
+ </port>
112
+ </output>
113
+ </layer>
114
+ <layer id="6" name="StringTensorPack_226011" type="StringTensorPack" version="extension">
115
+ <data mode="begins_ends" />
116
+ <input>
117
+ <port id="0" precision="I32">
118
+ <dim>-1</dim>
119
+ </port>
120
+ <port id="1" precision="I32">
121
+ <dim>-1</dim>
122
+ </port>
123
+ <port id="2" precision="U8">
124
+ <dim>-1</dim>
125
+ </port>
126
+ </input>
127
+ <output>
128
+ <port id="3" precision="STRING" names="string_output">
129
+ <dim>-1</dim>
130
+ </port>
131
+ </output>
132
+ </layer>
133
+ <layer id="7" name="Result_226012" type="Result" version="opset1">
134
+ <input>
135
+ <port id="0" precision="STRING">
136
+ <dim>-1</dim>
137
+ </port>
138
+ </input>
139
+ </layer>
140
+ </layers>
141
+ <edges>
142
+ <edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
143
+ <edge from-layer="1" from-port="1" to-layer="4" to-port="0" />
144
+ <edge from-layer="2" from-port="0" to-layer="3" to-port="0" />
145
+ <edge from-layer="3" from-port="1" to-layer="4" to-port="1" />
146
+ <edge from-layer="3" from-port="2" to-layer="4" to-port="2" />
147
+ <edge from-layer="3" from-port="3" to-layer="4" to-port="3" />
148
+ <edge from-layer="4" from-port="4" to-layer="5" to-port="0" />
149
+ <edge from-layer="4" from-port="5" to-layer="5" to-port="1" />
150
+ <edge from-layer="4" from-port="6" to-layer="5" to-port="2" />
151
+ <edge from-layer="4" from-port="7" to-layer="5" to-port="3" />
152
+ <edge from-layer="4" from-port="8" to-layer="6" to-port="2" />
153
+ <edge from-layer="5" from-port="4" to-layer="6" to-port="0" />
154
+ <edge from-layer="5" from-port="5" to-layer="6" to-port="1" />
155
+ <edge from-layer="6" from-port="3" to-layer="7" to-port="0" />
156
+ </edges>
157
+ <rt_info>
158
+ <add_attention_mask value="True" />
159
+ <add_prefix_space />
160
+ <add_special_tokens value="True" />
161
+ <bos_token_id value="1" />
162
+ <chat_template value="{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'&lt;|im_start|>' + message['role'] + '&#10;' + message['content'] + '&lt;|im_end|>' + '&#10;'}}{% endfor %}{% if add_generation_prompt %}{{ '&lt;|im_start|>assistant&#10;' }}{% endif %}" />
163
+ <clean_up_tokenization_spaces />
164
+ <detokenizer_input_type value="i64" />
165
+ <eos_token_id value="131072" />
166
+ <handle_special_tokens_with_re />
167
+ <number_of_inputs value="1" />
168
+ <openvino_tokenizers_version value="2024.5.0.0" />
169
+ <openvino_version value="2024.5.0" />
170
+ <original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
171
+ <pad_token_id value="11" />
172
+ <sentencepiece_version value="0.2.0" />
173
+ <skip_special_tokens value="True" />
174
+ <streaming_detokenizer value="False" />
175
+ <tiktoken_version value="0.7.0" />
176
+ <tokenizer_output_type value="i64" />
177
+ <tokenizers_version value="0.20.3" />
178
+ <transformers_version value="4.46.3" />
179
+ <use_max_padding value="False" />
180
+ <use_sentencepiece_backend value="False" />
181
+ <utf8_replace_mode />
182
+ <with_detokenizer value="True" />
183
+ </rt_info>
184
+ </net>
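
The detokenizer graph above can be compiled and run on its own. A minimal sketch (assumes openvino and openvino-tokenizers matching the rt_info versions, 2024.5; importing openvino_tokenizers registers the extension ops such as VocabDecoder and StringTensorPack; the token ids below are placeholders):

```python
# Hedged sketch: run openvino_detokenizer.xml directly to turn token ids into text.
import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401  (import side effect: registers tokenizer ops)

core = ov.Core()
detokenizer = core.compile_model("openvino_detokenizer.xml", "CPU")

token_ids = np.array([[1073, 2287, 1039]], dtype=np.int64)  # placeholder ids, shape [batch, seq]
print(detokenizer(token_ids)["string_output"])
```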
openvino_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6803ebf3e33c3e163d57c4f065f8d9c8ee205af6e5d50ea362321eea7f91f8ab
+ size 12248054776
openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
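
openvino_model.xml and openvino_model.bin hold the int4 IR itself (~12.2 GB of weights). A minimal inference sketch with optimum-intel, which reads these files together with config.json; the directory is a placeholder for a local clone or the Hub repo id, and it assumes the HF tokenizer files in this upload load via AutoTokenizer:

```python
# Hedged sketch: generate with the exported IR via optimum-intel.
from optimum.intel import OVModelForCausalLM
from transformers import AutoTokenizer

model_dir = "path/to/this-repo"  # placeholder
model = OVModelForCausalLM.from_pretrained(model_dir, device="CPU")
tokenizer = AutoTokenizer.from_pretrained(model_dir)

messages = [{"role": "user", "content": "Give me one sentence about OpenVINO."}]
input_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")
output = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```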
 
openvino_tokenizer.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4008572e30ff718448da054976ab192bc6413552eabec636453c66fd875763bc
+ size 5467313
openvino_tokenizer.xml ADDED
@@ -0,0 +1,778 @@
1
+ <?xml version="1.0"?>
2
+ <net name="tokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_225894" type="Parameter" version="opset1">
5
+ <data shape="?" element_type="string" />
6
+ <output>
7
+ <port id="0" precision="STRING" names="Parameter_225894">
8
+ <dim>-1</dim>
9
+ </port>
10
+ </output>
11
+ </layer>
12
+ <layer id="1" name="Constant_225992" type="Const" version="opset1">
13
+ <data element_type="i32" shape="" offset="0" size="4" />
14
+ <output>
15
+ <port id="0" precision="I32" />
16
+ </output>
17
+ </layer>
18
+ <layer id="2" name="Constant_225993" type="Const" version="opset1">
19
+ <data element_type="i32" shape="" offset="4" size="4" />
20
+ <output>
21
+ <port id="0" precision="I32" />
22
+ </output>
23
+ </layer>
24
+ <layer id="3" name="Constant_225994" type="Const" version="opset1">
25
+ <data element_type="i32" shape="1" offset="4" size="4" />
26
+ <output>
27
+ <port id="0" precision="I32">
28
+ <dim>1</dim>
29
+ </port>
30
+ </output>
31
+ </layer>
32
+ <layer id="4" name="Constant_225900" type="Const" version="opset1">
33
+ <data element_type="i64" shape="" offset="8" size="8" />
34
+ <output>
35
+ <port id="0" precision="I64" />
36
+ </output>
37
+ </layer>
38
+ <layer id="5" name="StringTensorUnpack_225895" type="StringTensorUnpack" version="extension">
39
+ <data mode="begins_ends" />
40
+ <input>
41
+ <port id="0" precision="STRING">
42
+ <dim>-1</dim>
43
+ </port>
44
+ </input>
45
+ <output>
46
+ <port id="1" precision="I32">
47
+ <dim>-1</dim>
48
+ </port>
49
+ <port id="2" precision="I32">
50
+ <dim>-1</dim>
51
+ </port>
52
+ <port id="3" precision="U8">
53
+ <dim>-1</dim>
54
+ </port>
55
+ </output>
56
+ </layer>
57
+ <layer id="6" name="ShapeOf_225896" type="ShapeOf" version="opset3">
58
+ <data output_type="i64" />
59
+ <input>
60
+ <port id="0" precision="I32">
61
+ <dim>-1</dim>
62
+ </port>
63
+ </input>
64
+ <output>
65
+ <port id="1" precision="I64">
66
+ <dim>1</dim>
67
+ </port>
68
+ </output>
69
+ </layer>
70
+ <layer id="7" name="Constant_225897" type="Const" version="opset1">
71
+ <data element_type="i64" shape="" offset="8" size="8" />
72
+ <output>
73
+ <port id="0" precision="I64" />
74
+ </output>
75
+ </layer>
76
+ <layer id="8" name="Constant_225898" type="Const" version="opset1">
77
+ <data element_type="i64" shape="" offset="8" size="8" />
78
+ <output>
79
+ <port id="0" precision="I64" />
80
+ </output>
81
+ </layer>
82
+ <layer id="9" name="Gather_225899" type="Gather" version="opset8">
83
+ <data batch_dims="0" />
84
+ <input>
85
+ <port id="0" precision="I64">
86
+ <dim>1</dim>
87
+ </port>
88
+ <port id="1" precision="I64" />
89
+ <port id="2" precision="I64" />
90
+ </input>
91
+ <output>
92
+ <port id="3" precision="I64" />
93
+ </output>
94
+ </layer>
95
+ <layer id="10" name="Constant_225901" type="Const" version="opset1">
96
+ <data element_type="i64" shape="" offset="16" size="8" />
97
+ <output>
98
+ <port id="0" precision="I64" />
99
+ </output>
100
+ </layer>
101
+ <layer id="11" name="Range_225902" type="Range" version="opset4">
102
+ <data output_type="i32" />
103
+ <input>
104
+ <port id="0" precision="I64" />
105
+ <port id="1" precision="I64" />
106
+ <port id="2" precision="I64" />
107
+ </input>
108
+ <output>
109
+ <port id="3" precision="I32">
110
+ <dim>-1</dim>
111
+ </port>
112
+ </output>
113
+ </layer>
114
+ <layer id="12" name="Constant_225903" type="Const" version="opset1">
115
+ <data element_type="i64" shape="" offset="16" size="8" />
116
+ <output>
117
+ <port id="0" precision="I64" />
118
+ </output>
119
+ </layer>
120
+ <layer id="13" name="Constant_225904" type="Const" version="opset1">
121
+ <data element_type="i64" shape="" offset="16" size="8" />
122
+ <output>
123
+ <port id="0" precision="I64" />
124
+ </output>
125
+ </layer>
126
+ <layer id="14" name="Add_225905" type="Add" version="opset1">
127
+ <data auto_broadcast="numpy" />
128
+ <input>
129
+ <port id="0" precision="I64" />
130
+ <port id="1" precision="I64" />
131
+ </input>
132
+ <output>
133
+ <port id="2" precision="I64" />
134
+ </output>
135
+ </layer>
136
+ <layer id="15" name="Constant_225906" type="Const" version="opset1">
137
+ <data element_type="i64" shape="" offset="16" size="8" />
138
+ <output>
139
+ <port id="0" precision="I64" />
140
+ </output>
141
+ </layer>
142
+ <layer id="16" name="Range_225907" type="Range" version="opset4">
143
+ <data output_type="i32" />
144
+ <input>
145
+ <port id="0" precision="I64" />
146
+ <port id="1" precision="I64" />
147
+ <port id="2" precision="I64" />
148
+ </input>
149
+ <output>
150
+ <port id="3" precision="I32">
151
+ <dim>-1</dim>
152
+ </port>
153
+ </output>
154
+ </layer>
155
+ <layer id="17" name="Constant_225969" type="Const" version="opset1">
156
+ <data element_type="u8" shape="17900" offset="24" size="17900" />
157
+ <output>
158
+ <port id="0" precision="U8">
159
+ <dim>17900</dim>
160
+ </port>
161
+ </output>
162
+ </layer>
163
+ <layer id="18" name="SpecialTokensSplit_225970" type="SpecialTokensSplit" version="extension">
164
+ <input>
165
+ <port id="0" precision="I32">
166
+ <dim>-1</dim>
167
+ </port>
168
+ <port id="1" precision="I32">
169
+ <dim>-1</dim>
170
+ </port>
171
+ <port id="2" precision="I32">
172
+ <dim>-1</dim>
173
+ </port>
174
+ <port id="3" precision="I32">
175
+ <dim>-1</dim>
176
+ </port>
177
+ <port id="4" precision="U8">
178
+ <dim>-1</dim>
179
+ </port>
180
+ <port id="5" precision="U8">
181
+ <dim>17900</dim>
182
+ </port>
183
+ </input>
184
+ <output>
185
+ <port id="6" precision="I32">
186
+ <dim>-1</dim>
187
+ </port>
188
+ <port id="7" precision="I32">
189
+ <dim>-1</dim>
190
+ </port>
191
+ <port id="8" precision="I32">
192
+ <dim>-1</dim>
193
+ </port>
194
+ <port id="9" precision="I32">
195
+ <dim>-1</dim>
196
+ </port>
197
+ <port id="10" precision="U8">
198
+ <dim>-1</dim>
199
+ </port>
200
+ <port id="11" precision="BOOL">
201
+ <dim>-1</dim>
202
+ </port>
203
+ </output>
204
+ </layer>
205
+ <layer id="19" name="Constant_225972" type="Const" version="opset1">
206
+ <data element_type="u8" shape="115" offset="17924" size="115" />
207
+ <output>
208
+ <port id="0" precision="U8">
209
+ <dim>115</dim>
210
+ </port>
211
+ </output>
212
+ </layer>
213
+ <layer id="20" name="RegexSplit_225973" type="RegexSplit" version="extension">
214
+ <data behaviour="isolate" invert="false" max_splits="-1" />
215
+ <input>
216
+ <port id="0" precision="I32">
217
+ <dim>-1</dim>
218
+ </port>
219
+ <port id="1" precision="I32">
220
+ <dim>-1</dim>
221
+ </port>
222
+ <port id="2" precision="I32">
223
+ <dim>-1</dim>
224
+ </port>
225
+ <port id="3" precision="I32">
226
+ <dim>-1</dim>
227
+ </port>
228
+ <port id="4" precision="U8">
229
+ <dim>-1</dim>
230
+ </port>
231
+ <port id="5" precision="BOOL">
232
+ <dim>-1</dim>
233
+ </port>
234
+ <port id="6" precision="U8">
235
+ <dim>115</dim>
236
+ </port>
237
+ </input>
238
+ <output>
239
+ <port id="7" precision="I32">
240
+ <dim>-1</dim>
241
+ </port>
242
+ <port id="8" precision="I32">
243
+ <dim>-1</dim>
244
+ </port>
245
+ <port id="9" precision="I32">
246
+ <dim>-1</dim>
247
+ </port>
248
+ <port id="10" precision="I32">
249
+ <dim>-1</dim>
250
+ </port>
251
+ <port id="11" precision="U8">
252
+ <dim>-1</dim>
253
+ </port>
254
+ <port id="12" precision="BOOL">
255
+ <dim>-1</dim>
256
+ </port>
257
+ </output>
258
+ </layer>
259
+ <layer id="21" name="Constant_225975" type="Const" version="opset1">
260
+ <data element_type="u8" shape="1415444" offset="18039" size="1415444" />
261
+ <output>
262
+ <port id="0" precision="U8">
263
+ <dim>1415444</dim>
264
+ </port>
265
+ </output>
266
+ </layer>
267
+ <layer id="22" name="StringTensorUnpack_225976" type="StringTensorUnpack" version="extension">
268
+ <data mode="begins_ends" />
269
+ <input>
270
+ <port id="0" precision="U8">
271
+ <dim>1415444</dim>
272
+ </port>
273
+ </input>
274
+ <output>
275
+ <port id="1" precision="I32">
276
+ <dim>-1</dim>
277
+ </port>
278
+ <port id="2" precision="I32">
279
+ <dim>-1</dim>
280
+ </port>
281
+ <port id="3" precision="U8">
282
+ <dim>-1</dim>
283
+ </port>
284
+ </output>
285
+ </layer>
286
+ <layer id="23" name="Constant_225981" type="Const" version="opset1">
287
+ <data element_type="u8" shape="2067404" offset="1433483" size="2067404" />
288
+ <output>
289
+ <port id="0" precision="U8">
290
+ <dim>2067404</dim>
291
+ </port>
292
+ </output>
293
+ </layer>
294
+ <layer id="24" name="StringTensorUnpack_225982" type="StringTensorUnpack" version="extension">
295
+ <data mode="begins_ends" />
296
+ <input>
297
+ <port id="0" precision="U8">
298
+ <dim>2067404</dim>
299
+ </port>
300
+ </input>
301
+ <output>
302
+ <port id="1" precision="I32">
303
+ <dim>-1</dim>
304
+ </port>
305
+ <port id="2" precision="I32">
306
+ <dim>-1</dim>
307
+ </port>
308
+ <port id="3" precision="U8">
309
+ <dim>-1</dim>
310
+ </port>
311
+ </output>
312
+ </layer>
313
+ <layer id="25" name="Constant_225984" type="Const" version="opset1">
314
+ <data element_type="u8" shape="1945525" offset="3500887" size="1945525" />
315
+ <output>
316
+ <port id="0" precision="U8">
317
+ <dim>1945525</dim>
318
+ </port>
319
+ </output>
320
+ </layer>
321
+ <layer id="26" name="StringTensorUnpack_225985" type="StringTensorUnpack" version="extension">
322
+ <data mode="begins_ends" />
323
+ <input>
324
+ <port id="0" precision="U8">
325
+ <dim>1945525</dim>
326
+ </port>
327
+ </input>
328
+ <output>
329
+ <port id="1" precision="I32">
330
+ <dim>-1</dim>
331
+ </port>
332
+ <port id="2" precision="I32">
333
+ <dim>-1</dim>
334
+ </port>
335
+ <port id="3" precision="U8">
336
+ <dim>-1</dim>
337
+ </port>
338
+ </output>
339
+ </layer>
340
+ <layer id="27" name="Constant_225978" type="Const" version="opset1">
341
+ <data element_type="u8" shape="16889" offset="5446412" size="16889" />
342
+ <output>
343
+ <port id="0" precision="U8">
344
+ <dim>16889</dim>
345
+ </port>
346
+ </output>
347
+ </layer>
348
+ <layer id="28" name="StringTensorUnpack_225979" type="StringTensorUnpack" version="extension">
349
+ <data mode="begins_ends" />
350
+ <input>
351
+ <port id="0" precision="U8">
352
+ <dim>16889</dim>
353
+ </port>
354
+ </input>
355
+ <output>
356
+ <port id="1" precision="I32">
357
+ <dim>-1</dim>
358
+ </port>
359
+ <port id="2" precision="I32">
360
+ <dim>-1</dim>
361
+ </port>
362
+ <port id="3" precision="U8">
363
+ <dim>-1</dim>
364
+ </port>
365
+ </output>
366
+ </layer>
367
+ <layer id="29" name="Constant_225986" type="Const" version="opset1">
368
+ <data element_type="i32" shape="1001" offset="5463301" size="4004" />
369
+ <output>
370
+ <port id="0" precision="I32">
371
+ <dim>1001</dim>
372
+ </port>
373
+ </output>
374
+ </layer>
375
+ <layer id="30" name="BPETokenizer_225987" type="BPETokenizer" version="extension">
376
+ <data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" cache_capacity="26214" />
377
+ <input>
378
+ <port id="0" precision="I32">
379
+ <dim>-1</dim>
380
+ </port>
381
+ <port id="1" precision="I32">
382
+ <dim>-1</dim>
383
+ </port>
384
+ <port id="2" precision="I32">
385
+ <dim>-1</dim>
386
+ </port>
387
+ <port id="3" precision="I32">
388
+ <dim>-1</dim>
389
+ </port>
390
+ <port id="4" precision="U8">
391
+ <dim>-1</dim>
392
+ </port>
393
+ <port id="5" precision="I32">
394
+ <dim>-1</dim>
395
+ </port>
396
+ <port id="6" precision="I32">
397
+ <dim>-1</dim>
398
+ </port>
399
+ <port id="7" precision="U8">
400
+ <dim>-1</dim>
401
+ </port>
402
+ <port id="8" precision="I32">
403
+ <dim>-1</dim>
404
+ </port>
405
+ <port id="9" precision="I32">
406
+ <dim>-1</dim>
407
+ </port>
408
+ <port id="10" precision="U8">
409
+ <dim>-1</dim>
410
+ </port>
411
+ <port id="11" precision="I32">
412
+ <dim>-1</dim>
413
+ </port>
414
+ <port id="12" precision="I32">
415
+ <dim>-1</dim>
416
+ </port>
417
+ <port id="13" precision="U8">
418
+ <dim>-1</dim>
419
+ </port>
420
+ <port id="14" precision="I32">
421
+ <dim>-1</dim>
422
+ </port>
423
+ <port id="15" precision="I32">
424
+ <dim>-1</dim>
425
+ </port>
426
+ <port id="16" precision="U8">
427
+ <dim>-1</dim>
428
+ </port>
429
+ <port id="17" precision="I32">
430
+ <dim>1001</dim>
431
+ </port>
432
+ </input>
433
+ <output>
434
+ <port id="18" precision="I32">
435
+ <dim>-1</dim>
436
+ </port>
437
+ <port id="19" precision="I32">
438
+ <dim>-1</dim>
439
+ </port>
440
+ <port id="20" precision="I32">
441
+ <dim>-1</dim>
442
+ </port>
443
+ </output>
444
+ </layer>
445
+ <layer id="31" name="Subtract_225988" type="Subtract" version="opset1">
446
+ <data auto_broadcast="numpy" />
447
+ <input>
448
+ <port id="0" precision="I32">
449
+ <dim>-1</dim>
450
+ </port>
451
+ <port id="1" precision="I32">
452
+ <dim>-1</dim>
453
+ </port>
454
+ </input>
455
+ <output>
456
+ <port id="2" precision="I32">
457
+ <dim>-1</dim>
458
+ </port>
459
+ </output>
460
+ </layer>
461
+ <layer id="32" name="Constant_225989" type="Const" version="opset1">
462
+ <data element_type="i32" shape="" offset="5467305" size="4" />
463
+ <output>
464
+ <port id="0" precision="I32" />
465
+ </output>
466
+ </layer>
467
+ <layer id="33" name="Minimum_225990" type="Minimum" version="opset1">
468
+ <data auto_broadcast="numpy" />
469
+ <input>
470
+ <port id="0" precision="I32">
471
+ <dim>-1</dim>
472
+ </port>
473
+ <port id="1" precision="I32" />
474
+ </input>
475
+ <output>
476
+ <port id="2" precision="I32">
477
+ <dim>-1</dim>
478
+ </port>
479
+ </output>
480
+ </layer>
481
+ <layer id="34" name="Subtract_225991" type="Subtract" version="opset1">
482
+ <data auto_broadcast="numpy" />
483
+ <input>
484
+ <port id="0" precision="I32">
485
+ <dim>-1</dim>
486
+ </port>
487
+ <port id="1" precision="I32">
488
+ <dim>-1</dim>
489
+ </port>
490
+ </input>
491
+ <output>
492
+ <port id="2" precision="I32">
493
+ <dim>-1</dim>
494
+ </port>
495
+ </output>
496
+ </layer>
497
+ <layer id="35" name="Constant_225995" type="Const" version="opset1">
498
+ <data element_type="i32" shape="2" offset="8" size="8" />
499
+ <output>
500
+ <port id="0" precision="I32">
501
+ <dim>2</dim>
502
+ </port>
503
+ </output>
504
+ </layer>
505
+ <layer id="36" name="CombineSegments_225996" type="CombineSegments" version="extension">
506
+ <input>
507
+ <port id="0" precision="I32" />
508
+ <port id="1" precision="I32" />
509
+ <port id="2" precision="I32">
510
+ <dim>1</dim>
511
+ </port>
512
+ <port id="3" precision="I32">
513
+ <dim>-1</dim>
514
+ </port>
515
+ <port id="4" precision="I32">
516
+ <dim>-1</dim>
517
+ </port>
518
+ <port id="5" precision="I32">
519
+ <dim>-1</dim>
520
+ </port>
521
+ <port id="6" precision="I32">
522
+ <dim>2</dim>
523
+ </port>
524
+ </input>
525
+ <output>
526
+ <port id="7" precision="I32">
527
+ <dim>-1</dim>
528
+ </port>
529
+ <port id="8" precision="I32">
530
+ <dim>-1</dim>
531
+ </port>
532
+ <port id="9" precision="I32">
533
+ <dim>-1</dim>
534
+ </port>
535
+ <port id="10" precision="I32">
536
+ <dim>-1</dim>
537
+ </port>
538
+ <port id="11" precision="I32">
539
+ <dim>-1</dim>
540
+ </port>
541
+ <port id="12" precision="I32">
542
+ <dim>-1</dim>
543
+ </port>
544
+ </output>
545
+ </layer>
546
+ <layer id="37" name="Subtract_225997" type="Subtract" version="opset1">
547
+ <data auto_broadcast="numpy" />
548
+ <input>
549
+ <port id="0" precision="I32">
550
+ <dim>-1</dim>
551
+ </port>
552
+ <port id="1" precision="I32">
553
+ <dim>-1</dim>
554
+ </port>
555
+ </input>
556
+ <output>
557
+ <port id="2" precision="I32">
558
+ <dim>-1</dim>
559
+ </port>
560
+ </output>
561
+ </layer>
562
+ <layer id="38" name="Constant_225998" type="Const" version="opset1">
563
+ <data element_type="i32" shape="" offset="0" size="4" />
564
+ <output>
565
+ <port id="0" precision="I32" />
566
+ </output>
567
+ </layer>
568
+ <layer id="39" name="ReduceMax_225999" type="ReduceMax" version="opset1">
569
+ <data keep_dims="false" />
570
+ <input>
571
+ <port id="0" precision="I32">
572
+ <dim>-1</dim>
573
+ </port>
574
+ <port id="1" precision="I32" />
575
+ </input>
576
+ <output>
577
+ <port id="2" precision="I32" />
578
+ </output>
579
+ </layer>
580
+ <layer id="40" name="Constant_226000" type="Const" version="opset1">
581
+ <data element_type="i32" shape="" offset="5467309" size="4" />
582
+ <output>
583
+ <port id="0" precision="I32" />
584
+ </output>
585
+ </layer>
586
+ <layer id="41" name="RaggedToDense_226001" type="RaggedToDense" version="extension">
587
+ <data pad_right="false" />
588
+ <input>
589
+ <port id="0" precision="I32">
590
+ <dim>-1</dim>
591
+ </port>
592
+ <port id="1" precision="I32">
593
+ <dim>-1</dim>
594
+ </port>
595
+ <port id="2" precision="I32">
596
+ <dim>-1</dim>
597
+ </port>
598
+ <port id="3" precision="I32" />
599
+ <port id="4" precision="I32" />
600
+ </input>
601
+ <output>
602
+ <port id="5" precision="I32">
603
+ <dim>-1</dim>
604
+ <dim>-1</dim>
605
+ </port>
606
+ <port id="6" precision="BOOL">
607
+ <dim>-1</dim>
608
+ <dim>-1</dim>
609
+ </port>
610
+ </output>
611
+ </layer>
612
+ <layer id="42" name="Convert_226002" type="Convert" version="opset1">
613
+ <data destination_type="i32" />
614
+ <input>
615
+ <port id="0" precision="BOOL">
616
+ <dim>-1</dim>
617
+ <dim>-1</dim>
618
+ </port>
619
+ </input>
620
+ <output>
621
+ <port id="1" precision="I32">
622
+ <dim>-1</dim>
623
+ <dim>-1</dim>
624
+ </port>
625
+ </output>
626
+ </layer>
627
+ <layer id="43" name="Convert_226002" type="Convert" version="opset1">
628
+ <data destination_type="i64" />
629
+ <input>
630
+ <port id="0" precision="I32">
631
+ <dim>-1</dim>
632
+ <dim>-1</dim>
633
+ </port>
634
+ </input>
635
+ <output>
636
+ <port id="1" precision="I64" names="attention_mask">
637
+ <dim>-1</dim>
638
+ <dim>-1</dim>
639
+ </port>
640
+ </output>
641
+ </layer>
642
+ <layer id="45" name="RaggedToDense_226001.0" type="Convert" version="opset1">
643
+ <data destination_type="i64" />
644
+ <input>
645
+ <port id="0" precision="I32">
646
+ <dim>-1</dim>
647
+ <dim>-1</dim>
648
+ </port>
649
+ </input>
650
+ <output>
651
+ <port id="1" precision="I64" names="input_ids">
652
+ <dim>-1</dim>
653
+ <dim>-1</dim>
654
+ </port>
655
+ </output>
656
+ </layer>
657
+ <layer id="46" name="Result_226005" type="Result" version="opset1">
658
+ <input>
659
+ <port id="0" precision="I64">
660
+ <dim>-1</dim>
661
+ <dim>-1</dim>
662
+ </port>
663
+ </input>
664
+ </layer>
665
+ <layer id="44" name="Result_226007" type="Result" version="opset1">
666
+ <input>
667
+ <port id="0" precision="I64">
668
+ <dim>-1</dim>
669
+ <dim>-1</dim>
670
+ </port>
671
+ </input>
672
+ </layer>
673
+ </layers>
674
+ <edges>
675
+ <edge from-layer="0" from-port="0" to-layer="5" to-port="0" />
676
+ <edge from-layer="1" from-port="0" to-layer="36" to-port="0" />
677
+ <edge from-layer="2" from-port="0" to-layer="36" to-port="1" />
678
+ <edge from-layer="3" from-port="0" to-layer="36" to-port="2" />
679
+ <edge from-layer="4" from-port="0" to-layer="11" to-port="0" />
680
+ <edge from-layer="5" from-port="1" to-layer="6" to-port="0" />
681
+ <edge from-layer="5" from-port="3" to-layer="18" to-port="4" />
682
+ <edge from-layer="5" from-port="2" to-layer="18" to-port="3" />
683
+ <edge from-layer="5" from-port="1" to-layer="18" to-port="2" />
684
+ <edge from-layer="6" from-port="1" to-layer="9" to-port="0" />
685
+ <edge from-layer="7" from-port="0" to-layer="9" to-port="1" />
686
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="2" />
687
+ <edge from-layer="9" from-port="3" to-layer="14" to-port="0" />
688
+ <edge from-layer="9" from-port="3" to-layer="11" to-port="1" />
689
+ <edge from-layer="10" from-port="0" to-layer="11" to-port="2" />
690
+ <edge from-layer="11" from-port="3" to-layer="18" to-port="0" />
691
+ <edge from-layer="12" from-port="0" to-layer="16" to-port="0" />
692
+ <edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
693
+ <edge from-layer="14" from-port="2" to-layer="16" to-port="1" />
694
+ <edge from-layer="15" from-port="0" to-layer="16" to-port="2" />
695
+ <edge from-layer="16" from-port="3" to-layer="18" to-port="1" />
696
+ <edge from-layer="17" from-port="0" to-layer="18" to-port="5" />
697
+ <edge from-layer="18" from-port="9" to-layer="20" to-port="3" />
698
+ <edge from-layer="18" from-port="11" to-layer="20" to-port="5" />
699
+ <edge from-layer="18" from-port="10" to-layer="20" to-port="4" />
700
+ <edge from-layer="18" from-port="8" to-layer="20" to-port="2" />
701
+ <edge from-layer="18" from-port="7" to-layer="20" to-port="1" />
702
+ <edge from-layer="18" from-port="6" to-layer="20" to-port="0" />
703
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="6" />
704
+ <edge from-layer="20" from-port="7" to-layer="30" to-port="0" />
705
+ <edge from-layer="20" from-port="8" to-layer="30" to-port="1" />
706
+ <edge from-layer="20" from-port="9" to-layer="30" to-port="2" />
707
+ <edge from-layer="20" from-port="10" to-layer="30" to-port="3" />
708
+ <edge from-layer="20" from-port="11" to-layer="30" to-port="4" />
709
+ <edge from-layer="21" from-port="0" to-layer="22" to-port="0" />
710
+ <edge from-layer="22" from-port="3" to-layer="30" to-port="7" />
711
+ <edge from-layer="22" from-port="1" to-layer="30" to-port="5" />
712
+ <edge from-layer="22" from-port="2" to-layer="30" to-port="6" />
713
+ <edge from-layer="23" from-port="0" to-layer="24" to-port="0" />
714
+ <edge from-layer="24" from-port="1" to-layer="30" to-port="8" />
715
+ <edge from-layer="24" from-port="2" to-layer="30" to-port="9" />
716
+ <edge from-layer="24" from-port="3" to-layer="30" to-port="10" />
717
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="0" />
718
+ <edge from-layer="26" from-port="3" to-layer="30" to-port="13" />
719
+ <edge from-layer="26" from-port="1" to-layer="30" to-port="11" />
720
+ <edge from-layer="26" from-port="2" to-layer="30" to-port="12" />
721
+ <edge from-layer="27" from-port="0" to-layer="28" to-port="0" />
722
+ <edge from-layer="28" from-port="1" to-layer="30" to-port="14" />
723
+ <edge from-layer="28" from-port="2" to-layer="30" to-port="15" />
724
+ <edge from-layer="28" from-port="3" to-layer="30" to-port="16" />
725
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="17" />
726
+ <edge from-layer="30" from-port="20" to-layer="36" to-port="5" />
727
+ <edge from-layer="30" from-port="19" to-layer="36" to-port="4" />
728
+ <edge from-layer="30" from-port="19" to-layer="34" to-port="0" />
729
+ <edge from-layer="30" from-port="18" to-layer="31" to-port="1" />
730
+ <edge from-layer="30" from-port="19" to-layer="31" to-port="0" />
731
+ <edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
732
+ <edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
733
+ <edge from-layer="33" from-port="2" to-layer="34" to-port="1" />
734
+ <edge from-layer="34" from-port="2" to-layer="36" to-port="3" />
735
+ <edge from-layer="35" from-port="0" to-layer="36" to-port="6" />
736
+ <edge from-layer="36" from-port="8" to-layer="37" to-port="0" />
737
+ <edge from-layer="36" from-port="7" to-layer="37" to-port="1" />
738
+ <edge from-layer="36" from-port="7" to-layer="41" to-port="0" />
739
+ <edge from-layer="36" from-port="8" to-layer="41" to-port="1" />
740
+ <edge from-layer="36" from-port="9" to-layer="41" to-port="2" />
741
+ <edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
742
+ <edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
743
+ <edge from-layer="39" from-port="2" to-layer="41" to-port="3" />
744
+ <edge from-layer="40" from-port="0" to-layer="41" to-port="4" />
745
+ <edge from-layer="41" from-port="6" to-layer="42" to-port="0" />
746
+ <edge from-layer="41" from-port="5" to-layer="45" to-port="0" />
747
+ <edge from-layer="42" from-port="1" to-layer="43" to-port="0" />
748
+ <edge from-layer="43" from-port="1" to-layer="44" to-port="0" />
749
+ <edge from-layer="45" from-port="1" to-layer="46" to-port="0" />
750
+ </edges>
751
+ <rt_info>
752
+ <add_attention_mask value="True" />
753
+ <add_prefix_space />
754
+ <add_special_tokens value="True" />
755
+ <bos_token_id value="1" />
756
+ <chat_template value="{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'&lt;|im_start|>' + message['role'] + '&#10;' + message['content'] + '&lt;|im_end|>' + '&#10;'}}{% endfor %}{% if add_generation_prompt %}{{ '&lt;|im_start|>assistant&#10;' }}{% endif %}" />
757
+ <clean_up_tokenization_spaces />
758
+ <detokenizer_input_type value="i64" />
759
+ <eos_token_id value="131072" />
760
+ <handle_special_tokens_with_re />
761
+ <number_of_inputs value="1" />
762
+ <openvino_tokenizers_version value="2024.5.0.0" />
763
+ <openvino_version value="2024.5.0" />
764
+ <original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
765
+ <pad_token_id value="11" />
766
+ <sentencepiece_version value="0.2.0" />
767
+ <skip_special_tokens value="True" />
768
+ <streaming_detokenizer value="False" />
769
+ <tiktoken_version value="0.7.0" />
770
+ <tokenizer_output_type value="i64" />
771
+ <tokenizers_version value="0.20.3" />
772
+ <transformers_version value="4.46.3" />
773
+ <use_max_padding value="False" />
774
+ <use_sentencepiece_backend value="False" />
775
+ <utf8_replace_mode />
776
+ <with_detokenizer value="True" />
777
+ </rt_info>
778
+ </net>
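
Together with the detokenizer sketch earlier, the tokenizer graph above gives a transformers-free round trip. A hedged sketch (assumes openvino 2024.5 string-tensor inputs accept a plain list of Python strings; importing openvino_tokenizers registers the custom ops such as SpecialTokensSplit, BPETokenizer, and RaggedToDense):

```python
# Hedged sketch: encode and decode with the converted tokenizer/detokenizer graphs alone.
import openvino as ov
import openvino_tokenizers  # noqa: F401  (registers the tokenizer extension ops)

core = ov.Core()
tokenizer = core.compile_model("openvino_tokenizer.xml", "CPU")
detokenizer = core.compile_model("openvino_detokenizer.xml", "CPU")

encoded = tokenizer(["<|im_start|>user\nHello!<|im_end|>"])
print(encoded["input_ids"], encoded["attention_mask"])
print(detokenizer(encoded["input_ids"])["string_output"])
```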
special_tokens_map.json ADDED
@@ -0,0 +1,1032 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<unk>",
4
+ "<s>",
5
+ "</s>",
6
+ "[INST]",
7
+ "[/INST]",
8
+ "[AVAILABLE_TOOLS]",
9
+ "[/AVAILABLE_TOOLS]",
10
+ "[TOOL_RESULTS]",
11
+ "[/TOOL_RESULTS]",
12
+ "[TOOL_CALLS]",
13
+ "[IMG]",
14
+ "<pad>",
15
+ "[IMG_BREAK]",
16
+ "[IMG_END]",
17
+ "[PREFIX]",
18
+ "[MIDDLE]",
19
+ "[SUFFIX]",
20
+ "[SYSTEM_PROMPT]",
21
+ "[/SYSTEM_PROMPT]",
22
+ "[TOOL_CONTENT]",
23
+ "<SPECIAL_20>",
24
+ "<SPECIAL_21>",
25
+ "<SPECIAL_22>",
26
+ "<SPECIAL_23>",
27
+ "<SPECIAL_24>",
28
+ "<SPECIAL_25>",
29
+ "<SPECIAL_26>",
30
+ "<SPECIAL_27>",
31
+ "<SPECIAL_28>",
32
+ "<SPECIAL_29>",
33
+ "<SPECIAL_30>",
34
+ "<SPECIAL_31>",
35
+ "<SPECIAL_32>",
36
+ "<SPECIAL_33>",
37
+ "<SPECIAL_34>",
38
+ "<SPECIAL_35>",
39
+ "<SPECIAL_36>",
40
+ "<SPECIAL_37>",
41
+ "<SPECIAL_38>",
42
+ "<SPECIAL_39>",
43
+ "<SPECIAL_40>",
44
+ "<SPECIAL_41>",
45
+ "<SPECIAL_42>",
46
+ "<SPECIAL_43>",
47
+ "<SPECIAL_44>",
48
+ "<SPECIAL_45>",
49
+ "<SPECIAL_46>",
50
+ "<SPECIAL_47>",
51
+ "<SPECIAL_48>",
52
+ "<SPECIAL_49>",
53
+ "<SPECIAL_50>",
54
+ "<SPECIAL_51>",
55
+ "<SPECIAL_52>",
56
+ "<SPECIAL_53>",
57
+ "<SPECIAL_54>",
58
+ "<SPECIAL_55>",
59
+ "<SPECIAL_56>",
60
+ "<SPECIAL_57>",
61
+ "<SPECIAL_58>",
62
+ "<SPECIAL_59>",
63
+ "<SPECIAL_60>",
64
+ "<SPECIAL_61>",
65
+ "<SPECIAL_62>",
66
+ "<SPECIAL_63>",
67
+ "<SPECIAL_64>",
68
+ "<SPECIAL_65>",
69
+ "<SPECIAL_66>",
70
+ "<SPECIAL_67>",
71
+ "<SPECIAL_68>",
72
+ "<SPECIAL_69>",
73
+ "<SPECIAL_70>",
74
+ "<SPECIAL_71>",
75
+ "<SPECIAL_72>",
76
+ "<SPECIAL_73>",
77
+ "<SPECIAL_74>",
78
+ "<SPECIAL_75>",
79
+ "<SPECIAL_76>",
80
+ "<SPECIAL_77>",
81
+ "<SPECIAL_78>",
82
+ "<SPECIAL_79>",
83
+ "<SPECIAL_80>",
84
+ "<SPECIAL_81>",
85
+ "<SPECIAL_82>",
86
+ "<SPECIAL_83>",
87
+ "<SPECIAL_84>",
88
+ "<SPECIAL_85>",
89
+ "<SPECIAL_86>",
90
+ "<SPECIAL_87>",
91
+ "<SPECIAL_88>",
92
+ "<SPECIAL_89>",
93
+ "<SPECIAL_90>",
94
+ "<SPECIAL_91>",
95
+ "<SPECIAL_92>",
96
+ "<SPECIAL_93>",
97
+ "<SPECIAL_94>",
98
+ "<SPECIAL_95>",
99
+ "<SPECIAL_96>",
100
+ "<SPECIAL_97>",
101
+ "<SPECIAL_98>",
102
+ "<SPECIAL_99>",
103
+ "<SPECIAL_100>",
104
+ "<SPECIAL_101>",
105
+ "<SPECIAL_102>",
106
+ "<SPECIAL_103>",
107
+ "<SPECIAL_104>",
108
+ "<SPECIAL_105>",
109
+ "<SPECIAL_106>",
110
+ "<SPECIAL_107>",
111
+ "<SPECIAL_108>",
112
+ "<SPECIAL_109>",
113
+ "<SPECIAL_110>",
114
+ "<SPECIAL_111>",
115
+ "<SPECIAL_112>",
116
+ "<SPECIAL_113>",
117
+ "<SPECIAL_114>",
118
+ "<SPECIAL_115>",
119
+ "<SPECIAL_116>",
120
+ "<SPECIAL_117>",
121
+ "<SPECIAL_118>",
122
+ "<SPECIAL_119>",
123
+ "<SPECIAL_120>",
124
+ "<SPECIAL_121>",
125
+ "<SPECIAL_122>",
126
+ "<SPECIAL_123>",
127
+ "<SPECIAL_124>",
128
+ "<SPECIAL_125>",
129
+ "<SPECIAL_126>",
130
+ "<SPECIAL_127>",
131
+ "<SPECIAL_128>",
132
+ "<SPECIAL_129>",
133
+ "<SPECIAL_130>",
134
+ "<SPECIAL_131>",
135
+ "<SPECIAL_132>",
136
+ "<SPECIAL_133>",
137
+ "<SPECIAL_134>",
138
+ "<SPECIAL_135>",
139
+ "<SPECIAL_136>",
140
+ "<SPECIAL_137>",
141
+ "<SPECIAL_138>",
142
+ "<SPECIAL_139>",
143
+ "<SPECIAL_140>",
144
+ "<SPECIAL_141>",
145
+ "<SPECIAL_142>",
146
+ "<SPECIAL_143>",
147
+ "<SPECIAL_144>",
148
+ "<SPECIAL_145>",
149
+ "<SPECIAL_146>",
150
+ "<SPECIAL_147>",
151
+ "<SPECIAL_148>",
152
+ "<SPECIAL_149>",
153
+ "<SPECIAL_150>",
154
+ "<SPECIAL_151>",
155
+ "<SPECIAL_152>",
156
+ "<SPECIAL_153>",
157
+ "<SPECIAL_154>",
158
+ "<SPECIAL_155>",
159
+ "<SPECIAL_156>",
160
+ "<SPECIAL_157>",
161
+ "<SPECIAL_158>",
162
+ "<SPECIAL_159>",
163
+ "<SPECIAL_160>",
164
+ "<SPECIAL_161>",
165
+ "<SPECIAL_162>",
166
+ "<SPECIAL_163>",
167
+ "<SPECIAL_164>",
168
+ "<SPECIAL_165>",
169
+ "<SPECIAL_166>",
170
+ "<SPECIAL_167>",
171
+ "<SPECIAL_168>",
172
+ "<SPECIAL_169>",
173
+ "<SPECIAL_170>",
174
+ "<SPECIAL_171>",
175
+ "<SPECIAL_172>",
176
+ "<SPECIAL_173>",
177
+ "<SPECIAL_174>",
178
+ "<SPECIAL_175>",
179
+ "<SPECIAL_176>",
180
+ "<SPECIAL_177>",
181
+ "<SPECIAL_178>",
182
+ "<SPECIAL_179>",
183
+ "<SPECIAL_180>",
184
+ "<SPECIAL_181>",
185
+ "<SPECIAL_182>",
186
+ "<SPECIAL_183>",
187
+ "<SPECIAL_184>",
188
+ "<SPECIAL_185>",
189
+ "<SPECIAL_186>",
190
+ "<SPECIAL_187>",
191
+ "<SPECIAL_188>",
192
+ "<SPECIAL_189>",
193
+ "<SPECIAL_190>",
194
+ "<SPECIAL_191>",
195
+ "<SPECIAL_192>",
196
+ "<SPECIAL_193>",
197
+ "<SPECIAL_194>",
198
+ "<SPECIAL_195>",
199
+ "<SPECIAL_196>",
200
+ "<SPECIAL_197>",
201
+ "<SPECIAL_198>",
202
+ "<SPECIAL_199>",
203
+ "<SPECIAL_200>",
204
+ "<SPECIAL_201>",
205
+ "<SPECIAL_202>",
206
+ "<SPECIAL_203>",
207
+ "<SPECIAL_204>",
208
+ "<SPECIAL_205>",
209
+ "<SPECIAL_206>",
210
+ "<SPECIAL_207>",
211
+ "<SPECIAL_208>",
212
+ "<SPECIAL_209>",
213
+ "<SPECIAL_210>",
214
+ "<SPECIAL_211>",
215
+ "<SPECIAL_212>",
216
+ "<SPECIAL_213>",
217
+ "<SPECIAL_214>",
218
+ "<SPECIAL_215>",
219
+ "<SPECIAL_216>",
220
+ "<SPECIAL_217>",
221
+ "<SPECIAL_218>",
222
+ "<SPECIAL_219>",
223
+ "<SPECIAL_220>",
224
+ "<SPECIAL_221>",
225
+ "<SPECIAL_222>",
226
+ "<SPECIAL_223>",
227
+ "<SPECIAL_224>",
228
+ "<SPECIAL_225>",
229
+ "<SPECIAL_226>",
230
+ "<SPECIAL_227>",
231
+ "<SPECIAL_228>",
232
+ "<SPECIAL_229>",
233
+ "<SPECIAL_230>",
234
+ "<SPECIAL_231>",
235
+ "<SPECIAL_232>",
236
+ "<SPECIAL_233>",
237
+ "<SPECIAL_234>",
238
+ "<SPECIAL_235>",
239
+ "<SPECIAL_236>",
240
+ "<SPECIAL_237>",
241
+ "<SPECIAL_238>",
242
+ "<SPECIAL_239>",
243
+ "<SPECIAL_240>",
244
+ "<SPECIAL_241>",
245
+ "<SPECIAL_242>",
246
+ "<SPECIAL_243>",
247
+ "<SPECIAL_244>",
248
+ "<SPECIAL_245>",
249
+ "<SPECIAL_246>",
250
+ "<SPECIAL_247>",
251
+ "<SPECIAL_248>",
252
+ "<SPECIAL_249>",
253
+ "<SPECIAL_250>",
254
+ "<SPECIAL_251>",
255
+ "<SPECIAL_252>",
256
+ "<SPECIAL_253>",
257
+ "<SPECIAL_254>",
258
+ "<SPECIAL_255>",
259
+ "<SPECIAL_256>",
260
+ "<SPECIAL_257>",
261
+ "<SPECIAL_258>",
262
+ "<SPECIAL_259>",
263
+ "<SPECIAL_260>",
264
+ "<SPECIAL_261>",
265
+ "<SPECIAL_262>",
266
+ "<SPECIAL_263>",
267
+ "<SPECIAL_264>",
268
+ "<SPECIAL_265>",
269
+ "<SPECIAL_266>",
270
+ "<SPECIAL_267>",
271
+ "<SPECIAL_268>",
272
+ "<SPECIAL_269>",
273
+ "<SPECIAL_270>",
274
+ "<SPECIAL_271>",
275
+ "<SPECIAL_272>",
276
+ "<SPECIAL_273>",
277
+ "<SPECIAL_274>",
278
+ "<SPECIAL_275>",
279
+ "<SPECIAL_276>",
280
+ "<SPECIAL_277>",
281
+ "<SPECIAL_278>",
282
+ "<SPECIAL_279>",
283
+ "<SPECIAL_280>",
284
+ "<SPECIAL_281>",
285
+ "<SPECIAL_282>",
286
+ "<SPECIAL_283>",
287
+ "<SPECIAL_284>",
288
+ "<SPECIAL_285>",
289
+ "<SPECIAL_286>",
290
+ "<SPECIAL_287>",
291
+ "<SPECIAL_288>",
292
+ "<SPECIAL_289>",
293
+ "<SPECIAL_290>",
294
+ "<SPECIAL_291>",
295
+ "<SPECIAL_292>",
296
+ "<SPECIAL_293>",
297
+ "<SPECIAL_294>",
298
+ "<SPECIAL_295>",
299
+ "<SPECIAL_296>",
300
+ "<SPECIAL_297>",
301
+ "<SPECIAL_298>",
302
+ "<SPECIAL_299>",
303
+ "<SPECIAL_300>",
304
+ "<SPECIAL_301>",
305
+ "<SPECIAL_302>",
306
+ "<SPECIAL_303>",
307
+ "<SPECIAL_304>",
308
+ "<SPECIAL_305>",
309
+ "<SPECIAL_306>",
310
+ "<SPECIAL_307>",
311
+ "<SPECIAL_308>",
312
+ "<SPECIAL_309>",
313
+ "<SPECIAL_310>",
314
+ "<SPECIAL_311>",
315
+ "<SPECIAL_312>",
316
+ "<SPECIAL_313>",
317
+ "<SPECIAL_314>",
318
+ "<SPECIAL_315>",
319
+ "<SPECIAL_316>",
320
+ "<SPECIAL_317>",
321
+ "<SPECIAL_318>",
322
+ "<SPECIAL_319>",
323
+ "<SPECIAL_320>",
324
+ "<SPECIAL_321>",
325
+ "<SPECIAL_322>",
326
+ "<SPECIAL_323>",
327
+ "<SPECIAL_324>",
328
+ "<SPECIAL_325>",
329
+ "<SPECIAL_326>",
330
+ "<SPECIAL_327>",
331
+ "<SPECIAL_328>",
332
+ "<SPECIAL_329>",
333
+ "<SPECIAL_330>",
334
+ "<SPECIAL_331>",
335
+ "<SPECIAL_332>",
336
+ "<SPECIAL_333>",
337
+ "<SPECIAL_334>",
338
+ "<SPECIAL_335>",
339
+ "<SPECIAL_336>",
340
+ "<SPECIAL_337>",
341
+ "<SPECIAL_338>",
342
+ "<SPECIAL_339>",
343
+ "<SPECIAL_340>",
344
+ "<SPECIAL_341>",
345
+ "<SPECIAL_342>",
346
+ "<SPECIAL_343>",
347
+ "<SPECIAL_344>",
348
+ "<SPECIAL_345>",
349
+ "<SPECIAL_346>",
350
+ "<SPECIAL_347>",
351
+ "<SPECIAL_348>",
352
+ "<SPECIAL_349>",
353
+ "<SPECIAL_350>",
354
+ "<SPECIAL_351>",
355
+ "<SPECIAL_352>",
356
+ "<SPECIAL_353>",
357
+ "<SPECIAL_354>",
358
+ "<SPECIAL_355>",
359
+ "<SPECIAL_356>",
360
+ "<SPECIAL_357>",
361
+ "<SPECIAL_358>",
362
+ "<SPECIAL_359>",
363
+ "<SPECIAL_360>",
364
+ "<SPECIAL_361>",
365
+ "<SPECIAL_362>",
366
+ "<SPECIAL_363>",
367
+ "<SPECIAL_364>",
368
+ "<SPECIAL_365>",
369
+ "<SPECIAL_366>",
370
+ "<SPECIAL_367>",
371
+ "<SPECIAL_368>",
372
+ "<SPECIAL_369>",
373
+ "<SPECIAL_370>",
374
+ "<SPECIAL_371>",
375
+ "<SPECIAL_372>",
376
+ "<SPECIAL_373>",
377
+ "<SPECIAL_374>",
378
+ "<SPECIAL_375>",
379
+ "<SPECIAL_376>",
380
+ "<SPECIAL_377>",
381
+ "<SPECIAL_378>",
382
+ "<SPECIAL_379>",
383
+ "<SPECIAL_380>",
384
+ "<SPECIAL_381>",
385
+ "<SPECIAL_382>",
386
+ "<SPECIAL_383>",
387
+ "<SPECIAL_384>",
388
+ "<SPECIAL_385>",
389
+ "<SPECIAL_386>",
390
+ "<SPECIAL_387>",
391
+ "<SPECIAL_388>",
392
+ "<SPECIAL_389>",
393
+ "<SPECIAL_390>",
394
+ "<SPECIAL_391>",
395
+ "<SPECIAL_392>",
396
+ "<SPECIAL_393>",
397
+ "<SPECIAL_394>",
398
+ "<SPECIAL_395>",
399
+ "<SPECIAL_396>",
400
+ "<SPECIAL_397>",
401
+ "<SPECIAL_398>",
402
+ "<SPECIAL_399>",
403
+ "<SPECIAL_400>",
404
+ "<SPECIAL_401>",
405
+ "<SPECIAL_402>",
406
+ "<SPECIAL_403>",
407
+ "<SPECIAL_404>",
408
+ "<SPECIAL_405>",
409
+ "<SPECIAL_406>",
410
+ "<SPECIAL_407>",
411
+ "<SPECIAL_408>",
412
+ "<SPECIAL_409>",
413
+ "<SPECIAL_410>",
414
+ "<SPECIAL_411>",
415
+ "<SPECIAL_412>",
416
+ "<SPECIAL_413>",
417
+ "<SPECIAL_414>",
418
+ "<SPECIAL_415>",
419
+ "<SPECIAL_416>",
420
+ "<SPECIAL_417>",
421
+ "<SPECIAL_418>",
422
+ "<SPECIAL_419>",
423
+ "<SPECIAL_420>",
424
+ "<SPECIAL_421>",
425
+ "<SPECIAL_422>",
426
+ "<SPECIAL_423>",
427
+ "<SPECIAL_424>",
428
+ "<SPECIAL_425>",
429
+ "<SPECIAL_426>",
430
+ "<SPECIAL_427>",
431
+ "<SPECIAL_428>",
432
+ "<SPECIAL_429>",
433
+ "<SPECIAL_430>",
434
+ "<SPECIAL_431>",
435
+ "<SPECIAL_432>",
436
+ "<SPECIAL_433>",
437
+ "<SPECIAL_434>",
438
+ "<SPECIAL_435>",
439
+ "<SPECIAL_436>",
440
+ "<SPECIAL_437>",
441
+ "<SPECIAL_438>",
442
+ "<SPECIAL_439>",
443
+ "<SPECIAL_440>",
444
+ "<SPECIAL_441>",
445
+ "<SPECIAL_442>",
446
+ "<SPECIAL_443>",
447
+ "<SPECIAL_444>",
448
+ "<SPECIAL_445>",
449
+ "<SPECIAL_446>",
450
+ "<SPECIAL_447>",
451
+ "<SPECIAL_448>",
452
+ "<SPECIAL_449>",
453
+ "<SPECIAL_450>",
454
+ "<SPECIAL_451>",
455
+ "<SPECIAL_452>",
456
+ "<SPECIAL_453>",
457
+ "<SPECIAL_454>",
458
+ "<SPECIAL_455>",
459
+ "<SPECIAL_456>",
460
+ "<SPECIAL_457>",
461
+ "<SPECIAL_458>",
462
+ "<SPECIAL_459>",
463
+ "<SPECIAL_460>",
464
+ "<SPECIAL_461>",
465
+ "<SPECIAL_462>",
466
+ "<SPECIAL_463>",
467
+ "<SPECIAL_464>",
468
+ "<SPECIAL_465>",
469
+ "<SPECIAL_466>",
470
+ "<SPECIAL_467>",
471
+ "<SPECIAL_468>",
472
+ "<SPECIAL_469>",
473
+ "<SPECIAL_470>",
474
+ "<SPECIAL_471>",
475
+ "<SPECIAL_472>",
476
+ "<SPECIAL_473>",
477
+ "<SPECIAL_474>",
478
+ "<SPECIAL_475>",
479
+ "<SPECIAL_476>",
480
+ "<SPECIAL_477>",
481
+ "<SPECIAL_478>",
482
+ "<SPECIAL_479>",
483
+ "<SPECIAL_480>",
484
+ "<SPECIAL_481>",
485
+ "<SPECIAL_482>",
486
+ "<SPECIAL_483>",
487
+ "<SPECIAL_484>",
488
+ "<SPECIAL_485>",
489
+ "<SPECIAL_486>",
490
+ "<SPECIAL_487>",
491
+ "<SPECIAL_488>",
492
+ "<SPECIAL_489>",
493
+ "<SPECIAL_490>",
494
+ "<SPECIAL_491>",
495
+ "<SPECIAL_492>",
496
+ "<SPECIAL_493>",
497
+ "<SPECIAL_494>",
498
+ "<SPECIAL_495>",
499
+ "<SPECIAL_496>",
500
+ "<SPECIAL_497>",
501
+ "<SPECIAL_498>",
502
+ "<SPECIAL_499>",
503
+ "<SPECIAL_500>",
504
+ "<SPECIAL_501>",
505
+ "<SPECIAL_502>",
506
+ "<SPECIAL_503>",
507
+ "<SPECIAL_504>",
508
+ "<SPECIAL_505>",
509
+ "<SPECIAL_506>",
510
+ "<SPECIAL_507>",
511
+ "<SPECIAL_508>",
512
+ "<SPECIAL_509>",
513
+ "<SPECIAL_510>",
514
+ "<SPECIAL_511>",
515
+ "<SPECIAL_512>",
516
+ "<SPECIAL_513>",
517
+ "<SPECIAL_514>",
518
+ "<SPECIAL_515>",
519
+ "<SPECIAL_516>",
520
+ "<SPECIAL_517>",
521
+ "<SPECIAL_518>",
522
+ "<SPECIAL_519>",
523
+ "<SPECIAL_520>",
524
+ "<SPECIAL_521>",
525
+ "<SPECIAL_522>",
526
+ "<SPECIAL_523>",
527
+ "<SPECIAL_524>",
528
+ "<SPECIAL_525>",
529
+ "<SPECIAL_526>",
530
+ "<SPECIAL_527>",
531
+ "<SPECIAL_528>",
532
+ "<SPECIAL_529>",
533
+ "<SPECIAL_530>",
534
+ "<SPECIAL_531>",
535
+ "<SPECIAL_532>",
536
+ "<SPECIAL_533>",
537
+ "<SPECIAL_534>",
538
+ "<SPECIAL_535>",
539
+ "<SPECIAL_536>",
540
+ "<SPECIAL_537>",
541
+ "<SPECIAL_538>",
542
+ "<SPECIAL_539>",
543
+ "<SPECIAL_540>",
544
+ "<SPECIAL_541>",
545
+ "<SPECIAL_542>",
546
+ "<SPECIAL_543>",
547
+ "<SPECIAL_544>",
548
+ "<SPECIAL_545>",
549
+ "<SPECIAL_546>",
550
+ "<SPECIAL_547>",
551
+ "<SPECIAL_548>",
552
+ "<SPECIAL_549>",
553
+ "<SPECIAL_550>",
554
+ "<SPECIAL_551>",
555
+ "<SPECIAL_552>",
556
+ "<SPECIAL_553>",
557
+ "<SPECIAL_554>",
558
+ "<SPECIAL_555>",
559
+ "<SPECIAL_556>",
560
+ "<SPECIAL_557>",
561
+ "<SPECIAL_558>",
562
+ "<SPECIAL_559>",
563
+ "<SPECIAL_560>",
564
+ "<SPECIAL_561>",
565
+ "<SPECIAL_562>",
566
+ "<SPECIAL_563>",
567
+ "<SPECIAL_564>",
568
+ "<SPECIAL_565>",
569
+ "<SPECIAL_566>",
570
+ "<SPECIAL_567>",
571
+ "<SPECIAL_568>",
572
+ "<SPECIAL_569>",
573
+ "<SPECIAL_570>",
574
+ "<SPECIAL_571>",
575
+ "<SPECIAL_572>",
576
+ "<SPECIAL_573>",
577
+ "<SPECIAL_574>",
578
+ "<SPECIAL_575>",
579
+ "<SPECIAL_576>",
580
+ "<SPECIAL_577>",
581
+ "<SPECIAL_578>",
582
+ "<SPECIAL_579>",
583
+ "<SPECIAL_580>",
584
+ "<SPECIAL_581>",
585
+ "<SPECIAL_582>",
586
+ "<SPECIAL_583>",
587
+ "<SPECIAL_584>",
588
+ "<SPECIAL_585>",
589
+ "<SPECIAL_586>",
590
+ "<SPECIAL_587>",
591
+ "<SPECIAL_588>",
592
+ "<SPECIAL_589>",
593
+ "<SPECIAL_590>",
594
+ "<SPECIAL_591>",
595
+ "<SPECIAL_592>",
596
+ "<SPECIAL_593>",
597
+ "<SPECIAL_594>",
598
+ "<SPECIAL_595>",
599
+ "<SPECIAL_596>",
600
+ "<SPECIAL_597>",
601
+ "<SPECIAL_598>",
602
+ "<SPECIAL_599>",
603
+ "<SPECIAL_600>",
604
+ "<SPECIAL_601>",
605
+ "<SPECIAL_602>",
606
+ "<SPECIAL_603>",
607
+ "<SPECIAL_604>",
608
+ "<SPECIAL_605>",
609
+ "<SPECIAL_606>",
610
+ "<SPECIAL_607>",
611
+ "<SPECIAL_608>",
612
+ "<SPECIAL_609>",
613
+ "<SPECIAL_610>",
614
+ "<SPECIAL_611>",
615
+ "<SPECIAL_612>",
616
+ "<SPECIAL_613>",
617
+ "<SPECIAL_614>",
618
+ "<SPECIAL_615>",
619
+ "<SPECIAL_616>",
620
+ "<SPECIAL_617>",
621
+ "<SPECIAL_618>",
622
+ "<SPECIAL_619>",
623
+ "<SPECIAL_620>",
624
+ "<SPECIAL_621>",
625
+ "<SPECIAL_622>",
626
+ "<SPECIAL_623>",
627
+ "<SPECIAL_624>",
628
+ "<SPECIAL_625>",
629
+ "<SPECIAL_626>",
630
+ "<SPECIAL_627>",
631
+ "<SPECIAL_628>",
632
+ "<SPECIAL_629>",
633
+ "<SPECIAL_630>",
634
+ "<SPECIAL_631>",
635
+ "<SPECIAL_632>",
636
+ "<SPECIAL_633>",
637
+ "<SPECIAL_634>",
638
+ "<SPECIAL_635>",
639
+ "<SPECIAL_636>",
640
+ "<SPECIAL_637>",
641
+ "<SPECIAL_638>",
642
+ "<SPECIAL_639>",
643
+ "<SPECIAL_640>",
644
+ "<SPECIAL_641>",
645
+ "<SPECIAL_642>",
646
+ "<SPECIAL_643>",
647
+ "<SPECIAL_644>",
648
+ "<SPECIAL_645>",
649
+ "<SPECIAL_646>",
650
+ "<SPECIAL_647>",
651
+ "<SPECIAL_648>",
652
+ "<SPECIAL_649>",
653
+ "<SPECIAL_650>",
654
+ "<SPECIAL_651>",
655
+ "<SPECIAL_652>",
656
+ "<SPECIAL_653>",
657
+ "<SPECIAL_654>",
658
+ "<SPECIAL_655>",
659
+ "<SPECIAL_656>",
660
+ "<SPECIAL_657>",
661
+ "<SPECIAL_658>",
662
+ "<SPECIAL_659>",
663
+ "<SPECIAL_660>",
664
+ "<SPECIAL_661>",
665
+ "<SPECIAL_662>",
666
+ "<SPECIAL_663>",
667
+ "<SPECIAL_664>",
668
+ "<SPECIAL_665>",
669
+ "<SPECIAL_666>",
670
+ "<SPECIAL_667>",
671
+ "<SPECIAL_668>",
672
+ "<SPECIAL_669>",
673
+ "<SPECIAL_670>",
674
+ "<SPECIAL_671>",
675
+ "<SPECIAL_672>",
676
+ "<SPECIAL_673>",
677
+ "<SPECIAL_674>",
678
+ "<SPECIAL_675>",
679
+ "<SPECIAL_676>",
680
+ "<SPECIAL_677>",
681
+ "<SPECIAL_678>",
682
+ "<SPECIAL_679>",
683
+ "<SPECIAL_680>",
684
+ "<SPECIAL_681>",
685
+ "<SPECIAL_682>",
686
+ "<SPECIAL_683>",
687
+ "<SPECIAL_684>",
688
+ "<SPECIAL_685>",
689
+ "<SPECIAL_686>",
690
+ "<SPECIAL_687>",
691
+ "<SPECIAL_688>",
692
+ "<SPECIAL_689>",
693
+ "<SPECIAL_690>",
694
+ "<SPECIAL_691>",
695
+ "<SPECIAL_692>",
696
+ "<SPECIAL_693>",
697
+ "<SPECIAL_694>",
698
+ "<SPECIAL_695>",
699
+ "<SPECIAL_696>",
700
+ "<SPECIAL_697>",
701
+ "<SPECIAL_698>",
702
+ "<SPECIAL_699>",
703
+ "<SPECIAL_700>",
704
+ "<SPECIAL_701>",
705
+ "<SPECIAL_702>",
706
+ "<SPECIAL_703>",
707
+ "<SPECIAL_704>",
708
+ "<SPECIAL_705>",
709
+ "<SPECIAL_706>",
710
+ "<SPECIAL_707>",
711
+ "<SPECIAL_708>",
712
+ "<SPECIAL_709>",
713
+ "<SPECIAL_710>",
714
+ "<SPECIAL_711>",
715
+ "<SPECIAL_712>",
716
+ "<SPECIAL_713>",
717
+ "<SPECIAL_714>",
718
+ "<SPECIAL_715>",
719
+ "<SPECIAL_716>",
720
+ "<SPECIAL_717>",
721
+ "<SPECIAL_718>",
722
+ "<SPECIAL_719>",
723
+ "<SPECIAL_720>",
724
+ "<SPECIAL_721>",
725
+ "<SPECIAL_722>",
726
+ "<SPECIAL_723>",
727
+ "<SPECIAL_724>",
728
+ "<SPECIAL_725>",
729
+ "<SPECIAL_726>",
730
+ "<SPECIAL_727>",
731
+ "<SPECIAL_728>",
732
+ "<SPECIAL_729>",
733
+ "<SPECIAL_730>",
734
+ "<SPECIAL_731>",
735
+ "<SPECIAL_732>",
736
+ "<SPECIAL_733>",
737
+ "<SPECIAL_734>",
738
+ "<SPECIAL_735>",
739
+ "<SPECIAL_736>",
740
+ "<SPECIAL_737>",
741
+ "<SPECIAL_738>",
742
+ "<SPECIAL_739>",
743
+ "<SPECIAL_740>",
744
+ "<SPECIAL_741>",
745
+ "<SPECIAL_742>",
746
+ "<SPECIAL_743>",
747
+ "<SPECIAL_744>",
748
+ "<SPECIAL_745>",
749
+ "<SPECIAL_746>",
750
+ "<SPECIAL_747>",
751
+ "<SPECIAL_748>",
752
+ "<SPECIAL_749>",
753
+ "<SPECIAL_750>",
754
+ "<SPECIAL_751>",
755
+ "<SPECIAL_752>",
756
+ "<SPECIAL_753>",
757
+ "<SPECIAL_754>",
758
+ "<SPECIAL_755>",
759
+ "<SPECIAL_756>",
760
+ "<SPECIAL_757>",
761
+ "<SPECIAL_758>",
762
+ "<SPECIAL_759>",
763
+ "<SPECIAL_760>",
764
+ "<SPECIAL_761>",
765
+ "<SPECIAL_762>",
766
+ "<SPECIAL_763>",
767
+ "<SPECIAL_764>",
768
+ "<SPECIAL_765>",
769
+ "<SPECIAL_766>",
770
+ "<SPECIAL_767>",
771
+ "<SPECIAL_768>",
772
+ "<SPECIAL_769>",
773
+ "<SPECIAL_770>",
774
+ "<SPECIAL_771>",
775
+ "<SPECIAL_772>",
776
+ "<SPECIAL_773>",
777
+ "<SPECIAL_774>",
778
+ "<SPECIAL_775>",
779
+ "<SPECIAL_776>",
780
+ "<SPECIAL_777>",
781
+ "<SPECIAL_778>",
782
+ "<SPECIAL_779>",
783
+ "<SPECIAL_780>",
784
+ "<SPECIAL_781>",
785
+ "<SPECIAL_782>",
786
+ "<SPECIAL_783>",
787
+ "<SPECIAL_784>",
788
+ "<SPECIAL_785>",
789
+ "<SPECIAL_786>",
790
+ "<SPECIAL_787>",
791
+ "<SPECIAL_788>",
792
+ "<SPECIAL_789>",
793
+ "<SPECIAL_790>",
794
+ "<SPECIAL_791>",
795
+ "<SPECIAL_792>",
796
+ "<SPECIAL_793>",
797
+ "<SPECIAL_794>",
798
+ "<SPECIAL_795>",
799
+ "<SPECIAL_796>",
800
+ "<SPECIAL_797>",
801
+ "<SPECIAL_798>",
802
+ "<SPECIAL_799>",
803
+ "<SPECIAL_800>",
804
+ "<SPECIAL_801>",
805
+ "<SPECIAL_802>",
806
+ "<SPECIAL_803>",
807
+ "<SPECIAL_804>",
808
+ "<SPECIAL_805>",
809
+ "<SPECIAL_806>",
810
+ "<SPECIAL_807>",
811
+ "<SPECIAL_808>",
812
+ "<SPECIAL_809>",
813
+ "<SPECIAL_810>",
814
+ "<SPECIAL_811>",
815
+ "<SPECIAL_812>",
816
+ "<SPECIAL_813>",
817
+ "<SPECIAL_814>",
818
+ "<SPECIAL_815>",
819
+ "<SPECIAL_816>",
820
+ "<SPECIAL_817>",
821
+ "<SPECIAL_818>",
822
+ "<SPECIAL_819>",
823
+ "<SPECIAL_820>",
824
+ "<SPECIAL_821>",
825
+ "<SPECIAL_822>",
826
+ "<SPECIAL_823>",
827
+ "<SPECIAL_824>",
828
+ "<SPECIAL_825>",
829
+ "<SPECIAL_826>",
830
+ "<SPECIAL_827>",
831
+ "<SPECIAL_828>",
832
+ "<SPECIAL_829>",
833
+ "<SPECIAL_830>",
834
+ "<SPECIAL_831>",
835
+ "<SPECIAL_832>",
836
+ "<SPECIAL_833>",
837
+ "<SPECIAL_834>",
838
+ "<SPECIAL_835>",
839
+ "<SPECIAL_836>",
840
+ "<SPECIAL_837>",
841
+ "<SPECIAL_838>",
842
+ "<SPECIAL_839>",
843
+ "<SPECIAL_840>",
844
+ "<SPECIAL_841>",
845
+ "<SPECIAL_842>",
846
+ "<SPECIAL_843>",
847
+ "<SPECIAL_844>",
848
+ "<SPECIAL_845>",
849
+ "<SPECIAL_846>",
850
+ "<SPECIAL_847>",
851
+ "<SPECIAL_848>",
852
+ "<SPECIAL_849>",
853
+ "<SPECIAL_850>",
854
+ "<SPECIAL_851>",
855
+ "<SPECIAL_852>",
856
+ "<SPECIAL_853>",
857
+ "<SPECIAL_854>",
858
+ "<SPECIAL_855>",
859
+ "<SPECIAL_856>",
860
+ "<SPECIAL_857>",
861
+ "<SPECIAL_858>",
862
+ "<SPECIAL_859>",
863
+ "<SPECIAL_860>",
864
+ "<SPECIAL_861>",
865
+ "<SPECIAL_862>",
866
+ "<SPECIAL_863>",
867
+ "<SPECIAL_864>",
868
+ "<SPECIAL_865>",
869
+ "<SPECIAL_866>",
870
+ "<SPECIAL_867>",
871
+ "<SPECIAL_868>",
872
+ "<SPECIAL_869>",
873
+ "<SPECIAL_870>",
874
+ "<SPECIAL_871>",
875
+ "<SPECIAL_872>",
876
+ "<SPECIAL_873>",
877
+ "<SPECIAL_874>",
878
+ "<SPECIAL_875>",
879
+ "<SPECIAL_876>",
880
+ "<SPECIAL_877>",
881
+ "<SPECIAL_878>",
882
+ "<SPECIAL_879>",
883
+ "<SPECIAL_880>",
884
+ "<SPECIAL_881>",
885
+ "<SPECIAL_882>",
886
+ "<SPECIAL_883>",
887
+ "<SPECIAL_884>",
888
+ "<SPECIAL_885>",
889
+ "<SPECIAL_886>",
890
+ "<SPECIAL_887>",
891
+ "<SPECIAL_888>",
892
+ "<SPECIAL_889>",
893
+ "<SPECIAL_890>",
894
+ "<SPECIAL_891>",
895
+ "<SPECIAL_892>",
896
+ "<SPECIAL_893>",
897
+ "<SPECIAL_894>",
898
+ "<SPECIAL_895>",
899
+ "<SPECIAL_896>",
900
+ "<SPECIAL_897>",
901
+ "<SPECIAL_898>",
902
+ "<SPECIAL_899>",
903
+ "<SPECIAL_900>",
904
+ "<SPECIAL_901>",
905
+ "<SPECIAL_902>",
906
+ "<SPECIAL_903>",
907
+ "<SPECIAL_904>",
908
+ "<SPECIAL_905>",
909
+ "<SPECIAL_906>",
910
+ "<SPECIAL_907>",
911
+ "<SPECIAL_908>",
912
+ "<SPECIAL_909>",
913
+ "<SPECIAL_910>",
914
+ "<SPECIAL_911>",
915
+ "<SPECIAL_912>",
916
+ "<SPECIAL_913>",
917
+ "<SPECIAL_914>",
918
+ "<SPECIAL_915>",
919
+ "<SPECIAL_916>",
920
+ "<SPECIAL_917>",
921
+ "<SPECIAL_918>",
922
+ "<SPECIAL_919>",
923
+ "<SPECIAL_920>",
924
+ "<SPECIAL_921>",
925
+ "<SPECIAL_922>",
926
+ "<SPECIAL_923>",
927
+ "<SPECIAL_924>",
928
+ "<SPECIAL_925>",
929
+ "<SPECIAL_926>",
930
+ "<SPECIAL_927>",
931
+ "<SPECIAL_928>",
932
+ "<SPECIAL_929>",
933
+ "<SPECIAL_930>",
934
+ "<SPECIAL_931>",
935
+ "<SPECIAL_932>",
936
+ "<SPECIAL_933>",
937
+ "<SPECIAL_934>",
938
+ "<SPECIAL_935>",
939
+ "<SPECIAL_936>",
940
+ "<SPECIAL_937>",
941
+ "<SPECIAL_938>",
942
+ "<SPECIAL_939>",
943
+ "<SPECIAL_940>",
944
+ "<SPECIAL_941>",
945
+ "<SPECIAL_942>",
946
+ "<SPECIAL_943>",
947
+ "<SPECIAL_944>",
948
+ "<SPECIAL_945>",
949
+ "<SPECIAL_946>",
950
+ "<SPECIAL_947>",
951
+ "<SPECIAL_948>",
952
+ "<SPECIAL_949>",
953
+ "<SPECIAL_950>",
954
+ "<SPECIAL_951>",
955
+ "<SPECIAL_952>",
956
+ "<SPECIAL_953>",
957
+ "<SPECIAL_954>",
958
+ "<SPECIAL_955>",
959
+ "<SPECIAL_956>",
960
+ "<SPECIAL_957>",
961
+ "<SPECIAL_958>",
962
+ "<SPECIAL_959>",
963
+ "<SPECIAL_960>",
964
+ "<SPECIAL_961>",
965
+ "<SPECIAL_962>",
966
+ "<SPECIAL_963>",
967
+ "<SPECIAL_964>",
968
+ "<SPECIAL_965>",
969
+ "<SPECIAL_966>",
970
+ "<SPECIAL_967>",
971
+ "<SPECIAL_968>",
972
+ "<SPECIAL_969>",
973
+ "<SPECIAL_970>",
974
+ "<SPECIAL_971>",
975
+ "<SPECIAL_972>",
976
+ "<SPECIAL_973>",
977
+ "<SPECIAL_974>",
978
+ "<SPECIAL_975>",
979
+ "<SPECIAL_976>",
980
+ "<SPECIAL_977>",
981
+ "<SPECIAL_978>",
982
+ "<SPECIAL_979>",
983
+ "<SPECIAL_980>",
984
+ "<SPECIAL_981>",
985
+ "<SPECIAL_982>",
986
+ "<SPECIAL_983>",
987
+ "<SPECIAL_984>",
988
+ "<SPECIAL_985>",
989
+ "<SPECIAL_986>",
990
+ "<SPECIAL_987>",
991
+ "<SPECIAL_988>",
992
+ "<SPECIAL_989>",
993
+ "<SPECIAL_990>",
994
+ "<SPECIAL_991>",
995
+ "<SPECIAL_992>",
996
+ "<SPECIAL_993>",
997
+ "<SPECIAL_994>",
998
+ "<SPECIAL_995>",
999
+ "<SPECIAL_996>",
1000
+ "<SPECIAL_997>",
1001
+ "<SPECIAL_998>",
1002
+ "<SPECIAL_999>"
1003
+ ],
1004
+ "bos_token": {
1005
+ "content": "<s>",
1006
+ "lstrip": false,
1007
+ "normalized": false,
1008
+ "rstrip": false,
1009
+ "single_word": false
1010
+ },
1011
+ "eos_token": {
1012
+ "content": "<|im_end|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false
1017
+ },
1018
+ "pad_token": {
1019
+ "content": "<pad>",
1020
+ "lstrip": false,
1021
+ "normalized": false,
1022
+ "rstrip": false,
1023
+ "single_word": false
1024
+ },
1025
+ "unk_token": {
1026
+ "content": "<unk>",
1027
+ "lstrip": false,
1028
+ "normalized": false,
1029
+ "rstrip": false,
1030
+ "single_word": false
1031
+ }
1032
+ }
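The block above closes out the special-token map for this export (presumably special_tokens_map.json): several hundred reserved `<SPECIAL_n>` placeholders plus the `<s>`, `<|im_end|>`, `<pad>`, and `<unk>` definitions. As a quick sanity check, here is a minimal sketch of how those values surface once the tokenizer is loaded with transformers; the local path is hypothetical and should point at a clone of this repository.

```python
from transformers import AutoTokenizer

# Hypothetical local path; substitute a clone of this repository.
tokenizer = AutoTokenizer.from_pretrained("./Dolphin3.0-Mistral-24B-int4-ov")

# These attributes are populated from the special-token map shown above.
print(tokenizer.bos_token)  # "<s>"
print(tokenizer.eos_token)  # "<|im_end|>" (ChatML-style end of turn)
print(tokenizer.pad_token)  # "<pad>"
print(tokenizer.unk_token)  # "<unk>"
```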
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:575ad6971cfda40b8c4117b03dcc6875214d1fc2fabf3bc29241ffa13eb611f6
3
+ size 17078414
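tokenizer.json is stored through Git LFS, so the three lines above are only the pointer stub; the real ~17 MB file is fetched when the LFS objects are pulled. Below is a small, hedged sketch for confirming that a downloaded copy matches the pointer's SHA-256 and size (the local filename is assumed).

```python
import hashlib
from pathlib import Path

# Assumes the real tokenizer.json (not the LFS pointer stub) is in the working directory.
path = Path("tokenizer.json")
data = path.read_bytes()

# Compare against the oid and size recorded in the LFS pointer above.
print(hashlib.sha256(data).hexdigest()
      == "575ad6971cfda40b8c4117b03dcc6875214d1fc2fabf3bc29241ffa13eb611f6")
print(len(data) == 17078414)
```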
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
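tokenizer_config.json, which presumably holds the added-token table and the chat template, is too large for the diff viewer but ships with the repository like the rest of the files in this commit. For completeness, a minimal usage sketch with optimum-intel follows; the repository id is hypothetical and the generation settings are illustrative only.

```python
from optimum.intel import OVModelForCausalLM
from transformers import AutoTokenizer

# Hypothetical id; substitute the actual Hub repo id or a local clone.
model_id = "Dolphin3.0-Mistral-24B-int4-ov"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = OVModelForCausalLM.from_pretrained(model_id)  # loads the OpenVINO IR from this repo

# The chat template is read from tokenizer_config.json (not rendered above).
messages = [{"role": "user", "content": "Give me a one-line summary of OpenVINO."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)

output = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```

Because OVModelForCausalLM follows the standard transformers generate() API, the same snippet applies regardless of which Intel device OpenVINO compiles the model for.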