AntonV (HF Staff) committed
Commit b7ae481 · 1 Parent(s): 497a404
.gitattributes CHANGED
@@ -1,2 +1,3 @@
  *.safetensors filter=lfs diff=lfs merge=lfs -text
  tokenizer.model filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
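
For context, the added rule routes tokenizer.json through Git LFS, just like the existing *.safetensors and tokenizer.model rules. A minimal sketch (assuming a local clone of this repository with git and git-lfs installed) of confirming that the attribute applies:

```python
import subprocess

# Ask git which filter applies to tokenizer.json; with the rule above in
# .gitattributes this should report the LFS filter (sketch, assumes a local clone).
result = subprocess.run(
    ["git", "check-attr", "filter", "--", "tokenizer.json"],
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout.strip())  # expected: "tokenizer.json: filter: lfs"
```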
added_tokens.json DELETED
@@ -1 +0,0 @@
- {"<|IMAGE_PLACEHOLDER|>": 100295, "<|AUDIO_PLACEHOLDER|>": 100296, "<|LOC_0|>": 100297, "<|LOC_1|>": 100298, "<|LOC_2|>": 100299, "<|LOC_3|>": 100300, "<|LOC_4|>": 100301, "<|LOC_5|>": 100302, "<|LOC_6|>": 100303, "<|LOC_7|>": 100304, "<|LOC_8|>": 100305, "<|LOC_9|>": 100306, "<|LOC_10|>": 100307, "<|LOC_11|>": 100308, "<|LOC_12|>": 100309, "<|LOC_13|>": 100310, "<|LOC_14|>": 100311, "<|LOC_15|>": 100312, "<|LOC_16|>": 100313, "<|LOC_17|>": 100314, "<|LOC_18|>": 100315, "<|LOC_19|>": 100316, "<|LOC_20|>": 100317, "<|LOC_21|>": 100318, "<|LOC_22|>": 100319, "<|LOC_23|>": 100320, "<|LOC_24|>": 100321, "<|LOC_25|>": 100322, "<|LOC_26|>": 100323, "<|LOC_27|>": 100324, "<|LOC_28|>": 100325, "<|LOC_29|>": 100326, "<|LOC_30|>": 100327, "<|LOC_31|>": 100328, "<|LOC_32|>": 100329, "<|LOC_33|>": 100330, "<|LOC_34|>": 100331, "<|LOC_35|>": 100332, "<|LOC_36|>": 100333, "<|LOC_37|>": 100334, "<|LOC_38|>": 100335, "<|LOC_39|>": 100336, "<|LOC_40|>": 100337, "<|LOC_41|>": 100338, "<|LOC_42|>": 100339, "<|LOC_43|>": 100340, "<|LOC_44|>": 100341, "<|LOC_45|>": 100342, "<|LOC_46|>": 100343, "<|LOC_47|>": 100344, "<|LOC_48|>": 100345, "<|LOC_49|>": 100346, "<|LOC_50|>": 100347, "<|LOC_51|>": 100348, "<|LOC_52|>": 100349, "<|LOC_53|>": 100350, "<|LOC_54|>": 100351, "<|LOC_55|>": 100352, "<|LOC_56|>": 100353, "<|LOC_57|>": 100354, "<|LOC_58|>": 100355, "<|LOC_59|>": 100356, "<|LOC_60|>": 100357, "<|LOC_61|>": 100358, "<|LOC_62|>": 100359, "<|LOC_63|>": 100360, "<|LOC_64|>": 100361, "<|LOC_65|>": 100362, "<|LOC_66|>": 100363, "<|LOC_67|>": 100364, "<|LOC_68|>": 100365, "<|LOC_69|>": 100366, "<|LOC_70|>": 100367, "<|LOC_71|>": 100368, "<|LOC_72|>": 100369, "<|LOC_73|>": 100370, "<|LOC_74|>": 100371, "<|LOC_75|>": 100372, "<|LOC_76|>": 100373, "<|LOC_77|>": 100374, "<|LOC_78|>": 100375, "<|LOC_79|>": 100376, "<|LOC_80|>": 100377, "<|LOC_81|>": 100378, "<|LOC_82|>": 100379, "<|LOC_83|>": 100380, "<|LOC_84|>": 100381, "<|LOC_85|>": 100382, "<|LOC_86|>": 100383, "<|LOC_87|>": 100384, "<|LOC_88|>": 100385, "<|LOC_89|>": 100386, "<|LOC_90|>": 100387, "<|LOC_91|>": 100388, "<|LOC_92|>": 100389, "<|LOC_93|>": 100390, "<|LOC_94|>": 100391, "<|LOC_95|>": 100392, "<|LOC_96|>": 100393, "<|LOC_97|>": 100394, "<|LOC_98|>": 100395, "<|LOC_99|>": 100396, "<|LOC_100|>": 100397, "<|LOC_101|>": 100398, "<|LOC_102|>": 100399, "<|LOC_103|>": 100400, "<|LOC_104|>": 100401, "<|LOC_105|>": 100402, "<|LOC_106|>": 100403, "<|LOC_107|>": 100404, "<|LOC_108|>": 100405, "<|LOC_109|>": 100406, "<|LOC_110|>": 100407, "<|LOC_111|>": 100408, "<|LOC_112|>": 100409, "<|LOC_113|>": 100410, "<|LOC_114|>": 100411, "<|LOC_115|>": 100412, "<|LOC_116|>": 100413, "<|LOC_117|>": 100414, "<|LOC_118|>": 100415, "<|LOC_119|>": 100416, "<|LOC_120|>": 100417, "<|LOC_121|>": 100418, "<|LOC_122|>": 100419, "<|LOC_123|>": 100420, "<|LOC_124|>": 100421, "<|LOC_125|>": 100422, "<|LOC_126|>": 100423, "<|LOC_127|>": 100424, "<|LOC_128|>": 100425, "<|LOC_129|>": 100426, "<|LOC_130|>": 100427, "<|LOC_131|>": 100428, "<|LOC_132|>": 100429, "<|LOC_133|>": 100430, "<|LOC_134|>": 100431, "<|LOC_135|>": 100432, "<|LOC_136|>": 100433, "<|LOC_137|>": 100434, "<|LOC_138|>": 100435, "<|LOC_139|>": 100436, "<|LOC_140|>": 100437, "<|LOC_141|>": 100438, "<|LOC_142|>": 100439, "<|LOC_143|>": 100440, "<|LOC_144|>": 100441, "<|LOC_145|>": 100442, "<|LOC_146|>": 100443, "<|LOC_147|>": 100444, "<|LOC_148|>": 100445, "<|LOC_149|>": 100446, "<|LOC_150|>": 100447, "<|LOC_151|>": 100448, "<|LOC_152|>": 100449, "<|LOC_153|>": 100450, "<|LOC_154|>": 100451, "<|LOC_155|>": 100452, 
"<|LOC_156|>": 100453, "<|LOC_157|>": 100454, "<|LOC_158|>": 100455, "<|LOC_159|>": 100456, "<|LOC_160|>": 100457, "<|LOC_161|>": 100458, "<|LOC_162|>": 100459, "<|LOC_163|>": 100460, "<|LOC_164|>": 100461, "<|LOC_165|>": 100462, "<|LOC_166|>": 100463, "<|LOC_167|>": 100464, "<|LOC_168|>": 100465, "<|LOC_169|>": 100466, "<|LOC_170|>": 100467, "<|LOC_171|>": 100468, "<|LOC_172|>": 100469, "<|LOC_173|>": 100470, "<|LOC_174|>": 100471, "<|LOC_175|>": 100472, "<|LOC_176|>": 100473, "<|LOC_177|>": 100474, "<|LOC_178|>": 100475, "<|LOC_179|>": 100476, "<|LOC_180|>": 100477, "<|LOC_181|>": 100478, "<|LOC_182|>": 100479, "<|LOC_183|>": 100480, "<|LOC_184|>": 100481, "<|LOC_185|>": 100482, "<|LOC_186|>": 100483, "<|LOC_187|>": 100484, "<|LOC_188|>": 100485, "<|LOC_189|>": 100486, "<|LOC_190|>": 100487, "<|LOC_191|>": 100488, "<|LOC_192|>": 100489, "<|LOC_193|>": 100490, "<|LOC_194|>": 100491, "<|LOC_195|>": 100492, "<|LOC_196|>": 100493, "<|LOC_197|>": 100494, "<|LOC_198|>": 100495, "<|LOC_199|>": 100496, "<|LOC_200|>": 100497, "<|LOC_201|>": 100498, "<|LOC_202|>": 100499, "<|LOC_203|>": 100500, "<|LOC_204|>": 100501, "<|LOC_205|>": 100502, "<|LOC_206|>": 100503, "<|LOC_207|>": 100504, "<|LOC_208|>": 100505, "<|LOC_209|>": 100506, "<|LOC_210|>": 100507, "<|LOC_211|>": 100508, "<|LOC_212|>": 100509, "<|LOC_213|>": 100510, "<|LOC_214|>": 100511, "<|LOC_215|>": 100512, "<|LOC_216|>": 100513, "<|LOC_217|>": 100514, "<|LOC_218|>": 100515, "<|LOC_219|>": 100516, "<|LOC_220|>": 100517, "<|LOC_221|>": 100518, "<|LOC_222|>": 100519, "<|LOC_223|>": 100520, "<|LOC_224|>": 100521, "<|LOC_225|>": 100522, "<|LOC_226|>": 100523, "<|LOC_227|>": 100524, "<|LOC_228|>": 100525, "<|LOC_229|>": 100526, "<|LOC_230|>": 100527, "<|LOC_231|>": 100528, "<|LOC_232|>": 100529, "<|LOC_233|>": 100530, "<|LOC_234|>": 100531, "<|LOC_235|>": 100532, "<|LOC_236|>": 100533, "<|LOC_237|>": 100534, "<|LOC_238|>": 100535, "<|LOC_239|>": 100536, "<|LOC_240|>": 100537, "<|LOC_241|>": 100538, "<|LOC_242|>": 100539, "<|LOC_243|>": 100540, "<|LOC_244|>": 100541, "<|LOC_245|>": 100542, "<|LOC_246|>": 100543, "<|LOC_247|>": 100544, "<|LOC_248|>": 100545, "<|LOC_249|>": 100546, "<|LOC_250|>": 100547, "<|LOC_251|>": 100548, "<|LOC_252|>": 100549, "<|LOC_253|>": 100550, "<|LOC_254|>": 100551, "<|LOC_255|>": 100552, "<|LOC_256|>": 100553, "<|LOC_257|>": 100554, "<|LOC_258|>": 100555, "<|LOC_259|>": 100556, "<|LOC_260|>": 100557, "<|LOC_261|>": 100558, "<|LOC_262|>": 100559, "<|LOC_263|>": 100560, "<|LOC_264|>": 100561, "<|LOC_265|>": 100562, "<|LOC_266|>": 100563, "<|LOC_267|>": 100564, "<|LOC_268|>": 100565, "<|LOC_269|>": 100566, "<|LOC_270|>": 100567, "<|LOC_271|>": 100568, "<|LOC_272|>": 100569, "<|LOC_273|>": 100570, "<|LOC_274|>": 100571, "<|LOC_275|>": 100572, "<|LOC_276|>": 100573, "<|LOC_277|>": 100574, "<|LOC_278|>": 100575, "<|LOC_279|>": 100576, "<|LOC_280|>": 100577, "<|LOC_281|>": 100578, "<|LOC_282|>": 100579, "<|LOC_283|>": 100580, "<|LOC_284|>": 100581, "<|LOC_285|>": 100582, "<|LOC_286|>": 100583, "<|LOC_287|>": 100584, "<|LOC_288|>": 100585, "<|LOC_289|>": 100586, "<|LOC_290|>": 100587, "<|LOC_291|>": 100588, "<|LOC_292|>": 100589, "<|LOC_293|>": 100590, "<|LOC_294|>": 100591, "<|LOC_295|>": 100592, "<|LOC_296|>": 100593, "<|LOC_297|>": 100594, "<|LOC_298|>": 100595, "<|LOC_299|>": 100596, "<|LOC_300|>": 100597, "<|LOC_301|>": 100598, "<|LOC_302|>": 100599, "<|LOC_303|>": 100600, "<|LOC_304|>": 100601, "<|LOC_305|>": 100602, "<|LOC_306|>": 100603, "<|LOC_307|>": 100604, "<|LOC_308|>": 100605, "<|LOC_309|>": 100606, 
"<|LOC_310|>": 100607, "<|LOC_311|>": 100608, "<|LOC_312|>": 100609, "<|LOC_313|>": 100610, "<|LOC_314|>": 100611, "<|LOC_315|>": 100612, "<|LOC_316|>": 100613, "<|LOC_317|>": 100614, "<|LOC_318|>": 100615, "<|LOC_319|>": 100616, "<|LOC_320|>": 100617, "<|LOC_321|>": 100618, "<|LOC_322|>": 100619, "<|LOC_323|>": 100620, "<|LOC_324|>": 100621, "<|LOC_325|>": 100622, "<|LOC_326|>": 100623, "<|LOC_327|>": 100624, "<|LOC_328|>": 100625, "<|LOC_329|>": 100626, "<|LOC_330|>": 100627, "<|LOC_331|>": 100628, "<|LOC_332|>": 100629, "<|LOC_333|>": 100630, "<|LOC_334|>": 100631, "<|LOC_335|>": 100632, "<|LOC_336|>": 100633, "<|LOC_337|>": 100634, "<|LOC_338|>": 100635, "<|LOC_339|>": 100636, "<|LOC_340|>": 100637, "<|LOC_341|>": 100638, "<|LOC_342|>": 100639, "<|LOC_343|>": 100640, "<|LOC_344|>": 100641, "<|LOC_345|>": 100642, "<|LOC_346|>": 100643, "<|LOC_347|>": 100644, "<|LOC_348|>": 100645, "<|LOC_349|>": 100646, "<|LOC_350|>": 100647, "<|LOC_351|>": 100648, "<|LOC_352|>": 100649, "<|LOC_353|>": 100650, "<|LOC_354|>": 100651, "<|LOC_355|>": 100652, "<|LOC_356|>": 100653, "<|LOC_357|>": 100654, "<|LOC_358|>": 100655, "<|LOC_359|>": 100656, "<|LOC_360|>": 100657, "<|LOC_361|>": 100658, "<|LOC_362|>": 100659, "<|LOC_363|>": 100660, "<|LOC_364|>": 100661, "<|LOC_365|>": 100662, "<|LOC_366|>": 100663, "<|LOC_367|>": 100664, "<|LOC_368|>": 100665, "<|LOC_369|>": 100666, "<|LOC_370|>": 100667, "<|LOC_371|>": 100668, "<|LOC_372|>": 100669, "<|LOC_373|>": 100670, "<|LOC_374|>": 100671, "<|LOC_375|>": 100672, "<|LOC_376|>": 100673, "<|LOC_377|>": 100674, "<|LOC_378|>": 100675, "<|LOC_379|>": 100676, "<|LOC_380|>": 100677, "<|LOC_381|>": 100678, "<|LOC_382|>": 100679, "<|LOC_383|>": 100680, "<|LOC_384|>": 100681, "<|LOC_385|>": 100682, "<|LOC_386|>": 100683, "<|LOC_387|>": 100684, "<|LOC_388|>": 100685, "<|LOC_389|>": 100686, "<|LOC_390|>": 100687, "<|LOC_391|>": 100688, "<|LOC_392|>": 100689, "<|LOC_393|>": 100690, "<|LOC_394|>": 100691, "<|LOC_395|>": 100692, "<|LOC_396|>": 100693, "<|LOC_397|>": 100694, "<|LOC_398|>": 100695, "<|LOC_399|>": 100696, "<|LOC_400|>": 100697, "<|LOC_401|>": 100698, "<|LOC_402|>": 100699, "<|LOC_403|>": 100700, "<|LOC_404|>": 100701, "<|LOC_405|>": 100702, "<|LOC_406|>": 100703, "<|LOC_407|>": 100704, "<|LOC_408|>": 100705, "<|LOC_409|>": 100706, "<|LOC_410|>": 100707, "<|LOC_411|>": 100708, "<|LOC_412|>": 100709, "<|LOC_413|>": 100710, "<|LOC_414|>": 100711, "<|LOC_415|>": 100712, "<|LOC_416|>": 100713, "<|LOC_417|>": 100714, "<|LOC_418|>": 100715, "<|LOC_419|>": 100716, "<|LOC_420|>": 100717, "<|LOC_421|>": 100718, "<|LOC_422|>": 100719, "<|LOC_423|>": 100720, "<|LOC_424|>": 100721, "<|LOC_425|>": 100722, "<|LOC_426|>": 100723, "<|LOC_427|>": 100724, "<|LOC_428|>": 100725, "<|LOC_429|>": 100726, "<|LOC_430|>": 100727, "<|LOC_431|>": 100728, "<|LOC_432|>": 100729, "<|LOC_433|>": 100730, "<|LOC_434|>": 100731, "<|LOC_435|>": 100732, "<|LOC_436|>": 100733, "<|LOC_437|>": 100734, "<|LOC_438|>": 100735, "<|LOC_439|>": 100736, "<|LOC_440|>": 100737, "<|LOC_441|>": 100738, "<|LOC_442|>": 100739, "<|LOC_443|>": 100740, "<|LOC_444|>": 100741, "<|LOC_445|>": 100742, "<|LOC_446|>": 100743, "<|LOC_447|>": 100744, "<|LOC_448|>": 100745, "<|LOC_449|>": 100746, "<|LOC_450|>": 100747, "<|LOC_451|>": 100748, "<|LOC_452|>": 100749, "<|LOC_453|>": 100750, "<|LOC_454|>": 100751, "<|LOC_455|>": 100752, "<|LOC_456|>": 100753, "<|LOC_457|>": 100754, "<|LOC_458|>": 100755, "<|LOC_459|>": 100756, "<|LOC_460|>": 100757, "<|LOC_461|>": 100758, "<|LOC_462|>": 100759, "<|LOC_463|>": 100760, 
"<|LOC_464|>": 100761, "<|LOC_465|>": 100762, "<|LOC_466|>": 100763, "<|LOC_467|>": 100764, "<|LOC_468|>": 100765, "<|LOC_469|>": 100766, "<|LOC_470|>": 100767, "<|LOC_471|>": 100768, "<|LOC_472|>": 100769, "<|LOC_473|>": 100770, "<|LOC_474|>": 100771, "<|LOC_475|>": 100772, "<|LOC_476|>": 100773, "<|LOC_477|>": 100774, "<|LOC_478|>": 100775, "<|LOC_479|>": 100776, "<|LOC_480|>": 100777, "<|LOC_481|>": 100778, "<|LOC_482|>": 100779, "<|LOC_483|>": 100780, "<|LOC_484|>": 100781, "<|LOC_485|>": 100782, "<|LOC_486|>": 100783, "<|LOC_487|>": 100784, "<|LOC_488|>": 100785, "<|LOC_489|>": 100786, "<|LOC_490|>": 100787, "<|LOC_491|>": 100788, "<|LOC_492|>": 100789, "<|LOC_493|>": 100790, "<|LOC_494|>": 100791, "<|LOC_495|>": 100792, "<|LOC_496|>": 100793, "<|LOC_497|>": 100794, "<|LOC_498|>": 100795, "<|LOC_499|>": 100796, "<|LOC_500|>": 100797, "<|LOC_501|>": 100798, "<|LOC_502|>": 100799, "<|LOC_503|>": 100800, "<|LOC_504|>": 100801, "<|LOC_505|>": 100802, "<|LOC_506|>": 100803, "<|LOC_507|>": 100804, "<|LOC_508|>": 100805, "<|LOC_509|>": 100806, "<|LOC_510|>": 100807, "<|LOC_511|>": 100808, "<|LOC_512|>": 100809, "<|LOC_513|>": 100810, "<|LOC_514|>": 100811, "<|LOC_515|>": 100812, "<|LOC_516|>": 100813, "<|LOC_517|>": 100814, "<|LOC_518|>": 100815, "<|LOC_519|>": 100816, "<|LOC_520|>": 100817, "<|LOC_521|>": 100818, "<|LOC_522|>": 100819, "<|LOC_523|>": 100820, "<|LOC_524|>": 100821, "<|LOC_525|>": 100822, "<|LOC_526|>": 100823, "<|LOC_527|>": 100824, "<|LOC_528|>": 100825, "<|LOC_529|>": 100826, "<|LOC_530|>": 100827, "<|LOC_531|>": 100828, "<|LOC_532|>": 100829, "<|LOC_533|>": 100830, "<|LOC_534|>": 100831, "<|LOC_535|>": 100832, "<|LOC_536|>": 100833, "<|LOC_537|>": 100834, "<|LOC_538|>": 100835, "<|LOC_539|>": 100836, "<|LOC_540|>": 100837, "<|LOC_541|>": 100838, "<|LOC_542|>": 100839, "<|LOC_543|>": 100840, "<|LOC_544|>": 100841, "<|LOC_545|>": 100842, "<|LOC_546|>": 100843, "<|LOC_547|>": 100844, "<|LOC_548|>": 100845, "<|LOC_549|>": 100846, "<|LOC_550|>": 100847, "<|LOC_551|>": 100848, "<|LOC_552|>": 100849, "<|LOC_553|>": 100850, "<|LOC_554|>": 100851, "<|LOC_555|>": 100852, "<|LOC_556|>": 100853, "<|LOC_557|>": 100854, "<|LOC_558|>": 100855, "<|LOC_559|>": 100856, "<|LOC_560|>": 100857, "<|LOC_561|>": 100858, "<|LOC_562|>": 100859, "<|LOC_563|>": 100860, "<|LOC_564|>": 100861, "<|LOC_565|>": 100862, "<|LOC_566|>": 100863, "<|LOC_567|>": 100864, "<|LOC_568|>": 100865, "<|LOC_569|>": 100866, "<|LOC_570|>": 100867, "<|LOC_571|>": 100868, "<|LOC_572|>": 100869, "<|LOC_573|>": 100870, "<|LOC_574|>": 100871, "<|LOC_575|>": 100872, "<|LOC_576|>": 100873, "<|LOC_577|>": 100874, "<|LOC_578|>": 100875, "<|LOC_579|>": 100876, "<|LOC_580|>": 100877, "<|LOC_581|>": 100878, "<|LOC_582|>": 100879, "<|LOC_583|>": 100880, "<|LOC_584|>": 100881, "<|LOC_585|>": 100882, "<|LOC_586|>": 100883, "<|LOC_587|>": 100884, "<|LOC_588|>": 100885, "<|LOC_589|>": 100886, "<|LOC_590|>": 100887, "<|LOC_591|>": 100888, "<|LOC_592|>": 100889, "<|LOC_593|>": 100890, "<|LOC_594|>": 100891, "<|LOC_595|>": 100892, "<|LOC_596|>": 100893, "<|LOC_597|>": 100894, "<|LOC_598|>": 100895, "<|LOC_599|>": 100896, "<|LOC_600|>": 100897, "<|LOC_601|>": 100898, "<|LOC_602|>": 100899, "<|LOC_603|>": 100900, "<|LOC_604|>": 100901, "<|LOC_605|>": 100902, "<|LOC_606|>": 100903, "<|LOC_607|>": 100904, "<|LOC_608|>": 100905, "<|LOC_609|>": 100906, "<|LOC_610|>": 100907, "<|LOC_611|>": 100908, "<|LOC_612|>": 100909, "<|LOC_613|>": 100910, "<|LOC_614|>": 100911, "<|LOC_615|>": 100912, "<|LOC_616|>": 100913, "<|LOC_617|>": 100914, 
"<|LOC_618|>": 100915, "<|LOC_619|>": 100916, "<|LOC_620|>": 100917, "<|LOC_621|>": 100918, "<|LOC_622|>": 100919, "<|LOC_623|>": 100920, "<|LOC_624|>": 100921, "<|LOC_625|>": 100922, "<|LOC_626|>": 100923, "<|LOC_627|>": 100924, "<|LOC_628|>": 100925, "<|LOC_629|>": 100926, "<|LOC_630|>": 100927, "<|LOC_631|>": 100928, "<|LOC_632|>": 100929, "<|LOC_633|>": 100930, "<|LOC_634|>": 100931, "<|LOC_635|>": 100932, "<|LOC_636|>": 100933, "<|LOC_637|>": 100934, "<|LOC_638|>": 100935, "<|LOC_639|>": 100936, "<|LOC_640|>": 100937, "<|LOC_641|>": 100938, "<|LOC_642|>": 100939, "<|LOC_643|>": 100940, "<|LOC_644|>": 100941, "<|LOC_645|>": 100942, "<|LOC_646|>": 100943, "<|LOC_647|>": 100944, "<|LOC_648|>": 100945, "<|LOC_649|>": 100946, "<|LOC_650|>": 100947, "<|LOC_651|>": 100948, "<|LOC_652|>": 100949, "<|LOC_653|>": 100950, "<|LOC_654|>": 100951, "<|LOC_655|>": 100952, "<|LOC_656|>": 100953, "<|LOC_657|>": 100954, "<|LOC_658|>": 100955, "<|LOC_659|>": 100956, "<|LOC_660|>": 100957, "<|LOC_661|>": 100958, "<|LOC_662|>": 100959, "<|LOC_663|>": 100960, "<|LOC_664|>": 100961, "<|LOC_665|>": 100962, "<|LOC_666|>": 100963, "<|LOC_667|>": 100964, "<|LOC_668|>": 100965, "<|LOC_669|>": 100966, "<|LOC_670|>": 100967, "<|LOC_671|>": 100968, "<|LOC_672|>": 100969, "<|LOC_673|>": 100970, "<|LOC_674|>": 100971, "<|LOC_675|>": 100972, "<|LOC_676|>": 100973, "<|LOC_677|>": 100974, "<|LOC_678|>": 100975, "<|LOC_679|>": 100976, "<|LOC_680|>": 100977, "<|LOC_681|>": 100978, "<|LOC_682|>": 100979, "<|LOC_683|>": 100980, "<|LOC_684|>": 100981, "<|LOC_685|>": 100982, "<|LOC_686|>": 100983, "<|LOC_687|>": 100984, "<|LOC_688|>": 100985, "<|LOC_689|>": 100986, "<|LOC_690|>": 100987, "<|LOC_691|>": 100988, "<|LOC_692|>": 100989, "<|LOC_693|>": 100990, "<|LOC_694|>": 100991, "<|LOC_695|>": 100992, "<|LOC_696|>": 100993, "<|LOC_697|>": 100994, "<|LOC_698|>": 100995, "<|LOC_699|>": 100996, "<|LOC_700|>": 100997, "<|LOC_701|>": 100998, "<|LOC_702|>": 100999, "<|LOC_703|>": 101000, "<|LOC_704|>": 101001, "<|LOC_705|>": 101002, "<|LOC_706|>": 101003, "<|LOC_707|>": 101004, "<|LOC_708|>": 101005, "<|LOC_709|>": 101006, "<|LOC_710|>": 101007, "<|LOC_711|>": 101008, "<|LOC_712|>": 101009, "<|LOC_713|>": 101010, "<|LOC_714|>": 101011, "<|LOC_715|>": 101012, "<|LOC_716|>": 101013, "<|LOC_717|>": 101014, "<|LOC_718|>": 101015, "<|LOC_719|>": 101016, "<|LOC_720|>": 101017, "<|LOC_721|>": 101018, "<|LOC_722|>": 101019, "<|LOC_723|>": 101020, "<|LOC_724|>": 101021, "<|LOC_725|>": 101022, "<|LOC_726|>": 101023, "<|LOC_727|>": 101024, "<|LOC_728|>": 101025, "<|LOC_729|>": 101026, "<|LOC_730|>": 101027, "<|LOC_731|>": 101028, "<|LOC_732|>": 101029, "<|LOC_733|>": 101030, "<|LOC_734|>": 101031, "<|LOC_735|>": 101032, "<|LOC_736|>": 101033, "<|LOC_737|>": 101034, "<|LOC_738|>": 101035, "<|LOC_739|>": 101036, "<|LOC_740|>": 101037, "<|LOC_741|>": 101038, "<|LOC_742|>": 101039, "<|LOC_743|>": 101040, "<|LOC_744|>": 101041, "<|LOC_745|>": 101042, "<|LOC_746|>": 101043, "<|LOC_747|>": 101044, "<|LOC_748|>": 101045, "<|LOC_749|>": 101046, "<|LOC_750|>": 101047, "<|LOC_751|>": 101048, "<|LOC_752|>": 101049, "<|LOC_753|>": 101050, "<|LOC_754|>": 101051, "<|LOC_755|>": 101052, "<|LOC_756|>": 101053, "<|LOC_757|>": 101054, "<|LOC_758|>": 101055, "<|LOC_759|>": 101056, "<|LOC_760|>": 101057, "<|LOC_761|>": 101058, "<|LOC_762|>": 101059, "<|LOC_763|>": 101060, "<|LOC_764|>": 101061, "<|LOC_765|>": 101062, "<|LOC_766|>": 101063, "<|LOC_767|>": 101064, "<|LOC_768|>": 101065, "<|LOC_769|>": 101066, "<|LOC_770|>": 101067, "<|LOC_771|>": 101068, 
"<|LOC_772|>": 101069, "<|LOC_773|>": 101070, "<|LOC_774|>": 101071, "<|LOC_775|>": 101072, "<|LOC_776|>": 101073, "<|LOC_777|>": 101074, "<|LOC_778|>": 101075, "<|LOC_779|>": 101076, "<|LOC_780|>": 101077, "<|LOC_781|>": 101078, "<|LOC_782|>": 101079, "<|LOC_783|>": 101080, "<|LOC_784|>": 101081, "<|LOC_785|>": 101082, "<|LOC_786|>": 101083, "<|LOC_787|>": 101084, "<|LOC_788|>": 101085, "<|LOC_789|>": 101086, "<|LOC_790|>": 101087, "<|LOC_791|>": 101088, "<|LOC_792|>": 101089, "<|LOC_793|>": 101090, "<|LOC_794|>": 101091, "<|LOC_795|>": 101092, "<|LOC_796|>": 101093, "<|LOC_797|>": 101094, "<|LOC_798|>": 101095, "<|LOC_799|>": 101096, "<|LOC_800|>": 101097, "<|LOC_801|>": 101098, "<|LOC_802|>": 101099, "<|LOC_803|>": 101100, "<|LOC_804|>": 101101, "<|LOC_805|>": 101102, "<|LOC_806|>": 101103, "<|LOC_807|>": 101104, "<|LOC_808|>": 101105, "<|LOC_809|>": 101106, "<|LOC_810|>": 101107, "<|LOC_811|>": 101108, "<|LOC_812|>": 101109, "<|LOC_813|>": 101110, "<|LOC_814|>": 101111, "<|LOC_815|>": 101112, "<|LOC_816|>": 101113, "<|LOC_817|>": 101114, "<|LOC_818|>": 101115, "<|LOC_819|>": 101116, "<|LOC_820|>": 101117, "<|LOC_821|>": 101118, "<|LOC_822|>": 101119, "<|LOC_823|>": 101120, "<|LOC_824|>": 101121, "<|LOC_825|>": 101122, "<|LOC_826|>": 101123, "<|LOC_827|>": 101124, "<|LOC_828|>": 101125, "<|LOC_829|>": 101126, "<|LOC_830|>": 101127, "<|LOC_831|>": 101128, "<|LOC_832|>": 101129, "<|LOC_833|>": 101130, "<|LOC_834|>": 101131, "<|LOC_835|>": 101132, "<|LOC_836|>": 101133, "<|LOC_837|>": 101134, "<|LOC_838|>": 101135, "<|LOC_839|>": 101136, "<|LOC_840|>": 101137, "<|LOC_841|>": 101138, "<|LOC_842|>": 101139, "<|LOC_843|>": 101140, "<|LOC_844|>": 101141, "<|LOC_845|>": 101142, "<|LOC_846|>": 101143, "<|LOC_847|>": 101144, "<|LOC_848|>": 101145, "<|LOC_849|>": 101146, "<|LOC_850|>": 101147, "<|LOC_851|>": 101148, "<|LOC_852|>": 101149, "<|LOC_853|>": 101150, "<|LOC_854|>": 101151, "<|LOC_855|>": 101152, "<|LOC_856|>": 101153, "<|LOC_857|>": 101154, "<|LOC_858|>": 101155, "<|LOC_859|>": 101156, "<|LOC_860|>": 101157, "<|LOC_861|>": 101158, "<|LOC_862|>": 101159, "<|LOC_863|>": 101160, "<|LOC_864|>": 101161, "<|LOC_865|>": 101162, "<|LOC_866|>": 101163, "<|LOC_867|>": 101164, "<|LOC_868|>": 101165, "<|LOC_869|>": 101166, "<|LOC_870|>": 101167, "<|LOC_871|>": 101168, "<|LOC_872|>": 101169, "<|LOC_873|>": 101170, "<|LOC_874|>": 101171, "<|LOC_875|>": 101172, "<|LOC_876|>": 101173, "<|LOC_877|>": 101174, "<|LOC_878|>": 101175, "<|LOC_879|>": 101176, "<|LOC_880|>": 101177, "<|LOC_881|>": 101178, "<|LOC_882|>": 101179, "<|LOC_883|>": 101180, "<|LOC_884|>": 101181, "<|LOC_885|>": 101182, "<|LOC_886|>": 101183, "<|LOC_887|>": 101184, "<|LOC_888|>": 101185, "<|LOC_889|>": 101186, "<|LOC_890|>": 101187, "<|LOC_891|>": 101188, "<|LOC_892|>": 101189, "<|LOC_893|>": 101190, "<|LOC_894|>": 101191, "<|LOC_895|>": 101192, "<|LOC_896|>": 101193, "<|LOC_897|>": 101194, "<|LOC_898|>": 101195, "<|LOC_899|>": 101196, "<|LOC_900|>": 101197, "<|LOC_901|>": 101198, "<|LOC_902|>": 101199, "<|LOC_903|>": 101200, "<|LOC_904|>": 101201, "<|LOC_905|>": 101202, "<|LOC_906|>": 101203, "<|LOC_907|>": 101204, "<|LOC_908|>": 101205, "<|LOC_909|>": 101206, "<|LOC_910|>": 101207, "<|LOC_911|>": 101208, "<|LOC_912|>": 101209, "<|LOC_913|>": 101210, "<|LOC_914|>": 101211, "<|LOC_915|>": 101212, "<|LOC_916|>": 101213, "<|LOC_917|>": 101214, "<|LOC_918|>": 101215, "<|LOC_919|>": 101216, "<|LOC_920|>": 101217, "<|LOC_921|>": 101218, "<|LOC_922|>": 101219, "<|LOC_923|>": 101220, "<|LOC_924|>": 101221, "<|LOC_925|>": 101222, 
"<|LOC_926|>": 101223, "<|LOC_927|>": 101224, "<|LOC_928|>": 101225, "<|LOC_929|>": 101226, "<|LOC_930|>": 101227, "<|LOC_931|>": 101228, "<|LOC_932|>": 101229, "<|LOC_933|>": 101230, "<|LOC_934|>": 101231, "<|LOC_935|>": 101232, "<|LOC_936|>": 101233, "<|LOC_937|>": 101234, "<|LOC_938|>": 101235, "<|LOC_939|>": 101236, "<|LOC_940|>": 101237, "<|LOC_941|>": 101238, "<|LOC_942|>": 101239, "<|LOC_943|>": 101240, "<|LOC_944|>": 101241, "<|LOC_945|>": 101242, "<|LOC_946|>": 101243, "<|LOC_947|>": 101244, "<|LOC_948|>": 101245, "<|LOC_949|>": 101246, "<|LOC_950|>": 101247, "<|LOC_951|>": 101248, "<|LOC_952|>": 101249, "<|LOC_953|>": 101250, "<|LOC_954|>": 101251, "<|LOC_955|>": 101252, "<|LOC_956|>": 101253, "<|LOC_957|>": 101254, "<|LOC_958|>": 101255, "<|LOC_959|>": 101256, "<|LOC_960|>": 101257, "<|LOC_961|>": 101258, "<|LOC_962|>": 101259, "<|LOC_963|>": 101260, "<|LOC_964|>": 101261, "<|LOC_965|>": 101262, "<|LOC_966|>": 101263, "<|LOC_967|>": 101264, "<|LOC_968|>": 101265, "<|LOC_969|>": 101266, "<|LOC_970|>": 101267, "<|LOC_971|>": 101268, "<|LOC_972|>": 101269, "<|LOC_973|>": 101270, "<|LOC_974|>": 101271, "<|LOC_975|>": 101272, "<|LOC_976|>": 101273, "<|LOC_977|>": 101274, "<|LOC_978|>": 101275, "<|LOC_979|>": 101276, "<|LOC_980|>": 101277, "<|LOC_981|>": 101278, "<|LOC_982|>": 101279, "<|LOC_983|>": 101280, "<|LOC_984|>": 101281, "<|LOC_985|>": 101282, "<|LOC_986|>": 101283, "<|LOC_987|>": 101284, "<|LOC_988|>": 101285, "<|LOC_989|>": 101286, "<|LOC_990|>": 101287, "<|LOC_991|>": 101288, "<|LOC_992|>": 101289, "<|LOC_993|>": 101290, "<|LOC_994|>": 101291, "<|LOC_995|>": 101292, "<|LOC_996|>": 101293, "<|LOC_997|>": 101294, "<|LOC_998|>": 101295, "<|LOC_999|>": 101296, "<|LOC_1000|>": 101297, "<|LOC_BEGIN|>": 101298, "<|LOC_END|>": 101299, "<|LOC_SEP|>": 101300, "<|CROP_COL_SEP|>": 101301, "<|CROP_ROW_SEP|>": 101302, "<|IMAGE_SEP|>": 101303, "<|IMAGE_START|>": 101304, "<|IMAGE_END|>": 101305, "<|VIDEO_START|>": 101306, "<|VIDEO_END|>": 101307, "<|ASR_START|>": 101308, "<|ASR_END|>": 101309, "<|IMAGE_UNUSE:6|>": 101310, "<|IMAGE_UNUSE:7|>": 101311, "<|IMAGE_UNUSE:8|>": 101312, "<|IMAGE_UNUSE:9|>": 101313, "<|IMAGE_UNUSE:10|>": 101314, "<|IMAGE_UNUSE:11|>": 101315, "<|IMAGE_UNUSE:12|>": 101316, "<|IMAGE_UNUSE:13|>": 101317, "<|IMAGE_UNUSE:14|>": 101318, "<|IMAGE_UNUSE:15|>": 101319, "<|IMAGE_UNUSE:16|>": 101320, "<|IMAGE_UNUSE:17|>": 101321, "<|IMAGE_UNUSE:18|>": 101322, "<|IMAGE_UNUSE:19|>": 101323, "<|IMAGE_UNUSE:20|>": 101324, "<|IMAGE_UNUSE:21|>": 101325, "<|IMAGE_UNUSE:22|>": 101326, "<|IMAGE_UNUSE:23|>": 101327, "<|IMAGE_UNUSE:24|>": 101328, "<|IMAGE_UNUSE:25|>": 101329, "<|IMAGE_UNUSE:26|>": 101330, "<|IMAGE_UNUSE:27|>": 101331, "<|IMAGE_UNUSE:28|>": 101332, "<|IMAGE_UNUSE:29|>": 101333, "<|IMAGE_UNUSE:30|>": 101334, "<|IMAGE_UNUSE:31|>": 101335, "<|IMAGE_UNUSE:32|>": 101336, "<|IMAGE_UNUSE:33|>": 101337, "<|IMAGE_UNUSE:34|>": 101338, "<|IMAGE_UNUSE:35|>": 101339, "<|IMAGE_UNUSE:36|>": 101340, "<|IMAGE_UNUSE:37|>": 101341, "<|IMAGE_UNUSE:38|>": 101342, "<|IMAGE_UNUSE:39|>": 101343, "<|IMAGE_UNUSE:40|>": 101344, "<|IMAGE_UNUSE:41|>": 101345, "<|IMAGE_UNUSE:42|>": 101346, "<|IMAGE_UNUSE:43|>": 101347, "<|IMAGE_UNUSE:44|>": 101348, "<|IMAGE_UNUSE:45|>": 101349, "<|IMAGE_UNUSE:46|>": 101350, "<|IMAGE_UNUSE:47|>": 101351, "<|IMAGE_UNUSE:48|>": 101352, "<|IMAGE_UNUSE:49|>": 101353, "<|IMAGE_UNUSE:50|>": 101354, "<|IMAGE_UNUSE:51|>": 101355, "<|IMAGE_UNUSE:52|>": 101356, "<|IMAGE_UNUSE:53|>": 101357, "<|IMAGE_UNUSE:54|>": 101358, "<|IMAGE_UNUSE:55|>": 101359, "<|IMAGE_UNUSE:56|>": 
101360, "<|IMAGE_UNUSE:57|>": 101361, "<|IMAGE_UNUSE:58|>": 101362, "<|IMAGE_UNUSE:59|>": 101363, "<|IMAGE_UNUSE:60|>": 101364, "<|IMAGE_UNUSE:61|>": 101365, "<|IMAGE_UNUSE:62|>": 101366, "<|IMAGE_UNUSE:63|>": 101367, "<|IMAGE_UNUSE:64|>": 101368, "<|IMAGE_UNUSE:65|>": 101369, "<|IMAGE_UNUSE:66|>": 101370, "<|IMAGE_UNUSE:67|>": 101371, "<|IMAGE_UNUSE:68|>": 101372, "<|IMAGE_UNUSE:69|>": 101373, "<|IMAGE_UNUSE:70|>": 101374, "<|IMAGE_UNUSE:71|>": 101375, "<|IMAGE_UNUSE:72|>": 101376, "<|IMAGE_UNUSE:73|>": 101377, "<|IMAGE_UNUSE:74|>": 101378, "<|IMAGE_UNUSE:75|>": 101379, "<|IMAGE_UNUSE:76|>": 101380, "<|IMAGE_UNUSE:77|>": 101381, "<|IMAGE_UNUSE:78|>": 101382, "<|IMAGE_UNUSE:79|>": 101383, "<|IMAGE_UNUSE:80|>": 101384, "<|IMAGE_UNUSE:81|>": 101385, "<|IMAGE_UNUSE:82|>": 101386, "<|IMAGE_UNUSE:83|>": 101387, "<|IMAGE_UNUSE:84|>": 101388, "<|IMAGE_UNUSE:85|>": 101389, "<|IMAGE_UNUSE:86|>": 101390, "<|IMAGE_UNUSE:87|>": 101391, "<|IMAGE_UNUSE:88|>": 101392, "<|IMAGE_UNUSE:89|>": 101393, "<|IMAGE_UNUSE:90|>": 101394, "<|IMAGE_UNUSE:91|>": 101395, "<|IMAGE_UNUSE:92|>": 101396, "<|IMAGE_UNUSE:93|>": 101397, "<|IMAGE_UNUSE:94|>": 101398, "<|IMAGE_UNUSE:95|>": 101399, "<|IMAGE_UNUSE:96|>": 101400, "<|IMAGE_UNUSE:97|>": 101401, "<|IMAGE_UNUSE:98|>": 101402, "<|IMAGE_UNUSE:99|>": 101403, "<|IMAGE_UNUSE:100|>": 101404, "<|IMAGE_UNUSE:101|>": 101405, "<|IMAGE_UNUSE:102|>": 101406, "<|IMAGE_UNUSE:103|>": 101407, "<|IMAGE_UNUSE:104|>": 101408, "<|IMAGE_UNUSE:105|>": 101409, "<|IMAGE_UNUSE:106|>": 101410, "<|IMAGE_UNUSE:107|>": 101411, "<|IMAGE_UNUSE:108|>": 101412, "<|IMAGE_UNUSE:109|>": 101413, "<|IMAGE_UNUSE:110|>": 101414, "<|IMAGE_UNUSE:111|>": 101415, "<|IMAGE_UNUSE:112|>": 101416, "<|IMAGE_UNUSE:113|>": 101417, "<|IMAGE_UNUSE:114|>": 101418, "<|IMAGE_UNUSE:115|>": 101419, "<|IMAGE_UNUSE:116|>": 101420, "<|IMAGE_UNUSE:117|>": 101421, "<|IMAGE_UNUSE:118|>": 101422, "<|IMAGE_UNUSE:119|>": 101423, "<|IMAGE_UNUSE:120|>": 101424, "<|IMAGE_UNUSE:121|>": 101425, "<|IMAGE_UNUSE:122|>": 101426, "<|IMAGE_UNUSE:123|>": 101427, "<|IMAGE_UNUSE:124|>": 101428, "<|IMAGE_UNUSE:125|>": 101429, "<|IMAGE_UNUSE:126|>": 101430, "<|IMAGE_UNUSE:127|>": 101431, "<|IMAGE_UNUSE:128|>": 101432, "<|IMAGE_UNUSE:129|>": 101433, "<|IMAGE_UNUSE:130|>": 101434, "<|IMAGE_UNUSE:131|>": 101435, "<|IMAGE_UNUSE:132|>": 101436, "<|IMAGE_UNUSE:133|>": 101437, "<|IMAGE_UNUSE:134|>": 101438, "<|IMAGE_UNUSE:135|>": 101439, "<|IMAGE_UNUSE:136|>": 101440, "<|IMAGE_UNUSE:137|>": 101441, "<|IMAGE_UNUSE:138|>": 101442, "<|IMAGE_UNUSE:139|>": 101443, "<|IMAGE_UNUSE:140|>": 101444, "<|IMAGE_UNUSE:141|>": 101445, "<|IMAGE_UNUSE:142|>": 101446, "<|IMAGE_UNUSE:143|>": 101447, "<|IMAGE_UNUSE:144|>": 101448, "<|IMAGE_UNUSE:145|>": 101449, "<|IMAGE_UNUSE:146|>": 101450, "<|IMAGE_UNUSE:147|>": 101451, "<|IMAGE_UNUSE:148|>": 101452, "<|IMAGE_UNUSE:149|>": 101453, "<|IMAGE_UNUSE:150|>": 101454, "<|IMAGE_UNUSE:151|>": 101455, "<|IMAGE_UNUSE:152|>": 101456, "<|IMAGE_UNUSE:153|>": 101457, "<|IMAGE_UNUSE:154|>": 101458, "<|IMAGE_UNUSE:155|>": 101459, "<|IMAGE_UNUSE:156|>": 101460, "<|IMAGE_UNUSE:157|>": 101461, "<|IMAGE_UNUSE:158|>": 101462, "<|IMAGE_UNUSE:159|>": 101463, "<|IMAGE_UNUSE:160|>": 101464, "<|IMAGE_UNUSE:161|>": 101465, "<|IMAGE_UNUSE:162|>": 101466, "<|IMAGE_UNUSE:163|>": 101467, "<|IMAGE_UNUSE:164|>": 101468, "<|IMAGE_UNUSE:165|>": 101469, "<|IMAGE_UNUSE:166|>": 101470, "<|IMAGE_UNUSE:167|>": 101471, "<|IMAGE_UNUSE:168|>": 101472, "<|IMAGE_UNUSE:169|>": 101473, "<|IMAGE_UNUSE:170|>": 101474, "<|IMAGE_UNUSE:171|>": 101475, "<|IMAGE_UNUSE:172|>": 
101476, "<|IMAGE_UNUSE:173|>": 101477, "<|IMAGE_UNUSE:174|>": 101478, "<|IMAGE_UNUSE:175|>": 101479, "<|IMAGE_UNUSE:176|>": 101480, "<|IMAGE_UNUSE:177|>": 101481, "<|IMAGE_UNUSE:178|>": 101482, "<|IMAGE_UNUSE:179|>": 101483, "<|IMAGE_UNUSE:180|>": 101484, "<|IMAGE_UNUSE:181|>": 101485, "<|IMAGE_UNUSE:182|>": 101486, "<|IMAGE_UNUSE:183|>": 101487, "<|IMAGE_UNUSE:184|>": 101488, "<|IMAGE_UNUSE:185|>": 101489, "<|IMAGE_UNUSE:186|>": 101490, "<|IMAGE_UNUSE:187|>": 101491, "<|IMAGE_UNUSE:188|>": 101492, "<|IMAGE_UNUSE:189|>": 101493, "<|IMAGE_UNUSE:190|>": 101494, "<|IMAGE_UNUSE:191|>": 101495, "<|IMAGE_UNUSE:192|>": 101496, "<|IMAGE_UNUSE:193|>": 101497, "<|IMAGE_UNUSE:194|>": 101498, "<|IMAGE_UNUSE:195|>": 101499, "<|IMAGE_UNUSE:196|>": 101500, "<|IMAGE_UNUSE:197|>": 101501, "<|IMAGE_UNUSE:198|>": 101502, "<|IMAGE_UNUSE:199|>": 101503, "<|IMAGE_UNUSE:200|>": 101504, "<|IMAGE_UNUSE:201|>": 101505, "<|IMAGE_UNUSE:202|>": 101506, "<|IMAGE_UNUSE:203|>": 101507, "<|IMAGE_UNUSE:204|>": 101508, "<|IMAGE_UNUSE:205|>": 101509, "<|IMAGE_UNUSE:206|>": 101510, "<|IMAGE_UNUSE:207|>": 101511, "<|IMAGE_UNUSE:208|>": 101512, "<|IMAGE_UNUSE:209|>": 101513, "<|IMAGE_UNUSE:210|>": 101514, "<|IMAGE_UNUSE:211|>": 101515, "<|IMAGE_UNUSE:212|>": 101516, "<|IMAGE_UNUSE:213|>": 101517, "<|IMAGE_UNUSE:214|>": 101518, "<|IMAGE_UNUSE:215|>": 101519, "<|IMAGE_UNUSE:216|>": 101520, "<|IMAGE_UNUSE:217|>": 101521, "<|IMAGE_UNUSE:218|>": 101522, "<|IMAGE_UNUSE:219|>": 101523, "<|IMAGE_UNUSE:220|>": 101524, "<|IMAGE_UNUSE:221|>": 101525, "<|IMAGE_UNUSE:222|>": 101526, "<|IMAGE_UNUSE:223|>": 101527, "<|IMAGE_UNUSE:224|>": 101528, "<|IMAGE_UNUSE:225|>": 101529, "<|IMAGE_UNUSE:226|>": 101530, "<|IMAGE_UNUSE:227|>": 101531, "<|IMAGE_UNUSE:228|>": 101532, "<|IMAGE_UNUSE:229|>": 101533, "<|IMAGE_UNUSE:230|>": 101534, "<|IMAGE_UNUSE:231|>": 101535, "<|IMAGE_UNUSE:232|>": 101536, "<|IMAGE_UNUSE:233|>": 101537, "<|IMAGE_UNUSE:234|>": 101538, "<|IMAGE_UNUSE:235|>": 101539, "<|IMAGE_UNUSE:236|>": 101540, "<|IMAGE_UNUSE:237|>": 101541, "<|IMAGE_UNUSE:238|>": 101542, "<|IMAGE_UNUSE:239|>": 101543, "<|IMAGE_UNUSE:240|>": 101544, "<|IMAGE_UNUSE:241|>": 101545, "<|IMAGE_UNUSE:242|>": 101546, "<|IMAGE_UNUSE:243|>": 101547, "<|IMAGE_UNUSE:244|>": 101548, "<|IMAGE_UNUSE:245|>": 101549, "<|IMAGE_UNUSE:246|>": 101550, "<|IMAGE_UNUSE:247|>": 101551, "<|IMAGE_UNUSE:248|>": 101552, "<|IMAGE_UNUSE:249|>": 101553, "<|IMAGE_UNUSE:250|>": 101554, "<|IMAGE_UNUSE:251|>": 101555, "<|IMAGE_UNUSE:252|>": 101556, "<|IMAGE_UNUSE:253|>": 101557, "<|IMAGE_UNUSE:254|>": 101558, "<|IMAGE_UNUSE:255|>": 101559, "<|IMAGE_UNUSE:256|>": 101560, "<|IMAGE_UNUSE:257|>": 101561, "<|IMAGE_UNUSE:258|>": 101562, "<|IMAGE_UNUSE:259|>": 101563, "<|IMAGE_UNUSE:260|>": 101564, "<|IMAGE_UNUSE:261|>": 101565, "<|IMAGE_UNUSE:262|>": 101566, "<|IMAGE_UNUSE:263|>": 101567, "<|IMAGE_UNUSE:264|>": 101568, "<|IMAGE_UNUSE:265|>": 101569, "<|IMAGE_UNUSE:266|>": 101570, "<|IMAGE_UNUSE:267|>": 101571, "<|IMAGE_UNUSE:268|>": 101572, "<|IMAGE_UNUSE:269|>": 101573, "<|IMAGE_UNUSE:270|>": 101574, "<|IMAGE_UNUSE:271|>": 101575, "<|IMAGE_UNUSE:272|>": 101576, "<|IMAGE_UNUSE:273|>": 101577, "<|IMAGE_UNUSE:274|>": 101578, "<|IMAGE_UNUSE:275|>": 101579, "<|IMAGE_UNUSE:276|>": 101580, "<|IMAGE_UNUSE:277|>": 101581, "<|IMAGE_UNUSE:278|>": 101582, "<|IMAGE_UNUSE:279|>": 101583, "<|IMAGE_UNUSE:280|>": 101584, "<|IMAGE_UNUSE:281|>": 101585, "<|IMAGE_UNUSE:282|>": 101586, "<|IMAGE_UNUSE:283|>": 101587, "<|IMAGE_UNUSE:284|>": 101588, "<|IMAGE_UNUSE:285|>": 101589, "<|IMAGE_UNUSE:286|>": 101590, 
"<|IMAGE_UNUSE:287|>": 101591, "<|IMAGE_UNUSE:288|>": 101592, "<|IMAGE_UNUSE:289|>": 101593, "<|IMAGE_UNUSE:290|>": 101594, "<|IMAGE_UNUSE:291|>": 101595, "<|IMAGE_UNUSE:292|>": 101596, "<|IMAGE_UNUSE:293|>": 101597, "<|IMAGE_UNUSE:294|>": 101598, "<|IMAGE_UNUSE:295|>": 101599, "<|IMAGE_UNUSE:296|>": 101600, "<|IMAGE_UNUSE:297|>": 101601, "<|IMAGE_UNUSE:298|>": 101602, "<|IMAGE_UNUSE:299|>": 101603, "<|IMAGE_UNUSE:300|>": 101604, "<|IMAGE_UNUSE:301|>": 101605, "<|IMAGE_UNUSE:302|>": 101606, "<|IMAGE_UNUSE:303|>": 101607, "<|IMAGE_UNUSE:304|>": 101608, "<|IMAGE_UNUSE:305|>": 101609, "<|IMAGE_UNUSE:306|>": 101610, "<|IMAGE_UNUSE:307|>": 101611, "<|IMAGE_UNUSE:308|>": 101612, "<|IMAGE_UNUSE:309|>": 101613, "<|IMAGE_UNUSE:310|>": 101614, "<|IMAGE_UNUSE:311|>": 101615, "<|IMAGE_UNUSE:312|>": 101616, "<|IMAGE_UNUSE:313|>": 101617, "<|IMAGE_UNUSE:314|>": 101618, "<|IMAGE_UNUSE:315|>": 101619, "<|IMAGE_UNUSE:316|>": 101620, "<|IMAGE_UNUSE:317|>": 101621, "<|IMAGE_UNUSE:318|>": 101622, "<|IMAGE_UNUSE:319|>": 101623, "<|IMAGE_UNUSE:320|>": 101624, "<|IMAGE_UNUSE:321|>": 101625, "<|IMAGE_UNUSE:322|>": 101626, "<|IMAGE_UNUSE:323|>": 101627, "<|IMAGE_UNUSE:324|>": 101628, "<|IMAGE_UNUSE:325|>": 101629, "<|IMAGE_UNUSE:326|>": 101630, "<|IMAGE_UNUSE:327|>": 101631, "<|IMAGE_UNUSE:328|>": 101632, "<|IMAGE_UNUSE:329|>": 101633, "<|IMAGE_UNUSE:330|>": 101634, "<|IMAGE_UNUSE:331|>": 101635, "<|IMAGE_UNUSE:332|>": 101636, "<|IMAGE_UNUSE:333|>": 101637, "<|IMAGE_UNUSE:334|>": 101638, "<|IMAGE_UNUSE:335|>": 101639, "<|IMAGE_UNUSE:336|>": 101640, "<|IMAGE_UNUSE:337|>": 101641, "<|IMAGE_UNUSE:338|>": 101642, "<|IMAGE_UNUSE:339|>": 101643, "<|IMAGE_UNUSE:340|>": 101644, "<|IMAGE_UNUSE:341|>": 101645, "<|IMAGE_UNUSE:342|>": 101646, "<|IMAGE_UNUSE:343|>": 101647, "<|IMAGE_UNUSE:344|>": 101648, "<|IMAGE_UNUSE:345|>": 101649, "<|IMAGE_UNUSE:346|>": 101650, "<|IMAGE_UNUSE:347|>": 101651, "<|IMAGE_UNUSE:348|>": 101652, "<|IMAGE_UNUSE:349|>": 101653, "<|IMAGE_UNUSE:350|>": 101654, "<|IMAGE_UNUSE:351|>": 101655, "<|IMAGE_UNUSE:352|>": 101656, "<|IMAGE_UNUSE:353|>": 101657, "<|IMAGE_UNUSE:354|>": 101658, "<|IMAGE_UNUSE:355|>": 101659, "<|IMAGE_UNUSE:356|>": 101660, "<|IMAGE_UNUSE:357|>": 101661, "<|IMAGE_UNUSE:358|>": 101662, "<|IMAGE_UNUSE:359|>": 101663, "<|IMAGE_UNUSE:360|>": 101664, "<|IMAGE_UNUSE:361|>": 101665, "<|IMAGE_UNUSE:362|>": 101666, "<|IMAGE_UNUSE:363|>": 101667, "<|IMAGE_UNUSE:364|>": 101668, "<|IMAGE_UNUSE:365|>": 101669, "<|IMAGE_UNUSE:366|>": 101670, "<|IMAGE_UNUSE:367|>": 101671, "<|IMAGE_UNUSE:368|>": 101672, "<|IMAGE_UNUSE:369|>": 101673, "<|IMAGE_UNUSE:370|>": 101674, "<|IMAGE_UNUSE:371|>": 101675, "<|IMAGE_UNUSE:372|>": 101676, "<|IMAGE_UNUSE:373|>": 101677, "<|IMAGE_UNUSE:374|>": 101678, "<|IMAGE_UNUSE:375|>": 101679, "<|IMAGE_UNUSE:376|>": 101680, "<|IMAGE_UNUSE:377|>": 101681, "<|IMAGE_UNUSE:378|>": 101682, "<|IMAGE_UNUSE:379|>": 101683, "<|IMAGE_UNUSE:380|>": 101684, "<|IMAGE_UNUSE:381|>": 101685, "<|IMAGE_UNUSE:382|>": 101686, "<|IMAGE_UNUSE:383|>": 101687, "<|IMAGE_UNUSE:384|>": 101688, "<|IMAGE_UNUSE:385|>": 101689, "<|IMAGE_UNUSE:386|>": 101690, "<|IMAGE_UNUSE:387|>": 101691, "<|IMAGE_UNUSE:388|>": 101692, "<|IMAGE_UNUSE:389|>": 101693, "<|IMAGE_UNUSE:390|>": 101694, "<|IMAGE_UNUSE:391|>": 101695, "<|IMAGE_UNUSE:392|>": 101696, "<|IMAGE_UNUSE:393|>": 101697, "<|IMAGE_UNUSE:394|>": 101698, "<|IMAGE_UNUSE:395|>": 101699, "<|IMAGE_UNUSE:396|>": 101700, "<|IMAGE_UNUSE:397|>": 101701, "<|IMAGE_UNUSE:398|>": 101702, "<|IMAGE_UNUSE:399|>": 101703, "<|IMAGE_UNUSE:400|>": 101704, 
"<|IMAGE_UNUSE:401|>": 101705, "<|IMAGE_UNUSE:402|>": 101706, "<|IMAGE_UNUSE:403|>": 101707, "<|IMAGE_UNUSE:404|>": 101708, "<|IMAGE_UNUSE:405|>": 101709, "<|IMAGE_UNUSE:406|>": 101710, "<|IMAGE_UNUSE:407|>": 101711, "<|IMAGE_UNUSE:408|>": 101712, "<|IMAGE_UNUSE:409|>": 101713, "<|IMAGE_UNUSE:410|>": 101714, "<|IMAGE_UNUSE:411|>": 101715, "<|IMAGE_UNUSE:412|>": 101716, "<|IMAGE_UNUSE:413|>": 101717, "<|IMAGE_UNUSE:414|>": 101718, "<|IMAGE_UNUSE:415|>": 101719, "<|IMAGE_UNUSE:416|>": 101720, "<|IMAGE_UNUSE:417|>": 101721, "<|IMAGE_UNUSE:418|>": 101722, "<|IMAGE_UNUSE:419|>": 101723, "<|IMAGE_UNUSE:420|>": 101724, "<|IMAGE_UNUSE:421|>": 101725, "<|IMAGE_UNUSE:422|>": 101726, "<|IMAGE_UNUSE:423|>": 101727, "<|IMAGE_UNUSE:424|>": 101728, "<|IMAGE_UNUSE:425|>": 101729, "<|IMAGE_UNUSE:426|>": 101730, "<|IMAGE_UNUSE:427|>": 101731, "<|IMAGE_UNUSE:428|>": 101732, "<|IMAGE_UNUSE:429|>": 101733, "<|IMAGE_UNUSE:430|>": 101734, "<|IMAGE_UNUSE:431|>": 101735, "<|IMAGE_UNUSE:432|>": 101736, "<|IMAGE_UNUSE:433|>": 101737, "<|IMAGE_UNUSE:434|>": 101738, "<|IMAGE_UNUSE:435|>": 101739, "<|IMAGE_UNUSE:436|>": 101740, "<|IMAGE_UNUSE:437|>": 101741, "<|IMAGE_UNUSE:438|>": 101742, "<|IMAGE_UNUSE:439|>": 101743, "<|IMAGE_UNUSE:440|>": 101744, "<|IMAGE_UNUSE:441|>": 101745, "<|IMAGE_UNUSE:442|>": 101746, "<|IMAGE_UNUSE:443|>": 101747, "<|IMAGE_UNUSE:444|>": 101748, "<|IMAGE_UNUSE:445|>": 101749, "<|IMAGE_UNUSE:446|>": 101750, "<|IMAGE_UNUSE:447|>": 101751, "<|IMAGE_UNUSE:448|>": 101752, "<|IMAGE_UNUSE:449|>": 101753, "<|IMAGE_UNUSE:450|>": 101754, "<|IMAGE_UNUSE:451|>": 101755, "<|IMAGE_UNUSE:452|>": 101756, "<|IMAGE_UNUSE:453|>": 101757, "<|IMAGE_UNUSE:454|>": 101758, "<|IMAGE_UNUSE:455|>": 101759, "<|IMAGE_UNUSE:456|>": 101760, "<|IMAGE_UNUSE:457|>": 101761, "<|IMAGE_UNUSE:458|>": 101762, "<|IMAGE_UNUSE:459|>": 101763, "<|IMAGE_UNUSE:460|>": 101764, "<|IMAGE_UNUSE:461|>": 101765, "<|IMAGE_UNUSE:462|>": 101766, "<|IMAGE_UNUSE:463|>": 101767, "<|IMAGE_UNUSE:464|>": 101768, "<|IMAGE_UNUSE:465|>": 101769, "<|IMAGE_UNUSE:466|>": 101770, "<|IMAGE_UNUSE:467|>": 101771, "<|IMAGE_UNUSE:468|>": 101772, "<|IMAGE_UNUSE:469|>": 101773, "<|IMAGE_UNUSE:470|>": 101774, "<|IMAGE_UNUSE:471|>": 101775, "<|IMAGE_UNUSE:472|>": 101776, "<|IMAGE_UNUSE:473|>": 101777, "<|IMAGE_UNUSE:474|>": 101778, "<|IMAGE_UNUSE:475|>": 101779, "<|IMAGE_UNUSE:476|>": 101780, "<|IMAGE_UNUSE:477|>": 101781, "<|IMAGE_UNUSE:478|>": 101782, "<|IMAGE_UNUSE:479|>": 101783, "<|IMAGE_UNUSE:480|>": 101784, "<|IMAGE_UNUSE:481|>": 101785, "<|IMAGE_UNUSE:482|>": 101786, "<|IMAGE_UNUSE:483|>": 101787, "<|IMAGE_UNUSE:484|>": 101788, "<|IMAGE_UNUSE:485|>": 101789, "<|IMAGE_UNUSE:486|>": 101790, "<|IMAGE_UNUSE:487|>": 101791, "<|IMAGE_UNUSE:488|>": 101792, "<|IMAGE_UNUSE:489|>": 101793, "<|IMAGE_UNUSE:490|>": 101794, "<|IMAGE_UNUSE:491|>": 101795, "<|IMAGE_UNUSE:492|>": 101796, "<|IMAGE_UNUSE:493|>": 101797, "<|IMAGE_UNUSE:494|>": 101798, "<|IMAGE_UNUSE:495|>": 101799, "<|IMAGE_UNUSE:496|>": 101800, "<|IMAGE_UNUSE:497|>": 101801, "<|IMAGE_UNUSE:498|>": 101802, "<|IMAGE_UNUSE:499|>": 101803, "<|IMAGE_UNUSE:500|>": 101804, "<|IMAGE_UNUSE:501|>": 101805, "<|IMAGE_UNUSE:502|>": 101806, "<|IMAGE_UNUSE:503|>": 101807, "<|IMAGE_UNUSE:504|>": 101808, "<|IMAGE_UNUSE:505|>": 101809, "<|IMAGE_UNUSE:506|>": 101810, "<|IMAGE_UNUSE:507|>": 101811, "<|IMAGE_UNUSE:508|>": 101812, "<|IMAGE_UNUSE:509|>": 101813, "<|IMAGE_UNUSE:510|>": 101814, "<|IMAGE_UNUSE:511|>": 101815, "<|IMAGE_UNUSE:512|>": 101816, "<|IMAGE_UNUSE:513|>": 101817, "<|IMAGE_UNUSE:514|>": 101818, 
"<|IMAGE_UNUSE:515|>": 101819, "<|IMAGE_UNUSE:516|>": 101820, "<|IMAGE_UNUSE:517|>": 101821, "<|IMAGE_UNUSE:518|>": 101822, "<|IMAGE_UNUSE:519|>": 101823, "<|IMAGE_UNUSE:520|>": 101824, "<|IMAGE_UNUSE:521|>": 101825, "<|IMAGE_UNUSE:522|>": 101826, "<|IMAGE_UNUSE:523|>": 101827, "<|IMAGE_UNUSE:524|>": 101828, "<|IMAGE_UNUSE:525|>": 101829, "<|IMAGE_UNUSE:526|>": 101830, "<|IMAGE_UNUSE:527|>": 101831, "<|IMAGE_UNUSE:528|>": 101832, "<|IMAGE_UNUSE:529|>": 101833, "<|IMAGE_UNUSE:530|>": 101834, "<|IMAGE_UNUSE:531|>": 101835, "<|IMAGE_UNUSE:532|>": 101836, "<|IMAGE_UNUSE:533|>": 101837, "<|IMAGE_UNUSE:534|>": 101838, "<|IMAGE_UNUSE:535|>": 101839, "<|IMAGE_UNUSE:536|>": 101840, "<|IMAGE_UNUSE:537|>": 101841, "<|IMAGE_UNUSE:538|>": 101842, "<|IMAGE_UNUSE:539|>": 101843, "<|IMAGE_UNUSE:540|>": 101844, "<|IMAGE_UNUSE:541|>": 101845, "<|IMAGE_UNUSE:542|>": 101846, "<|IMAGE_UNUSE:543|>": 101847, "<|IMAGE_UNUSE:544|>": 101848, "<|IMAGE_UNUSE:545|>": 101849, "<|IMAGE_UNUSE:546|>": 101850, "<|IMAGE_UNUSE:547|>": 101851, "<|IMAGE_UNUSE:548|>": 101852, "<|IMAGE_UNUSE:549|>": 101853, "<|IMAGE_UNUSE:550|>": 101854, "<|IMAGE_UNUSE:551|>": 101855, "<|IMAGE_UNUSE:552|>": 101856, "<|IMAGE_UNUSE:553|>": 101857, "<|IMAGE_UNUSE:554|>": 101858, "<|IMAGE_UNUSE:555|>": 101859, "<|IMAGE_UNUSE:556|>": 101860, "<|IMAGE_UNUSE:557|>": 101861, "<|IMAGE_UNUSE:558|>": 101862, "<|IMAGE_UNUSE:559|>": 101863, "<|IMAGE_UNUSE:560|>": 101864, "<|IMAGE_UNUSE:561|>": 101865, "<|IMAGE_UNUSE:562|>": 101866, "<|IMAGE_UNUSE:563|>": 101867, "<|IMAGE_UNUSE:564|>": 101868, "<|IMAGE_UNUSE:565|>": 101869, "<|IMAGE_UNUSE:566|>": 101870, "<|IMAGE_UNUSE:567|>": 101871, "<|IMAGE_UNUSE:568|>": 101872, "<|IMAGE_UNUSE:569|>": 101873, "<|IMAGE_UNUSE:570|>": 101874, "<|IMAGE_UNUSE:571|>": 101875, "<|IMAGE_UNUSE:572|>": 101876, "<|IMAGE_UNUSE:573|>": 101877, "<|IMAGE_UNUSE:574|>": 101878, "<|IMAGE_UNUSE:575|>": 101879, "<|IMAGE_UNUSE:576|>": 101880, "<|IMAGE_UNUSE:577|>": 101881, "<|IMAGE_UNUSE:578|>": 101882, "<|IMAGE_UNUSE:579|>": 101883, "<|IMAGE_UNUSE:580|>": 101884, "<|IMAGE_UNUSE:581|>": 101885, "<|IMAGE_UNUSE:582|>": 101886, "<|IMAGE_UNUSE:583|>": 101887, "<|IMAGE_UNUSE:584|>": 101888, "<|IMAGE_UNUSE:585|>": 101889, "<|IMAGE_UNUSE:586|>": 101890, "<|IMAGE_UNUSE:587|>": 101891, "<|IMAGE_UNUSE:588|>": 101892, "<|IMAGE_UNUSE:589|>": 101893, "<|IMAGE_UNUSE:590|>": 101894, "<|IMAGE_UNUSE:591|>": 101895, "<|IMAGE_UNUSE:592|>": 101896, "<|IMAGE_UNUSE:593|>": 101897, "<|IMAGE_UNUSE:594|>": 101898, "<|IMAGE_UNUSE:595|>": 101899, "<|IMAGE_UNUSE:596|>": 101900, "<|IMAGE_UNUSE:597|>": 101901, "<|IMAGE_UNUSE:598|>": 101902, "<|IMAGE_UNUSE:599|>": 101903, "<|IMAGE_UNUSE:600|>": 101904, "<|IMAGE_UNUSE:601|>": 101905, "<|IMAGE_UNUSE:602|>": 101906, "<|IMAGE_UNUSE:603|>": 101907, "<|IMAGE_UNUSE:604|>": 101908, "<|IMAGE_UNUSE:605|>": 101909, "<|IMAGE_UNUSE:606|>": 101910, "<|IMAGE_UNUSE:607|>": 101911, "<|IMAGE_UNUSE:608|>": 101912, "<|IMAGE_UNUSE:609|>": 101913, "<|IMAGE_UNUSE:610|>": 101914, "<|IMAGE_UNUSE:611|>": 101915, "<|IMAGE_UNUSE:612|>": 101916, "<|IMAGE_UNUSE:613|>": 101917, "<|IMAGE_UNUSE:614|>": 101918, "<|IMAGE_UNUSE:615|>": 101919, "<|IMAGE_UNUSE:616|>": 101920, "<|IMAGE_UNUSE:617|>": 101921, "<|IMAGE_UNUSE:618|>": 101922, "<|IMAGE_UNUSE:619|>": 101923, "<|IMAGE_UNUSE:620|>": 101924, "<|IMAGE_UNUSE:621|>": 101925, "<|IMAGE_UNUSE:622|>": 101926, "<|IMAGE_UNUSE:623|>": 101927, "<|IMAGE_UNUSE:624|>": 101928, "<|IMAGE_UNUSE:625|>": 101929, "<|IMAGE_UNUSE:626|>": 101930, "<|IMAGE_UNUSE:627|>": 101931, "<|IMAGE_UNUSE:628|>": 101932, 
"<|IMAGE_UNUSE:629|>": 101933, "<|IMAGE_UNUSE:630|>": 101934, "<|IMAGE_UNUSE:631|>": 101935, "<|IMAGE_UNUSE:632|>": 101936, "<|IMAGE_UNUSE:633|>": 101937, "<|IMAGE_UNUSE:634|>": 101938, "<|IMAGE_UNUSE:635|>": 101939, "<|IMAGE_UNUSE:636|>": 101940, "<|IMAGE_UNUSE:637|>": 101941, "<|IMAGE_UNUSE:638|>": 101942, "<|IMAGE_UNUSE:639|>": 101943, "<|IMAGE_UNUSE:640|>": 101944, "<|IMAGE_UNUSE:641|>": 101945, "<|IMAGE_UNUSE:642|>": 101946, "<|IMAGE_UNUSE:643|>": 101947, "<|IMAGE_UNUSE:644|>": 101948, "<|IMAGE_UNUSE:645|>": 101949, "<|IMAGE_UNUSE:646|>": 101950, "<|IMAGE_UNUSE:647|>": 101951, "<|IMAGE_UNUSE:648|>": 101952, "<|IMAGE_UNUSE:649|>": 101953, "<|IMAGE_UNUSE:650|>": 101954, "<|IMAGE_UNUSE:651|>": 101955, "<|IMAGE_UNUSE:652|>": 101956, "<|IMAGE_UNUSE:653|>": 101957, "<|IMAGE_UNUSE:654|>": 101958, "<|IMAGE_UNUSE:655|>": 101959, "<|IMAGE_UNUSE:656|>": 101960, "<|IMAGE_UNUSE:657|>": 101961, "<|IMAGE_UNUSE:658|>": 101962, "<|IMAGE_UNUSE:659|>": 101963, "<|IMAGE_UNUSE:660|>": 101964, "<|IMAGE_UNUSE:661|>": 101965, "<|IMAGE_UNUSE:662|>": 101966, "<|IMAGE_UNUSE:663|>": 101967, "<|IMAGE_UNUSE:664|>": 101968, "<|IMAGE_UNUSE:665|>": 101969, "<|IMAGE_UNUSE:666|>": 101970, "<|IMAGE_UNUSE:667|>": 101971, "<|IMAGE_UNUSE:668|>": 101972, "<|IMAGE_UNUSE:669|>": 101973, "<|IMAGE_UNUSE:670|>": 101974, "<|IMAGE_UNUSE:671|>": 101975, "<|IMAGE_UNUSE:672|>": 101976, "<|IMAGE_UNUSE:673|>": 101977, "<|IMAGE_UNUSE:674|>": 101978, "<|IMAGE_UNUSE:675|>": 101979, "<|IMAGE_UNUSE:676|>": 101980, "<|IMAGE_UNUSE:677|>": 101981, "<|IMAGE_UNUSE:678|>": 101982, "<|IMAGE_UNUSE:679|>": 101983, "<|IMAGE_UNUSE:680|>": 101984, "<|IMAGE_UNUSE:681|>": 101985, "<|IMAGE_UNUSE:682|>": 101986, "<|IMAGE_UNUSE:683|>": 101987, "<|IMAGE_UNUSE:684|>": 101988, "<|IMAGE_UNUSE:685|>": 101989, "<|IMAGE_UNUSE:686|>": 101990, "<|IMAGE_UNUSE:687|>": 101991, "<|IMAGE_UNUSE:688|>": 101992, "<|IMAGE_UNUSE:689|>": 101993, "<|IMAGE_UNUSE:690|>": 101994, "<|IMAGE_UNUSE:691|>": 101995, "<|IMAGE_UNUSE:692|>": 101996, "<|IMAGE_UNUSE:693|>": 101997, "<|IMAGE_UNUSE:694|>": 101998, "<|IMAGE_UNUSE:695|>": 101999, "<|IMAGE_UNUSE:696|>": 102000, "<|IMAGE_UNUSE:697|>": 102001, "<|IMAGE_UNUSE:698|>": 102002, "<|IMAGE_UNUSE:699|>": 102003, "<|IMAGE_UNUSE:700|>": 102004, "<|IMAGE_UNUSE:701|>": 102005, "<|IMAGE_UNUSE:702|>": 102006, "<|IMAGE_UNUSE:703|>": 102007, "<|IMAGE_UNUSE:704|>": 102008, "<|IMAGE_UNUSE:705|>": 102009, "<|IMAGE_UNUSE:706|>": 102010, "<|IMAGE_UNUSE:707|>": 102011, "<|IMAGE_UNUSE:708|>": 102012, "<|IMAGE_UNUSE:709|>": 102013, "<|IMAGE_UNUSE:710|>": 102014, "<|IMAGE_UNUSE:711|>": 102015, "<|IMAGE_UNUSE:712|>": 102016, "<|IMAGE_UNUSE:713|>": 102017, "<|IMAGE_UNUSE:714|>": 102018, "<|IMAGE_UNUSE:715|>": 102019, "<|IMAGE_UNUSE:716|>": 102020, "<|IMAGE_UNUSE:717|>": 102021, "<|IMAGE_UNUSE:718|>": 102022, "<|IMAGE_UNUSE:719|>": 102023, "<|IMAGE_UNUSE:720|>": 102024, "<|IMAGE_UNUSE:721|>": 102025, "<|IMAGE_UNUSE:722|>": 102026, "<|IMAGE_UNUSE:723|>": 102027, "<|IMAGE_UNUSE:724|>": 102028, "<|IMAGE_UNUSE:725|>": 102029, "<|IMAGE_UNUSE:726|>": 102030, "<|IMAGE_UNUSE:727|>": 102031, "<|IMAGE_UNUSE:728|>": 102032, "<|IMAGE_UNUSE:729|>": 102033, "<|IMAGE_UNUSE:730|>": 102034, "<|IMAGE_UNUSE:731|>": 102035, "<|IMAGE_UNUSE:732|>": 102036, "<|IMAGE_UNUSE:733|>": 102037, "<|IMAGE_UNUSE:734|>": 102038, "<|IMAGE_UNUSE:735|>": 102039, "<|IMAGE_UNUSE:736|>": 102040, "<|IMAGE_UNUSE:737|>": 102041, "<|IMAGE_UNUSE:738|>": 102042, "<|IMAGE_UNUSE:739|>": 102043, "<|IMAGE_UNUSE:740|>": 102044, "<|IMAGE_UNUSE:741|>": 102045, "<|IMAGE_UNUSE:742|>": 102046, 
"<|IMAGE_UNUSE:743|>": 102047, "<|IMAGE_UNUSE:744|>": 102048, "<|IMAGE_UNUSE:745|>": 102049, "<|IMAGE_UNUSE:746|>": 102050, "<|IMAGE_UNUSE:747|>": 102051, "<|IMAGE_UNUSE:748|>": 102052, "<|IMAGE_UNUSE:749|>": 102053, "<|IMAGE_UNUSE:750|>": 102054, "<|IMAGE_UNUSE:751|>": 102055, "<|IMAGE_UNUSE:752|>": 102056, "<|IMAGE_UNUSE:753|>": 102057, "<|IMAGE_UNUSE:754|>": 102058, "<|IMAGE_UNUSE:755|>": 102059, "<|IMAGE_UNUSE:756|>": 102060, "<|IMAGE_UNUSE:757|>": 102061, "<|IMAGE_UNUSE:758|>": 102062, "<|IMAGE_UNUSE:759|>": 102063, "<|IMAGE_UNUSE:760|>": 102064, "<|IMAGE_UNUSE:761|>": 102065, "<|IMAGE_UNUSE:762|>": 102066, "<|IMAGE_UNUSE:763|>": 102067, "<|IMAGE_UNUSE:764|>": 102068, "<|IMAGE_UNUSE:765|>": 102069, "<|IMAGE_UNUSE:766|>": 102070, "<|IMAGE_UNUSE:767|>": 102071, "<|IMAGE_UNUSE:768|>": 102072, "<|IMAGE_UNUSE:769|>": 102073, "<|IMAGE_UNUSE:770|>": 102074, "<|IMAGE_UNUSE:771|>": 102075, "<|IMAGE_UNUSE:772|>": 102076, "<|IMAGE_UNUSE:773|>": 102077, "<|IMAGE_UNUSE:774|>": 102078, "<|IMAGE_UNUSE:775|>": 102079, "<|IMAGE_UNUSE:776|>": 102080, "<|IMAGE_UNUSE:777|>": 102081, "<|IMAGE_UNUSE:778|>": 102082, "<|IMAGE_UNUSE:779|>": 102083, "<|IMAGE_UNUSE:780|>": 102084, "<|IMAGE_UNUSE:781|>": 102085, "<|IMAGE_UNUSE:782|>": 102086, "<|IMAGE_UNUSE:783|>": 102087, "<|IMAGE_UNUSE:784|>": 102088, "<|IMAGE_UNUSE:785|>": 102089, "<|IMAGE_UNUSE:786|>": 102090, "<|IMAGE_UNUSE:787|>": 102091, "<|IMAGE_UNUSE:788|>": 102092, "<|IMAGE_UNUSE:789|>": 102093, "<|IMAGE_UNUSE:790|>": 102094, "<|IMAGE_UNUSE:791|>": 102095, "<|IMAGE_UNUSE:792|>": 102096, "<|IMAGE_UNUSE:793|>": 102097, "<|IMAGE_UNUSE:794|>": 102098, "<|IMAGE_UNUSE:795|>": 102099, "<|IMAGE_UNUSE:796|>": 102100, "<|IMAGE_UNUSE:797|>": 102101, "<|IMAGE_UNUSE:798|>": 102102, "<|IMAGE_UNUSE:799|>": 102103, "<|IMAGE_UNUSE:800|>": 102104, "<|IMAGE_UNUSE:801|>": 102105, "<|IMAGE_UNUSE:802|>": 102106, "<|IMAGE_UNUSE:803|>": 102107, "<|IMAGE_UNUSE:804|>": 102108, "<|IMAGE_UNUSE:805|>": 102109, "<|IMAGE_UNUSE:806|>": 102110, "<|IMAGE_UNUSE:807|>": 102111, "<|IMAGE_UNUSE:808|>": 102112, "<|IMAGE_UNUSE:809|>": 102113, "<|IMAGE_UNUSE:810|>": 102114, "<|IMAGE_UNUSE:811|>": 102115, "<|IMAGE_UNUSE:812|>": 102116, "<|IMAGE_UNUSE:813|>": 102117, "<|IMAGE_UNUSE:814|>": 102118, "<|IMAGE_UNUSE:815|>": 102119, "<|IMAGE_UNUSE:816|>": 102120, "<|IMAGE_UNUSE:817|>": 102121, "<|IMAGE_UNUSE:818|>": 102122, "<|IMAGE_UNUSE:819|>": 102123, "<|IMAGE_UNUSE:820|>": 102124, "<|IMAGE_UNUSE:821|>": 102125, "<|IMAGE_UNUSE:822|>": 102126, "<|IMAGE_UNUSE:823|>": 102127, "<|IMAGE_UNUSE:824|>": 102128, "<|IMAGE_UNUSE:825|>": 102129, "<|IMAGE_UNUSE:826|>": 102130, "<|IMAGE_UNUSE:827|>": 102131, "<|IMAGE_UNUSE:828|>": 102132, "<|IMAGE_UNUSE:829|>": 102133, "<|IMAGE_UNUSE:830|>": 102134, "<|IMAGE_UNUSE:831|>": 102135, "<|IMAGE_UNUSE:832|>": 102136, "<|IMAGE_UNUSE:833|>": 102137, "<|IMAGE_UNUSE:834|>": 102138, "<|IMAGE_UNUSE:835|>": 102139, "<|IMAGE_UNUSE:836|>": 102140, "<|IMAGE_UNUSE:837|>": 102141, "<|IMAGE_UNUSE:838|>": 102142, "<|IMAGE_UNUSE:839|>": 102143, "<|IMAGE_UNUSE:840|>": 102144, "<|IMAGE_UNUSE:841|>": 102145, "<|IMAGE_UNUSE:842|>": 102146, "<|IMAGE_UNUSE:843|>": 102147, "<|IMAGE_UNUSE:844|>": 102148, "<|IMAGE_UNUSE:845|>": 102149, "<|IMAGE_UNUSE:846|>": 102150, "<|IMAGE_UNUSE:847|>": 102151, "<|IMAGE_UNUSE:848|>": 102152, "<|IMAGE_UNUSE:849|>": 102153, "<|IMAGE_UNUSE:850|>": 102154, "<|IMAGE_UNUSE:851|>": 102155, "<|IMAGE_UNUSE:852|>": 102156, "<|IMAGE_UNUSE:853|>": 102157, "<|IMAGE_UNUSE:854|>": 102158, "<|IMAGE_UNUSE:855|>": 102159, "<|IMAGE_UNUSE:856|>": 102160, 
"<|IMAGE_UNUSE:857|>": 102161, "<|IMAGE_UNUSE:858|>": 102162, "<|IMAGE_UNUSE:859|>": 102163, "<|IMAGE_UNUSE:860|>": 102164, "<|IMAGE_UNUSE:861|>": 102165, "<|IMAGE_UNUSE:862|>": 102166, "<|IMAGE_UNUSE:863|>": 102167, "<|IMAGE_UNUSE:864|>": 102168, "<|IMAGE_UNUSE:865|>": 102169, "<|IMAGE_UNUSE:866|>": 102170, "<|IMAGE_UNUSE:867|>": 102171, "<|IMAGE_UNUSE:868|>": 102172, "<|IMAGE_UNUSE:869|>": 102173, "<|IMAGE_UNUSE:870|>": 102174, "<|IMAGE_UNUSE:871|>": 102175, "<|IMAGE_UNUSE:872|>": 102176, "<|IMAGE_UNUSE:873|>": 102177, "<|IMAGE_UNUSE:874|>": 102178, "<|IMAGE_UNUSE:875|>": 102179, "<|IMAGE_UNUSE:876|>": 102180, "<|IMAGE_UNUSE:877|>": 102181, "<|IMAGE_UNUSE:878|>": 102182, "<|IMAGE_UNUSE:879|>": 102183, "<|IMAGE_UNUSE:880|>": 102184, "<|IMAGE_UNUSE:881|>": 102185, "<|IMAGE_UNUSE:882|>": 102186, "<|IMAGE_UNUSE:883|>": 102187, "<|IMAGE_UNUSE:884|>": 102188, "<|IMAGE_UNUSE:885|>": 102189, "<|IMAGE_UNUSE:886|>": 102190, "<|IMAGE_UNUSE:887|>": 102191, "<|IMAGE_UNUSE:888|>": 102192, "<|IMAGE_UNUSE:889|>": 102193, "<|IMAGE_UNUSE:890|>": 102194, "<|IMAGE_UNUSE:891|>": 102195, "<|IMAGE_UNUSE:892|>": 102196, "<|IMAGE_UNUSE:893|>": 102197, "<|IMAGE_UNUSE:894|>": 102198, "<|IMAGE_UNUSE:895|>": 102199, "<|IMAGE_UNUSE:896|>": 102200, "<|IMAGE_UNUSE:897|>": 102201, "<|IMAGE_UNUSE:898|>": 102202, "<|IMAGE_UNUSE:899|>": 102203, "<|IMAGE_UNUSE:900|>": 102204, "<|IMAGE_UNUSE:901|>": 102205, "<|IMAGE_UNUSE:902|>": 102206, "<|IMAGE_UNUSE:903|>": 102207, "<|IMAGE_UNUSE:904|>": 102208, "<|IMAGE_UNUSE:905|>": 102209, "<|IMAGE_UNUSE:906|>": 102210, "<|IMAGE_UNUSE:907|>": 102211, "<|IMAGE_UNUSE:908|>": 102212, "<|IMAGE_UNUSE:909|>": 102213, "<|IMAGE_UNUSE:910|>": 102214, "<|IMAGE_UNUSE:911|>": 102215, "<|IMAGE_UNUSE:912|>": 102216, "<|IMAGE_UNUSE:913|>": 102217, "<|IMAGE_UNUSE:914|>": 102218, "<|IMAGE_UNUSE:915|>": 102219, "<|IMAGE_UNUSE:916|>": 102220, "<|IMAGE_UNUSE:917|>": 102221, "<|IMAGE_UNUSE:918|>": 102222, "<|IMAGE_UNUSE:919|>": 102223, "<|IMAGE_UNUSE:920|>": 102224, "<|IMAGE_UNUSE:921|>": 102225, "<|IMAGE_UNUSE:922|>": 102226, "<|IMAGE_UNUSE:923|>": 102227, "<|IMAGE_UNUSE:924|>": 102228, "<|IMAGE_UNUSE:925|>": 102229, "<|IMAGE_UNUSE:926|>": 102230, "<|IMAGE_UNUSE:927|>": 102231, "<|IMAGE_UNUSE:928|>": 102232, "<|IMAGE_UNUSE:929|>": 102233, "<|IMAGE_UNUSE:930|>": 102234, "<|IMAGE_UNUSE:931|>": 102235, "<|IMAGE_UNUSE:932|>": 102236, "<|IMAGE_UNUSE:933|>": 102237, "<|IMAGE_UNUSE:934|>": 102238, "<|IMAGE_UNUSE:935|>": 102239, "<|IMAGE_UNUSE:936|>": 102240, "<|IMAGE_UNUSE:937|>": 102241, "<|IMAGE_UNUSE:938|>": 102242, "<|IMAGE_UNUSE:939|>": 102243, "<|IMAGE_UNUSE:940|>": 102244, "<|IMAGE_UNUSE:941|>": 102245, "<|IMAGE_UNUSE:942|>": 102246, "<|IMAGE_UNUSE:943|>": 102247, "<|IMAGE_UNUSE:944|>": 102248, "<|IMAGE_UNUSE:945|>": 102249, "<|IMAGE_UNUSE:946|>": 102250, "<|IMAGE_UNUSE:947|>": 102251, "<|IMAGE_UNUSE:948|>": 102252, "<|IMAGE_UNUSE:949|>": 102253, "<|IMAGE_UNUSE:950|>": 102254, "<|IMAGE_UNUSE:951|>": 102255, "<|IMAGE_UNUSE:952|>": 102256, "<|IMAGE_UNUSE:953|>": 102257, "<|IMAGE_UNUSE:954|>": 102258, "<|IMAGE_UNUSE:955|>": 102259, "<|IMAGE_UNUSE:956|>": 102260, "<|IMAGE_UNUSE:957|>": 102261, "<|IMAGE_UNUSE:958|>": 102262, "<|IMAGE_UNUSE:959|>": 102263, "<|IMAGE_UNUSE:960|>": 102264, "<|IMAGE_UNUSE:961|>": 102265, "<|IMAGE_UNUSE:962|>": 102266, "<|IMAGE_UNUSE:963|>": 102267, "<|IMAGE_UNUSE:964|>": 102268, "<|IMAGE_UNUSE:965|>": 102269, "<|IMAGE_UNUSE:966|>": 102270, "<|IMAGE_UNUSE:967|>": 102271, "<|IMAGE_UNUSE:968|>": 102272, "<|IMAGE_UNUSE:969|>": 102273, "<|IMAGE_UNUSE:970|>": 102274, 
"<|IMAGE_UNUSE:971|>": 102275, "<|IMAGE_UNUSE:972|>": 102276, "<|IMAGE_UNUSE:973|>": 102277, "<|IMAGE_UNUSE:974|>": 102278, "<|IMAGE_UNUSE:975|>": 102279, "<|IMAGE_UNUSE:976|>": 102280, "<|IMAGE_UNUSE:977|>": 102281, "<|IMAGE_UNUSE:978|>": 102282, "<|IMAGE_UNUSE:979|>": 102283, "<|IMAGE_UNUSE:980|>": 102284, "<|IMAGE_UNUSE:981|>": 102285, "<|IMAGE_UNUSE:982|>": 102286, "<|IMAGE_UNUSE:983|>": 102287, "<|IMAGE_UNUSE:984|>": 102288, "<|IMAGE_UNUSE:985|>": 102289, "<|IMAGE_UNUSE:986|>": 102290, "<|IMAGE_UNUSE:987|>": 102291, "<|IMAGE_UNUSE:988|>": 102292, "<|IMAGE_UNUSE:989|>": 102293, "<|IMAGE_UNUSE:990|>": 102294, "<|IMAGE_UNUSE:991|>": 102295, "<|IMAGE_UNUSE:992|>": 102296, "<|IMAGE_UNUSE:993|>": 102297, "<|IMAGE_UNUSE:994|>": 102298, "<|IMAGE_UNUSE:995|>": 102299, "<|IMAGE_UNUSE:996|>": 102300, "<|IMAGE_UNUSE:997|>": 102301, "<|IMAGE_UNUSE:998|>": 102302, "<|IMAGE_UNUSE:999|>": 102303, "<|IMAGE_UNUSE:1000|>": 102304, "<|IMAGE_UNUSE:1001|>": 102305, "<|IMAGE_UNUSE:1002|>": 102306, "<|IMAGE_UNUSE:1003|>": 102307, "<|IMAGE_UNUSE:1004|>": 102308, "<|IMAGE_UNUSE:1005|>": 102309, "<|IMAGE_UNUSE:1006|>": 102310, "<|IMAGE_UNUSE:1007|>": 102311, "<|IMAGE_UNUSE:1008|>": 102312, "<|IMAGE_UNUSE:1009|>": 102313, "<|IMAGE_UNUSE:1010|>": 102314, "<|IMAGE_UNUSE:1011|>": 102315, "<|IMAGE_UNUSE:1012|>": 102316, "<|IMAGE_UNUSE:1013|>": 102317, "<|IMAGE_UNUSE:1014|>": 102318, "<|IMAGE_UNUSE:1015|>": 102319, "<|IMAGE_UNUSE:1016|>": 102320, "<|IMAGE_UNUSE:1017|>": 102321, "<|IMAGE_UNUSE:1018|>": 102322, "<|IMAGE_UNUSE:1019|>": 102323, "<|IMAGE_UNUSE:1020|>": 102324, "<|IMAGE_UNUSE:1021|>": 102325, "<|IMAGE_UNUSE:1022|>": 102326, "<|IMAGE_UNUSE:1023|>": 102327, "<|IMAGE_UNUSE:1024|>": 102328, "<|IMAGE_UNUSE:1025|>": 102329, "<|IMAGE_UNUSE:1026|>": 102330, "<|IMAGE_UNUSE:1027|>": 102331, "<|IMAGE_UNUSE:1028|>": 102332, "<|IMAGE_UNUSE:1029|>": 102333, "<|IMAGE_UNUSE:1030|>": 102334, "<|IMAGE_UNUSE:1031|>": 102335, "<|IMAGE_UNUSE:1032|>": 102336, "<|IMAGE_UNUSE:1033|>": 102337, "<|IMAGE_UNUSE:1034|>": 102338, "<|IMAGE_UNUSE:1035|>": 102339, "<|IMAGE_UNUSE:1036|>": 102340, "<|IMAGE_UNUSE:1037|>": 102341, "<|IMAGE_UNUSE:1038|>": 102342, "<|STREAMING_BEGIN|>": 102343, "<|STREAMING_END|>": 102344, "<|STREAMING_TEXT_END|>": 102345, "<|AUDIO_UNUSE:0|>": 102346, "<|AUDIO_UNUSE:1|>": 102347, "<|AUDIO_UNUSE:2|>": 102348, "<|AUDIO_UNUSE:3|>": 102349, "<|AUDIO_UNUSE:4|>": 102350, "<|AUDIO_UNUSE:5|>": 102351, "<|AUDIO_UNUSE:6|>": 102352, "<|AUDIO_UNUSE:7|>": 102353, "<|AUDIO_UNUSE:8|>": 102354, "<|AUDIO_UNUSE:9|>": 102355, "<|AUDIO_UNUSE:10|>": 102356, "<|AUDIO_UNUSE:11|>": 102357, "<|AUDIO_UNUSE:12|>": 102358, "<|AUDIO_UNUSE:13|>": 102359, "<|AUDIO_UNUSE:14|>": 102360, "<|AUDIO_UNUSE:15|>": 102361, "<|AUDIO_UNUSE:16|>": 102362, "<|AUDIO_UNUSE:17|>": 102363, "<|AUDIO_UNUSE:18|>": 102364, "<|AUDIO_UNUSE:19|>": 102365, "<|AUDIO_UNUSE:20|>": 102366, "<|AUDIO_UNUSE:21|>": 102367, "<|AUDIO_UNUSE:22|>": 102368, "<|AUDIO_UNUSE:23|>": 102369, "<|AUDIO_UNUSE:24|>": 102370, "<|AUDIO_UNUSE:25|>": 102371, "<|AUDIO_UNUSE:26|>": 102372, "<|AUDIO_UNUSE:27|>": 102373, "<|AUDIO_UNUSE:28|>": 102374, "<|AUDIO_UNUSE:29|>": 102375, "<|AUDIO_UNUSE:30|>": 102376, "<|AUDIO_UNUSE:31|>": 102377, "<|AUDIO_UNUSE:32|>": 102378, "<|AUDIO_UNUSE:33|>": 102379, "<|AUDIO_UNUSE:34|>": 102380, "<|AUDIO_UNUSE:35|>": 102381, "<|AUDIO_UNUSE:36|>": 102382, "<|AUDIO_UNUSE:37|>": 102383, "<|AUDIO_UNUSE:38|>": 102384, "<|AUDIO_UNUSE:39|>": 102385, "<|AUDIO_UNUSE:40|>": 102386, "<|AUDIO_UNUSE:41|>": 102387, "<|AUDIO_UNUSE:42|>": 102388, "<|AUDIO_UNUSE:43|>": 102389, 
"<|AUDIO_UNUSE:44|>": 102390, "<|AUDIO_UNUSE:45|>": 102391, "<|AUDIO_UNUSE:46|>": 102392, "<|AUDIO_UNUSE:47|>": 102393, "<|AUDIO_UNUSE:48|>": 102394, "<|AUDIO_UNUSE:49|>": 102395, "<|AUDIO_UNUSE:50|>": 102396, "<|AUDIO_UNUSE:51|>": 102397, "<|AUDIO_UNUSE:52|>": 102398, "<|AUDIO_UNUSE:53|>": 102399, "<|AUDIO_UNUSE:54|>": 102400, "<|AUDIO_UNUSE:55|>": 102401, "<|AUDIO_UNUSE:56|>": 102402, "<|AUDIO_UNUSE:57|>": 102403, "<|AUDIO_UNUSE:58|>": 102404, "<|AUDIO_UNUSE:59|>": 102405, "<|AUDIO_UNUSE:60|>": 102406, "<|AUDIO_UNUSE:61|>": 102407, "<|AUDIO_UNUSE:62|>": 102408, "<|AUDIO_UNUSE:63|>": 102409, "<|AUDIO_UNUSE:64|>": 102410, "<|AUDIO_UNUSE:65|>": 102411, "<|AUDIO_UNUSE:66|>": 102412, "<|AUDIO_UNUSE:67|>": 102413, "<|AUDIO_UNUSE:68|>": 102414, "<|AUDIO_UNUSE:69|>": 102415, "<|AUDIO_UNUSE:70|>": 102416, "<|AUDIO_UNUSE:71|>": 102417, "<|AUDIO_UNUSE:72|>": 102418, "<|AUDIO_UNUSE:73|>": 102419, "<|AUDIO_UNUSE:74|>": 102420, "<|AUDIO_UNUSE:75|>": 102421, "<|AUDIO_UNUSE:76|>": 102422, "<|AUDIO_UNUSE:77|>": 102423, "<|AUDIO_UNUSE:78|>": 102424, "<|AUDIO_UNUSE:79|>": 102425, "<|AUDIO_UNUSE:80|>": 102426, "<|AUDIO_UNUSE:81|>": 102427, "<|AUDIO_UNUSE:82|>": 102428, "<|AUDIO_UNUSE:83|>": 102429, "<|AUDIO_UNUSE:84|>": 102430, "<|AUDIO_UNUSE:85|>": 102431, "<|AUDIO_UNUSE:86|>": 102432, "<|AUDIO_UNUSE:87|>": 102433, "<|AUDIO_UNUSE:88|>": 102434, "<|AUDIO_UNUSE:89|>": 102435, "<|AUDIO_UNUSE:90|>": 102436, "<|AUDIO_UNUSE:91|>": 102437, "<|AUDIO_UNUSE:92|>": 102438, "<|AUDIO_UNUSE:93|>": 102439, "<|AUDIO_UNUSE:94|>": 102440, "<|AUDIO_UNUSE:95|>": 102441, "<|AUDIO_UNUSE:96|>": 102442, "<|AUDIO_UNUSE:97|>": 102443, "<|AUDIO_UNUSE:98|>": 102444, "<|AUDIO_UNUSE:99|>": 102445, "<|AUDIO_UNUSE:100|>": 102446, "<|AUDIO_UNUSE:101|>": 102447, "<|AUDIO_UNUSE:102|>": 102448, "<|AUDIO_UNUSE:103|>": 102449, "<|AUDIO_UNUSE:104|>": 102450, "<|AUDIO_UNUSE:105|>": 102451, "<|AUDIO_UNUSE:106|>": 102452, "<|AUDIO_UNUSE:107|>": 102453, "<|AUDIO_UNUSE:108|>": 102454, "<|AUDIO_UNUSE:109|>": 102455, "<|AUDIO_UNUSE:110|>": 102456, "<|AUDIO_UNUSE:111|>": 102457, "<|AUDIO_UNUSE:112|>": 102458, "<|AUDIO_UNUSE:113|>": 102459, "<|AUDIO_UNUSE:114|>": 102460, "<|AUDIO_UNUSE:115|>": 102461, "<|AUDIO_UNUSE:116|>": 102462, "<|AUDIO_UNUSE:117|>": 102463, "<|AUDIO_UNUSE:118|>": 102464, "<|AUDIO_UNUSE:119|>": 102465, "<|AUDIO_UNUSE:120|>": 102466, "<|AUDIO_UNUSE:121|>": 102467, "<|AUDIO_UNUSE:122|>": 102468, "<|AUDIO_UNUSE:123|>": 102469, "<|AUDIO_UNUSE:124|>": 102470, "<|AUDIO_UNUSE:125|>": 102471, "<|AUDIO_UNUSE:126|>": 102472, "<|AUDIO_UNUSE:127|>": 102473, "<|AUDIO_UNUSE:128|>": 102474, "<|AUDIO_UNUSE:129|>": 102475, "<|AUDIO_UNUSE:130|>": 102476, "<|AUDIO_UNUSE:131|>": 102477, "<|AUDIO_UNUSE:132|>": 102478, "<|AUDIO_UNUSE:133|>": 102479, "<|AUDIO_UNUSE:134|>": 102480, "<|AUDIO_UNUSE:135|>": 102481, "<|AUDIO_UNUSE:136|>": 102482, "<|AUDIO_UNUSE:137|>": 102483, "<|AUDIO_UNUSE:138|>": 102484, "<|AUDIO_UNUSE:139|>": 102485, "<|AUDIO_UNUSE:140|>": 102486, "<|AUDIO_UNUSE:141|>": 102487, "<|AUDIO_UNUSE:142|>": 102488, "<|AUDIO_UNUSE:143|>": 102489, "<|AUDIO_UNUSE:144|>": 102490, "<|AUDIO_UNUSE:145|>": 102491, "<|AUDIO_UNUSE:146|>": 102492, "<|AUDIO_UNUSE:147|>": 102493, "<|AUDIO_UNUSE:148|>": 102494, "<|AUDIO_UNUSE:149|>": 102495, "<|AUDIO_UNUSE:150|>": 102496, "<|AUDIO_UNUSE:151|>": 102497, "<|AUDIO_UNUSE:152|>": 102498, "<|AUDIO_UNUSE:153|>": 102499, "<|AUDIO_UNUSE:154|>": 102500, "<|AUDIO_UNUSE:155|>": 102501, "<|AUDIO_UNUSE:156|>": 102502, "<|AUDIO_UNUSE:157|>": 102503, "<|AUDIO_UNUSE:158|>": 102504, "<|AUDIO_UNUSE:159|>": 102505, 
"<|AUDIO_UNUSE:160|>": 102506, "<|AUDIO_UNUSE:161|>": 102507, "<|AUDIO_UNUSE:162|>": 102508, "<|AUDIO_UNUSE:163|>": 102509, "<|AUDIO_UNUSE:164|>": 102510, "<|AUDIO_UNUSE:165|>": 102511, "<|AUDIO_UNUSE:166|>": 102512, "<|AUDIO_UNUSE:167|>": 102513, "<|AUDIO_UNUSE:168|>": 102514, "<|AUDIO_UNUSE:169|>": 102515, "<|AUDIO_UNUSE:170|>": 102516, "<|AUDIO_UNUSE:171|>": 102517, "<|AUDIO_UNUSE:172|>": 102518, "<|AUDIO_UNUSE:173|>": 102519, "<|AUDIO_UNUSE:174|>": 102520, "<|AUDIO_UNUSE:175|>": 102521, "<|AUDIO_UNUSE:176|>": 102522, "<|AUDIO_UNUSE:177|>": 102523, "<|AUDIO_UNUSE:178|>": 102524, "<|AUDIO_UNUSE:179|>": 102525, "<|AUDIO_UNUSE:180|>": 102526, "<|AUDIO_UNUSE:181|>": 102527, "<|AUDIO_UNUSE:182|>": 102528, "<|AUDIO_UNUSE:183|>": 102529, "<|AUDIO_UNUSE:184|>": 102530, "<|AUDIO_UNUSE:185|>": 102531, "<|AUDIO_UNUSE:186|>": 102532, "<|AUDIO_UNUSE:187|>": 102533, "<|AUDIO_UNUSE:188|>": 102534, "<|AUDIO_UNUSE:189|>": 102535, "<|AUDIO_UNUSE:190|>": 102536, "<|AUDIO_UNUSE:191|>": 102537, "<|AUDIO_UNUSE:192|>": 102538, "<|AUDIO_UNUSE:193|>": 102539, "<|AUDIO_UNUSE:194|>": 102540, "<|AUDIO_UNUSE:195|>": 102541, "<|AUDIO_UNUSE:196|>": 102542, "<|AUDIO_UNUSE:197|>": 102543, "<|AUDIO_UNUSE:198|>": 102544, "<|AUDIO_UNUSE:199|>": 102545, "<|AUDIO_UNUSE:200|>": 102546, "<|AUDIO_UNUSE:201|>": 102547, "<|AUDIO_UNUSE:202|>": 102548, "<|AUDIO_UNUSE:203|>": 102549, "<|AUDIO_UNUSE:204|>": 102550, "<|AUDIO_UNUSE:205|>": 102551, "<|AUDIO_UNUSE:206|>": 102552, "<|AUDIO_UNUSE:207|>": 102553, "<|AUDIO_UNUSE:208|>": 102554, "<|AUDIO_UNUSE:209|>": 102555, "<|AUDIO_UNUSE:210|>": 102556, "<|AUDIO_UNUSE:211|>": 102557, "<|AUDIO_UNUSE:212|>": 102558, "<|AUDIO_UNUSE:213|>": 102559, "<|AUDIO_UNUSE:214|>": 102560, "<|AUDIO_UNUSE:215|>": 102561, "<|AUDIO_UNUSE:216|>": 102562, "<|AUDIO_UNUSE:217|>": 102563, "<|AUDIO_UNUSE:218|>": 102564, "<|AUDIO_UNUSE:219|>": 102565, "<|AUDIO_UNUSE:220|>": 102566, "<|AUDIO_UNUSE:221|>": 102567, "<|AUDIO_UNUSE:222|>": 102568, "<|AUDIO_UNUSE:223|>": 102569, "<|AUDIO_UNUSE:224|>": 102570, "<|AUDIO_UNUSE:225|>": 102571, "<|AUDIO_UNUSE:226|>": 102572, "<|AUDIO_UNUSE:227|>": 102573, "<|AUDIO_UNUSE:228|>": 102574, "<|AUDIO_UNUSE:229|>": 102575, "<|AUDIO_UNUSE:230|>": 102576, "<|AUDIO_UNUSE:231|>": 102577, "<|AUDIO_UNUSE:232|>": 102578, "<|AUDIO_UNUSE:233|>": 102579, "<|AUDIO_UNUSE:234|>": 102580, "<|AUDIO_UNUSE:235|>": 102581, "<|AUDIO_UNUSE:236|>": 102582, "<|AUDIO_UNUSE:237|>": 102583, "<|AUDIO_UNUSE:238|>": 102584, "<|AUDIO_UNUSE:239|>": 102585, "<|AUDIO_UNUSE:240|>": 102586, "<|AUDIO_UNUSE:241|>": 102587, "<|AUDIO_UNUSE:242|>": 102588, "<|AUDIO_UNUSE:243|>": 102589, "<|AUDIO_UNUSE:244|>": 102590, "<|AUDIO_UNUSE:245|>": 102591, "<|AUDIO_UNUSE:246|>": 102592, "<|AUDIO_UNUSE:247|>": 102593, "<|AUDIO_UNUSE:248|>": 102594, "<|AUDIO_UNUSE:249|>": 102595, "<|AUDIO_UNUSE:250|>": 102596, "<|AUDIO_UNUSE:251|>": 102597, "<|AUDIO_UNUSE:252|>": 102598, "<|AUDIO_UNUSE:253|>": 102599, "<|AUDIO_UNUSE:254|>": 102600, "<|AUDIO_UNUSE:255|>": 102601, "<|AUDIO_UNUSE:256|>": 102602, "<|AUDIO_UNUSE:257|>": 102603, "<|AUDIO_UNUSE:258|>": 102604, "<|AUDIO_UNUSE:259|>": 102605, "<|AUDIO_UNUSE:260|>": 102606, "<|AUDIO_UNUSE:261|>": 102607, "<|AUDIO_UNUSE:262|>": 102608, "<|AUDIO_UNUSE:263|>": 102609, "<|AUDIO_UNUSE:264|>": 102610, "<|AUDIO_UNUSE:265|>": 102611, "<|AUDIO_UNUSE:266|>": 102612, "<|AUDIO_UNUSE:267|>": 102613, "<|AUDIO_UNUSE:268|>": 102614, "<|AUDIO_UNUSE:269|>": 102615, "<|AUDIO_UNUSE:270|>": 102616, "<|AUDIO_UNUSE:271|>": 102617, "<|AUDIO_UNUSE:272|>": 102618, "<|AUDIO_UNUSE:273|>": 102619, 
"<|AUDIO_UNUSE:274|>": 102620, "<|AUDIO_UNUSE:275|>": 102621, "<|AUDIO_UNUSE:276|>": 102622, "<|AUDIO_UNUSE:277|>": 102623, "<|AUDIO_UNUSE:278|>": 102624, "<|AUDIO_UNUSE:279|>": 102625, "<|AUDIO_UNUSE:280|>": 102626, "<|AUDIO_UNUSE:281|>": 102627, "<|AUDIO_UNUSE:282|>": 102628, "<|AUDIO_UNUSE:283|>": 102629, "<|AUDIO_UNUSE:284|>": 102630, "<|AUDIO_UNUSE:285|>": 102631, "<|AUDIO_UNUSE:286|>": 102632, "<|AUDIO_UNUSE:287|>": 102633, "<|AUDIO_UNUSE:288|>": 102634, "<|AUDIO_UNUSE:289|>": 102635, "<|AUDIO_UNUSE:290|>": 102636, "<|AUDIO_UNUSE:291|>": 102637, "<|AUDIO_UNUSE:292|>": 102638, "<|AUDIO_UNUSE:293|>": 102639, "<|AUDIO_UNUSE:294|>": 102640, "<|AUDIO_UNUSE:295|>": 102641, "<|AUDIO_UNUSE:296|>": 102642, "<|AUDIO_UNUSE:297|>": 102643, "<|AUDIO_UNUSE:298|>": 102644, "<|AUDIO_UNUSE:299|>": 102645, "<|AUDIO_UNUSE:300|>": 102646, "<|AUDIO_UNUSE:301|>": 102647, "<|AUDIO_UNUSE:302|>": 102648, "<|AUDIO_UNUSE:303|>": 102649, "<|AUDIO_UNUSE:304|>": 102650, "<|AUDIO_UNUSE:305|>": 102651, "<|AUDIO_UNUSE:306|>": 102652, "<|AUDIO_UNUSE:307|>": 102653, "<|AUDIO_UNUSE:308|>": 102654, "<|AUDIO_UNUSE:309|>": 102655, "<|AUDIO_UNUSE:310|>": 102656, "<|AUDIO_UNUSE:311|>": 102657, "<|AUDIO_UNUSE:312|>": 102658, "<|AUDIO_UNUSE:313|>": 102659, "<|AUDIO_UNUSE:314|>": 102660, "<|AUDIO_UNUSE:315|>": 102661, "<|AUDIO_UNUSE:316|>": 102662, "<|AUDIO_UNUSE:317|>": 102663, "<|AUDIO_UNUSE:318|>": 102664, "<|AUDIO_UNUSE:319|>": 102665, "<|AUDIO_UNUSE:320|>": 102666, "<|AUDIO_UNUSE:321|>": 102667, "<|AUDIO_UNUSE:322|>": 102668, "<|AUDIO_UNUSE:323|>": 102669, "<|AUDIO_UNUSE:324|>": 102670, "<|AUDIO_UNUSE:325|>": 102671, "<|AUDIO_UNUSE:326|>": 102672, "<|AUDIO_UNUSE:327|>": 102673, "<|AUDIO_UNUSE:328|>": 102674, "<|AUDIO_UNUSE:329|>": 102675, "<|AUDIO_UNUSE:330|>": 102676, "<|AUDIO_UNUSE:331|>": 102677, "<|AUDIO_UNUSE:332|>": 102678, "<|AUDIO_UNUSE:333|>": 102679, "<|AUDIO_UNUSE:334|>": 102680, "<|AUDIO_UNUSE:335|>": 102681, "<|AUDIO_UNUSE:336|>": 102682, "<|AUDIO_UNUSE:337|>": 102683, "<|AUDIO_UNUSE:338|>": 102684, "<|AUDIO_UNUSE:339|>": 102685, "<|AUDIO_UNUSE:340|>": 102686, "<|AUDIO_UNUSE:341|>": 102687, "<|AUDIO_UNUSE:342|>": 102688, "<|AUDIO_UNUSE:343|>": 102689, "<|AUDIO_UNUSE:344|>": 102690, "<|AUDIO_UNUSE:345|>": 102691, "<|AUDIO_UNUSE:346|>": 102692, "<|AUDIO_UNUSE:347|>": 102693, "<|AUDIO_UNUSE:348|>": 102694, "<|AUDIO_UNUSE:349|>": 102695, "<|AUDIO_UNUSE:350|>": 102696, "<|AUDIO_UNUSE:351|>": 102697, "<|AUDIO_UNUSE:352|>": 102698, "<|AUDIO_UNUSE:353|>": 102699, "<|AUDIO_UNUSE:354|>": 102700, "<|AUDIO_UNUSE:355|>": 102701, "<|AUDIO_UNUSE:356|>": 102702, "<|AUDIO_UNUSE:357|>": 102703, "<|AUDIO_UNUSE:358|>": 102704, "<|AUDIO_UNUSE:359|>": 102705, "<|AUDIO_UNUSE:360|>": 102706, "<|AUDIO_UNUSE:361|>": 102707, "<|AUDIO_UNUSE:362|>": 102708, "<|AUDIO_UNUSE:363|>": 102709, "<|AUDIO_UNUSE:364|>": 102710, "<|AUDIO_UNUSE:365|>": 102711, "<|AUDIO_UNUSE:366|>": 102712, "<|AUDIO_UNUSE:367|>": 102713, "<|AUDIO_UNUSE:368|>": 102714, "<|AUDIO_UNUSE:369|>": 102715, "<|AUDIO_UNUSE:370|>": 102716, "<|AUDIO_UNUSE:371|>": 102717, "<|AUDIO_UNUSE:372|>": 102718, "<|AUDIO_UNUSE:373|>": 102719, "<|AUDIO_UNUSE:374|>": 102720, "<|AUDIO_UNUSE:375|>": 102721, "<|AUDIO_UNUSE:376|>": 102722, "<|AUDIO_UNUSE:377|>": 102723, "<|AUDIO_UNUSE:378|>": 102724, "<|AUDIO_UNUSE:379|>": 102725, "<|AUDIO_UNUSE:380|>": 102726, "<|AUDIO_UNUSE:381|>": 102727, "<|AUDIO_UNUSE:382|>": 102728, "<|AUDIO_UNUSE:383|>": 102729, "<|AUDIO_UNUSE:384|>": 102730, "<|AUDIO_UNUSE:385|>": 102731, "<|AUDIO_UNUSE:386|>": 102732, "<|AUDIO_UNUSE:387|>": 102733, 
"<|AUDIO_UNUSE:388|>": 102734, "<|AUDIO_UNUSE:389|>": 102735, "<|AUDIO_UNUSE:390|>": 102736, "<|AUDIO_UNUSE:391|>": 102737, "<|AUDIO_UNUSE:392|>": 102738, "<|AUDIO_UNUSE:393|>": 102739, "<|AUDIO_UNUSE:394|>": 102740, "<|AUDIO_UNUSE:395|>": 102741, "<|AUDIO_UNUSE:396|>": 102742, "<|AUDIO_UNUSE:397|>": 102743, "<|AUDIO_UNUSE:398|>": 102744, "<|AUDIO_UNUSE:399|>": 102745, "<|AUDIO_UNUSE:400|>": 102746, "<|AUDIO_UNUSE:401|>": 102747, "<|AUDIO_UNUSE:402|>": 102748, "<|AUDIO_UNUSE:403|>": 102749, "<|AUDIO_UNUSE:404|>": 102750, "<|AUDIO_UNUSE:405|>": 102751, "<|AUDIO_UNUSE:406|>": 102752, "<|AUDIO_UNUSE:407|>": 102753, "<|AUDIO_UNUSE:408|>": 102754, "<|AUDIO_UNUSE:409|>": 102755, "<|AUDIO_UNUSE:410|>": 102756, "<|AUDIO_UNUSE:411|>": 102757, "<|AUDIO_UNUSE:412|>": 102758, "<|AUDIO_UNUSE:413|>": 102759, "<|AUDIO_UNUSE:414|>": 102760, "<|AUDIO_UNUSE:415|>": 102761, "<|AUDIO_UNUSE:416|>": 102762, "<|AUDIO_UNUSE:417|>": 102763, "<|AUDIO_UNUSE:418|>": 102764, "<|AUDIO_UNUSE:419|>": 102765, "<|AUDIO_UNUSE:420|>": 102766, "<|AUDIO_UNUSE:421|>": 102767, "<|AUDIO_UNUSE:422|>": 102768, "<|AUDIO_UNUSE:423|>": 102769, "<|AUDIO_UNUSE:424|>": 102770, "<|AUDIO_UNUSE:425|>": 102771, "<|AUDIO_UNUSE:426|>": 102772, "<|AUDIO_UNUSE:427|>": 102773, "<|AUDIO_UNUSE:428|>": 102774, "<|AUDIO_UNUSE:429|>": 102775, "<|AUDIO_UNUSE:430|>": 102776, "<|AUDIO_UNUSE:431|>": 102777, "<|AUDIO_UNUSE:432|>": 102778, "<|AUDIO_UNUSE:433|>": 102779, "<|AUDIO_UNUSE:434|>": 102780, "<|AUDIO_UNUSE:435|>": 102781, "<|AUDIO_UNUSE:436|>": 102782, "<|AUDIO_UNUSE:437|>": 102783, "<|AUDIO_UNUSE:438|>": 102784, "<|AUDIO_UNUSE:439|>": 102785, "<|AUDIO_UNUSE:440|>": 102786, "<|AUDIO_UNUSE:441|>": 102787, "<|AUDIO_UNUSE:442|>": 102788, "<|AUDIO_UNUSE:443|>": 102789, "<|AUDIO_UNUSE:444|>": 102790, "<|AUDIO_UNUSE:445|>": 102791, "<|AUDIO_UNUSE:446|>": 102792, "<|AUDIO_UNUSE:447|>": 102793, "<|AUDIO_UNUSE:448|>": 102794, "<|AUDIO_UNUSE:449|>": 102795, "<|AUDIO_UNUSE:450|>": 102796, "<|AUDIO_UNUSE:451|>": 102797, "<|AUDIO_UNUSE:452|>": 102798, "<|AUDIO_UNUSE:453|>": 102799, "<|AUDIO_UNUSE:454|>": 102800, "<|AUDIO_UNUSE:455|>": 102801, "<|AUDIO_UNUSE:456|>": 102802, "<|AUDIO_UNUSE:457|>": 102803, "<|AUDIO_UNUSE:458|>": 102804, "<|AUDIO_UNUSE:459|>": 102805, "<|AUDIO_UNUSE:460|>": 102806, "<|AUDIO_UNUSE:461|>": 102807, "<|AUDIO_UNUSE:462|>": 102808, "<|AUDIO_UNUSE:463|>": 102809, "<|AUDIO_UNUSE:464|>": 102810, "<|AUDIO_UNUSE:465|>": 102811, "<|AUDIO_UNUSE:466|>": 102812, "<|AUDIO_UNUSE:467|>": 102813, "<|AUDIO_UNUSE:468|>": 102814, "<|AUDIO_UNUSE:469|>": 102815, "<|AUDIO_UNUSE:470|>": 102816, "<|AUDIO_UNUSE:471|>": 102817, "<|AUDIO_UNUSE:472|>": 102818, "<|AUDIO_UNUSE:473|>": 102819, "<|AUDIO_UNUSE:474|>": 102820, "<|AUDIO_UNUSE:475|>": 102821, "<|AUDIO_UNUSE:476|>": 102822, "<|AUDIO_UNUSE:477|>": 102823, "<|AUDIO_UNUSE:478|>": 102824, "<|AUDIO_UNUSE:479|>": 102825, "<|AUDIO_UNUSE:480|>": 102826, "<|AUDIO_UNUSE:481|>": 102827, "<|AUDIO_UNUSE:482|>": 102828, "<|AUDIO_UNUSE:483|>": 102829, "<|AUDIO_UNUSE:484|>": 102830, "<|AUDIO_UNUSE:485|>": 102831, "<|AUDIO_UNUSE:486|>": 102832, "<|AUDIO_UNUSE:487|>": 102833, "<|AUDIO_UNUSE:488|>": 102834, "<|AUDIO_UNUSE:489|>": 102835, "<|AUDIO_UNUSE:490|>": 102836, "<|AUDIO_UNUSE:491|>": 102837, "<|AUDIO_UNUSE:492|>": 102838, "<|AUDIO_UNUSE:493|>": 102839, "<|AUDIO_UNUSE:494|>": 102840, "<|AUDIO_UNUSE:495|>": 102841, "<|AUDIO_UNUSE:496|>": 102842, "<|AUDIO_UNUSE:497|>": 102843, "<|AUDIO_UNUSE:498|>": 102844, "<|AUDIO_UNUSE:499|>": 102845, "<|AUDIO_UNUSE:500|>": 102846, "<|AUDIO_UNUSE:501|>": 102847, 
"<|AUDIO_UNUSE:502|>": 102848, "<|AUDIO_UNUSE:503|>": 102849, "<|AUDIO_UNUSE:504|>": 102850, "<|AUDIO_UNUSE:505|>": 102851, "<|AUDIO_UNUSE:506|>": 102852, "<|AUDIO_UNUSE:507|>": 102853, "<|AUDIO_UNUSE:508|>": 102854, "<|AUDIO_UNUSE:509|>": 102855, "<|AUDIO_UNUSE:510|>": 102856, "<|AUDIO_UNUSE:511|>": 102857, "<|AUDIO_UNUSE:512|>": 102858, "<|AUDIO_UNUSE:513|>": 102859, "<|AUDIO_UNUSE:514|>": 102860, "<|AUDIO_UNUSE:515|>": 102861, "<|AUDIO_UNUSE:516|>": 102862, "<|AUDIO_UNUSE:517|>": 102863, "<|AUDIO_UNUSE:518|>": 102864, "<|AUDIO_UNUSE:519|>": 102865, "<|AUDIO_UNUSE:520|>": 102866, "<|AUDIO_UNUSE:521|>": 102867, "<|AUDIO_UNUSE:522|>": 102868, "<|AUDIO_UNUSE:523|>": 102869, "<|AUDIO_UNUSE:524|>": 102870, "<|AUDIO_UNUSE:525|>": 102871, "<|AUDIO_UNUSE:526|>": 102872, "<|AUDIO_UNUSE:527|>": 102873, "<|AUDIO_UNUSE:528|>": 102874, "<|AUDIO_UNUSE:529|>": 102875, "<|AUDIO_UNUSE:530|>": 102876, "<|AUDIO_UNUSE:531|>": 102877, "<|AUDIO_UNUSE:532|>": 102878, "<|AUDIO_UNUSE:533|>": 102879, "<|AUDIO_UNUSE:534|>": 102880, "<|AUDIO_UNUSE:535|>": 102881, "<|AUDIO_UNUSE:536|>": 102882, "<|AUDIO_UNUSE:537|>": 102883, "<|AUDIO_UNUSE:538|>": 102884, "<|AUDIO_UNUSE:539|>": 102885, "<|AUDIO_UNUSE:540|>": 102886, "<|AUDIO_UNUSE:541|>": 102887, "<|AUDIO_UNUSE:542|>": 102888, "<|AUDIO_UNUSE:543|>": 102889, "<|AUDIO_UNUSE:544|>": 102890, "<|AUDIO_UNUSE:545|>": 102891, "<|AUDIO_UNUSE:546|>": 102892, "<|AUDIO_UNUSE:547|>": 102893, "<|AUDIO_UNUSE:548|>": 102894, "<|AUDIO_UNUSE:549|>": 102895, "<|AUDIO_UNUSE:550|>": 102896, "<|AUDIO_UNUSE:551|>": 102897, "<|AUDIO_UNUSE:552|>": 102898, "<|AUDIO_UNUSE:553|>": 102899, "<|AUDIO_UNUSE:554|>": 102900, "<|AUDIO_UNUSE:555|>": 102901, "<|AUDIO_UNUSE:556|>": 102902, "<|AUDIO_UNUSE:557|>": 102903, "<|AUDIO_UNUSE:558|>": 102904, "<|AUDIO_UNUSE:559|>": 102905, "<|AUDIO_UNUSE:560|>": 102906, "<|AUDIO_UNUSE:561|>": 102907, "<|AUDIO_UNUSE:562|>": 102908, "<|AUDIO_UNUSE:563|>": 102909, "<|AUDIO_UNUSE:564|>": 102910, "<|AUDIO_UNUSE:565|>": 102911, "<|AUDIO_UNUSE:566|>": 102912, "<|AUDIO_UNUSE:567|>": 102913, "<|AUDIO_UNUSE:568|>": 102914, "<|AUDIO_UNUSE:569|>": 102915, "<|AUDIO_UNUSE:570|>": 102916, "<|AUDIO_UNUSE:571|>": 102917, "<|AUDIO_UNUSE:572|>": 102918, "<|AUDIO_UNUSE:573|>": 102919, "<|AUDIO_UNUSE:574|>": 102920, "<|AUDIO_UNUSE:575|>": 102921, "<|AUDIO_UNUSE:576|>": 102922, "<|AUDIO_UNUSE:577|>": 102923, "<|AUDIO_UNUSE:578|>": 102924, "<|AUDIO_UNUSE:579|>": 102925, "<|AUDIO_UNUSE:580|>": 102926, "<|AUDIO_UNUSE:581|>": 102927, "<|AUDIO_UNUSE:582|>": 102928, "<|AUDIO_UNUSE:583|>": 102929, "<|AUDIO_UNUSE:584|>": 102930, "<|AUDIO_UNUSE:585|>": 102931, "<|AUDIO_UNUSE:586|>": 102932, "<|AUDIO_UNUSE:587|>": 102933, "<|AUDIO_UNUSE:588|>": 102934, "<|AUDIO_UNUSE:589|>": 102935, "<|AUDIO_UNUSE:590|>": 102936, "<|AUDIO_UNUSE:591|>": 102937, "<|AUDIO_UNUSE:592|>": 102938, "<|AUDIO_UNUSE:593|>": 102939, "<|AUDIO_UNUSE:594|>": 102940, "<|AUDIO_UNUSE:595|>": 102941, "<|AUDIO_UNUSE:596|>": 102942, "<|AUDIO_UNUSE:597|>": 102943, "<|AUDIO_UNUSE:598|>": 102944, "<|AUDIO_UNUSE:599|>": 102945, "<|AUDIO_UNUSE:600|>": 102946, "<|AUDIO_UNUSE:601|>": 102947, "<|AUDIO_UNUSE:602|>": 102948, "<|AUDIO_UNUSE:603|>": 102949, "<|AUDIO_UNUSE:604|>": 102950, "<|AUDIO_UNUSE:605|>": 102951, "<|AUDIO_UNUSE:606|>": 102952, "<|AUDIO_UNUSE:607|>": 102953, "<|AUDIO_UNUSE:608|>": 102954, "<|AUDIO_UNUSE:609|>": 102955, "<|AUDIO_UNUSE:610|>": 102956, "<|AUDIO_UNUSE:611|>": 102957, "<|AUDIO_UNUSE:612|>": 102958, "<|AUDIO_UNUSE:613|>": 102959, "<|AUDIO_UNUSE:614|>": 102960, "<|AUDIO_UNUSE:615|>": 102961, 
"<|AUDIO_UNUSE:616|>": 102962, "<|AUDIO_UNUSE:617|>": 102963, "<|AUDIO_UNUSE:618|>": 102964, "<|AUDIO_UNUSE:619|>": 102965, "<|AUDIO_UNUSE:620|>": 102966, "<|AUDIO_UNUSE:621|>": 102967, "<|AUDIO_UNUSE:622|>": 102968, "<|AUDIO_UNUSE:623|>": 102969, "<|AUDIO_UNUSE:624|>": 102970, "<|AUDIO_UNUSE:625|>": 102971, "<|AUDIO_UNUSE:626|>": 102972, "<|AUDIO_UNUSE:627|>": 102973, "<|AUDIO_UNUSE:628|>": 102974, "<|AUDIO_UNUSE:629|>": 102975, "<|AUDIO_UNUSE:630|>": 102976, "<|AUDIO_UNUSE:631|>": 102977, "<|AUDIO_UNUSE:632|>": 102978, "<|AUDIO_UNUSE:633|>": 102979, "<|AUDIO_UNUSE:634|>": 102980, "<|AUDIO_UNUSE:635|>": 102981, "<|AUDIO_UNUSE:636|>": 102982, "<|AUDIO_UNUSE:637|>": 102983, "<|AUDIO_UNUSE:638|>": 102984, "<|AUDIO_UNUSE:639|>": 102985, "<|AUDIO_UNUSE:640|>": 102986, "<|AUDIO_UNUSE:641|>": 102987, "<|AUDIO_UNUSE:642|>": 102988, "<|AUDIO_UNUSE:643|>": 102989, "<|AUDIO_UNUSE:644|>": 102990, "<|AUDIO_UNUSE:645|>": 102991, "<|AUDIO_UNUSE:646|>": 102992, "<|AUDIO_UNUSE:647|>": 102993, "<|AUDIO_UNUSE:648|>": 102994, "<|AUDIO_UNUSE:649|>": 102995, "<|AUDIO_UNUSE:650|>": 102996, "<|AUDIO_UNUSE:651|>": 102997, "<|AUDIO_UNUSE:652|>": 102998, "<|AUDIO_UNUSE:653|>": 102999, "<|AUDIO_UNUSE:654|>": 103000, "<|AUDIO_UNUSE:655|>": 103001, "<|AUDIO_UNUSE:656|>": 103002, "<|AUDIO_UNUSE:657|>": 103003, "<|AUDIO_UNUSE:658|>": 103004, "<|AUDIO_UNUSE:659|>": 103005, "<|AUDIO_UNUSE:660|>": 103006, "<|AUDIO_UNUSE:661|>": 103007, "<|AUDIO_UNUSE:662|>": 103008, "<|AUDIO_UNUSE:663|>": 103009, "<|AUDIO_UNUSE:664|>": 103010, "<|AUDIO_UNUSE:665|>": 103011, "<|AUDIO_UNUSE:666|>": 103012, "<|AUDIO_UNUSE:667|>": 103013, "<|AUDIO_UNUSE:668|>": 103014, "<|AUDIO_UNUSE:669|>": 103015, "<|AUDIO_UNUSE:670|>": 103016, "<|AUDIO_UNUSE:671|>": 103017, "<|AUDIO_UNUSE:672|>": 103018, "<|AUDIO_UNUSE:673|>": 103019, "<|AUDIO_UNUSE:674|>": 103020, "<|AUDIO_UNUSE:675|>": 103021, "<|AUDIO_UNUSE:676|>": 103022, "<|AUDIO_UNUSE:677|>": 103023, "<|AUDIO_UNUSE:678|>": 103024, "<|AUDIO_UNUSE:679|>": 103025, "<|AUDIO_UNUSE:680|>": 103026, "<|AUDIO_UNUSE:681|>": 103027, "<|AUDIO_UNUSE:682|>": 103028, "<|AUDIO_UNUSE:683|>": 103029, "<|AUDIO_UNUSE:684|>": 103030, "<|AUDIO_UNUSE:685|>": 103031, "<|AUDIO_UNUSE:686|>": 103032, "<|AUDIO_UNUSE:687|>": 103033, "<|AUDIO_UNUSE:688|>": 103034, "<|AUDIO_UNUSE:689|>": 103035, "<|AUDIO_UNUSE:690|>": 103036, "<|AUDIO_UNUSE:691|>": 103037, "<|AUDIO_UNUSE:692|>": 103038, "<|AUDIO_UNUSE:693|>": 103039, "<|AUDIO_UNUSE:694|>": 103040, "<|AUDIO_UNUSE:695|>": 103041, "<|AUDIO_UNUSE:696|>": 103042, "<|AUDIO_UNUSE:697|>": 103043, "<|AUDIO_UNUSE:698|>": 103044, "<|AUDIO_UNUSE:699|>": 103045, "<|AUDIO_UNUSE:700|>": 103046, "<|AUDIO_UNUSE:701|>": 103047, "<|AUDIO_UNUSE:702|>": 103048, "<|AUDIO_UNUSE:703|>": 103049, "<|AUDIO_UNUSE:704|>": 103050, "<|AUDIO_UNUSE:705|>": 103051, "<|AUDIO_UNUSE:706|>": 103052, "<|AUDIO_UNUSE:707|>": 103053, "<|AUDIO_UNUSE:708|>": 103054, "<|AUDIO_UNUSE:709|>": 103055, "<|AUDIO_UNUSE:710|>": 103056, "<|AUDIO_UNUSE:711|>": 103057, "<|AUDIO_UNUSE:712|>": 103058, "<|AUDIO_UNUSE:713|>": 103059, "<|AUDIO_UNUSE:714|>": 103060, "<|AUDIO_UNUSE:715|>": 103061, "<|AUDIO_UNUSE:716|>": 103062, "<|AUDIO_UNUSE:717|>": 103063, "<|AUDIO_UNUSE:718|>": 103064, "<|AUDIO_UNUSE:719|>": 103065, "<|AUDIO_UNUSE:720|>": 103066, "<|AUDIO_UNUSE:721|>": 103067, "<|AUDIO_UNUSE:722|>": 103068, "<|AUDIO_UNUSE:723|>": 103069, "<|AUDIO_UNUSE:724|>": 103070, "<|AUDIO_UNUSE:725|>": 103071, "<|AUDIO_UNUSE:726|>": 103072, "<|AUDIO_UNUSE:727|>": 103073, "<|AUDIO_UNUSE:728|>": 103074, "<|AUDIO_UNUSE:729|>": 103075, 
"<|AUDIO_UNUSE:730|>": 103076, "<|AUDIO_UNUSE:731|>": 103077, "<|AUDIO_UNUSE:732|>": 103078, "<|AUDIO_UNUSE:733|>": 103079, "<|AUDIO_UNUSE:734|>": 103080, "<|AUDIO_UNUSE:735|>": 103081, "<|AUDIO_UNUSE:736|>": 103082, "<|AUDIO_UNUSE:737|>": 103083, "<|AUDIO_UNUSE:738|>": 103084, "<|AUDIO_UNUSE:739|>": 103085, "<|AUDIO_UNUSE:740|>": 103086, "<|AUDIO_UNUSE:741|>": 103087, "<|AUDIO_UNUSE:742|>": 103088, "<|AUDIO_UNUSE:743|>": 103089, "<|AUDIO_UNUSE:744|>": 103090, "<|AUDIO_UNUSE:745|>": 103091, "<|AUDIO_UNUSE:746|>": 103092, "<|AUDIO_UNUSE:747|>": 103093, "<|AUDIO_UNUSE:748|>": 103094, "<|AUDIO_UNUSE:749|>": 103095, "<|AUDIO_UNUSE:750|>": 103096, "<|AUDIO_UNUSE:751|>": 103097, "<|AUDIO_UNUSE:752|>": 103098, "<|AUDIO_UNUSE:753|>": 103099, "<|AUDIO_UNUSE:754|>": 103100, "<|AUDIO_UNUSE:755|>": 103101, "<|AUDIO_UNUSE:756|>": 103102, "<|AUDIO_UNUSE:757|>": 103103, "<|AUDIO_UNUSE:758|>": 103104, "<|AUDIO_UNUSE:759|>": 103105, "<|AUDIO_UNUSE:760|>": 103106, "<|AUDIO_UNUSE:761|>": 103107, "<|AUDIO_UNUSE:762|>": 103108, "<|AUDIO_UNUSE:763|>": 103109, "<|AUDIO_UNUSE:764|>": 103110, "<|AUDIO_UNUSE:765|>": 103111, "<|AUDIO_UNUSE:766|>": 103112, "<|AUDIO_UNUSE:767|>": 103113, "<|AUDIO_UNUSE:768|>": 103114, "<|AUDIO_UNUSE:769|>": 103115, "<|AUDIO_UNUSE:770|>": 103116, "<|AUDIO_UNUSE:771|>": 103117, "<|AUDIO_UNUSE:772|>": 103118, "<|AUDIO_UNUSE:773|>": 103119, "<|AUDIO_UNUSE:774|>": 103120, "<|AUDIO_UNUSE:775|>": 103121, "<|AUDIO_UNUSE:776|>": 103122, "<|AUDIO_UNUSE:777|>": 103123, "<|AUDIO_UNUSE:778|>": 103124, "<|AUDIO_UNUSE:779|>": 103125, "<|AUDIO_UNUSE:780|>": 103126, "<|AUDIO_UNUSE:781|>": 103127, "<|AUDIO_UNUSE:782|>": 103128, "<|AUDIO_UNUSE:783|>": 103129, "<|AUDIO_UNUSE:784|>": 103130, "<|AUDIO_UNUSE:785|>": 103131, "<|AUDIO_UNUSE:786|>": 103132, "<|AUDIO_UNUSE:787|>": 103133, "<|AUDIO_UNUSE:788|>": 103134, "<|AUDIO_UNUSE:789|>": 103135, "<|AUDIO_UNUSE:790|>": 103136, "<|AUDIO_UNUSE:791|>": 103137, "<|AUDIO_UNUSE:792|>": 103138, "<|AUDIO_UNUSE:793|>": 103139, "<|AUDIO_UNUSE:794|>": 103140, "<|AUDIO_UNUSE:795|>": 103141, "<|AUDIO_UNUSE:796|>": 103142, "<|AUDIO_UNUSE:797|>": 103143, "<|AUDIO_UNUSE:798|>": 103144, "<|AUDIO_UNUSE:799|>": 103145, "<|AUDIO_UNUSE:800|>": 103146, "<|AUDIO_UNUSE:801|>": 103147, "<|AUDIO_UNUSE:802|>": 103148, "<|AUDIO_UNUSE:803|>": 103149, "<|AUDIO_UNUSE:804|>": 103150, "<|AUDIO_UNUSE:805|>": 103151, "<|AUDIO_UNUSE:806|>": 103152, "<|AUDIO_UNUSE:807|>": 103153, "<|AUDIO_UNUSE:808|>": 103154, "<|AUDIO_UNUSE:809|>": 103155, "<|AUDIO_UNUSE:810|>": 103156, "<|AUDIO_UNUSE:811|>": 103157, "<|AUDIO_UNUSE:812|>": 103158, "<|AUDIO_UNUSE:813|>": 103159, "<|AUDIO_UNUSE:814|>": 103160, "<|AUDIO_UNUSE:815|>": 103161, "<|AUDIO_UNUSE:816|>": 103162, "<|AUDIO_UNUSE:817|>": 103163, "<|AUDIO_UNUSE:818|>": 103164, "<|AUDIO_UNUSE:819|>": 103165, "<|AUDIO_UNUSE:820|>": 103166, "<|AUDIO_UNUSE:821|>": 103167, "<|AUDIO_UNUSE:822|>": 103168, "<|AUDIO_UNUSE:823|>": 103169, "<|AUDIO_UNUSE:824|>": 103170, "<|AUDIO_UNUSE:825|>": 103171, "<|AUDIO_UNUSE:826|>": 103172, "<|AUDIO_UNUSE:827|>": 103173, "<|AUDIO_UNUSE:828|>": 103174, "<|AUDIO_UNUSE:829|>": 103175, "<|AUDIO_UNUSE:830|>": 103176, "<|AUDIO_UNUSE:831|>": 103177, "<|AUDIO_UNUSE:832|>": 103178, "<|AUDIO_UNUSE:833|>": 103179, "<|AUDIO_UNUSE:834|>": 103180, "<|AUDIO_UNUSE:835|>": 103181, "<|AUDIO_UNUSE:836|>": 103182, "<|AUDIO_UNUSE:837|>": 103183, "<|AUDIO_UNUSE:838|>": 103184, "<|AUDIO_UNUSE:839|>": 103185, "<|AUDIO_UNUSE:840|>": 103186, "<|AUDIO_UNUSE:841|>": 103187, "<|AUDIO_UNUSE:842|>": 103188, "<|AUDIO_UNUSE:843|>": 103189, 
"<|AUDIO_UNUSE:844|>": 103190, "<|AUDIO_UNUSE:845|>": 103191, "<|AUDIO_UNUSE:846|>": 103192, "<|AUDIO_UNUSE:847|>": 103193, "<|AUDIO_UNUSE:848|>": 103194, "<|AUDIO_UNUSE:849|>": 103195, "<|AUDIO_UNUSE:850|>": 103196, "<|AUDIO_UNUSE:851|>": 103197, "<|AUDIO_UNUSE:852|>": 103198, "<|AUDIO_UNUSE:853|>": 103199, "<|AUDIO_UNUSE:854|>": 103200, "<|AUDIO_UNUSE:855|>": 103201, "<|AUDIO_UNUSE:856|>": 103202, "<|AUDIO_UNUSE:857|>": 103203, "<|AUDIO_UNUSE:858|>": 103204, "<|AUDIO_UNUSE:859|>": 103205, "<|AUDIO_UNUSE:860|>": 103206, "<|AUDIO_UNUSE:861|>": 103207, "<|AUDIO_UNUSE:862|>": 103208, "<|AUDIO_UNUSE:863|>": 103209, "<|AUDIO_UNUSE:864|>": 103210, "<|AUDIO_UNUSE:865|>": 103211, "<|AUDIO_UNUSE:866|>": 103212, "<|AUDIO_UNUSE:867|>": 103213, "<|AUDIO_UNUSE:868|>": 103214, "<|AUDIO_UNUSE:869|>": 103215, "<|AUDIO_UNUSE:870|>": 103216, "<|AUDIO_UNUSE:871|>": 103217, "<|AUDIO_UNUSE:872|>": 103218, "<|AUDIO_UNUSE:873|>": 103219, "<|AUDIO_UNUSE:874|>": 103220, "<|AUDIO_UNUSE:875|>": 103221, "<|AUDIO_UNUSE:876|>": 103222, "<|AUDIO_UNUSE:877|>": 103223, "<|AUDIO_UNUSE:878|>": 103224, "<|AUDIO_UNUSE:879|>": 103225, "<|AUDIO_UNUSE:880|>": 103226, "<|AUDIO_UNUSE:881|>": 103227, "<|AUDIO_UNUSE:882|>": 103228, "<|AUDIO_UNUSE:883|>": 103229, "<|AUDIO_UNUSE:884|>": 103230, "<|AUDIO_UNUSE:885|>": 103231, "<|AUDIO_UNUSE:886|>": 103232, "<|AUDIO_UNUSE:887|>": 103233, "<|AUDIO_UNUSE:888|>": 103234, "<|AUDIO_UNUSE:889|>": 103235, "<|AUDIO_UNUSE:890|>": 103236, "<|AUDIO_UNUSE:891|>": 103237, "<|AUDIO_UNUSE:892|>": 103238, "<|AUDIO_UNUSE:893|>": 103239, "<|AUDIO_UNUSE:894|>": 103240, "<|AUDIO_UNUSE:895|>": 103241, "<|AUDIO_UNUSE:896|>": 103242, "<|AUDIO_UNUSE:897|>": 103243, "<|AUDIO_UNUSE:898|>": 103244, "<|AUDIO_UNUSE:899|>": 103245, "<|AUDIO_UNUSE:900|>": 103246, "<|AUDIO_UNUSE:901|>": 103247, "<|AUDIO_UNUSE:902|>": 103248, "<|AUDIO_UNUSE:903|>": 103249, "<|AUDIO_UNUSE:904|>": 103250, "<|AUDIO_UNUSE:905|>": 103251, "<|AUDIO_UNUSE:906|>": 103252, "<|AUDIO_UNUSE:907|>": 103253, "<|AUDIO_UNUSE:908|>": 103254, "<|AUDIO_UNUSE:909|>": 103255, "<|AUDIO_UNUSE:910|>": 103256, "<|AUDIO_UNUSE:911|>": 103257, "<|AUDIO_UNUSE:912|>": 103258, "<|AUDIO_UNUSE:913|>": 103259, "<|AUDIO_UNUSE:914|>": 103260, "<|AUDIO_UNUSE:915|>": 103261, "<|AUDIO_UNUSE:916|>": 103262, "<|AUDIO_UNUSE:917|>": 103263, "<|AUDIO_UNUSE:918|>": 103264, "<|AUDIO_UNUSE:919|>": 103265, "<|AUDIO_UNUSE:920|>": 103266, "<|AUDIO_UNUSE:921|>": 103267, "<|AUDIO_UNUSE:922|>": 103268, "<|AUDIO_UNUSE:923|>": 103269, "<|AUDIO_UNUSE:924|>": 103270, "<|AUDIO_UNUSE:925|>": 103271, "<|AUDIO_UNUSE:926|>": 103272, "<|AUDIO_UNUSE:927|>": 103273, "<|AUDIO_UNUSE:928|>": 103274, "<|AUDIO_UNUSE:929|>": 103275, "<|AUDIO_UNUSE:930|>": 103276, "<|AUDIO_UNUSE:931|>": 103277, "<|AUDIO_UNUSE:932|>": 103278, "<|AUDIO_UNUSE:933|>": 103279, "<|AUDIO_UNUSE:934|>": 103280, "<|AUDIO_UNUSE:935|>": 103281, "<|AUDIO_UNUSE:936|>": 103282, "<|AUDIO_UNUSE:937|>": 103283, "<|AUDIO_UNUSE:938|>": 103284, "<|AUDIO_UNUSE:939|>": 103285, "<|AUDIO_UNUSE:940|>": 103286, "<|AUDIO_UNUSE:941|>": 103287, "<|AUDIO_UNUSE:942|>": 103288, "<|AUDIO_UNUSE:943|>": 103289, "<|AUDIO_UNUSE:944|>": 103290, "<|AUDIO_UNUSE:945|>": 103291, "<|AUDIO_UNUSE:946|>": 103292, "<|AUDIO_UNUSE:947|>": 103293, "<|AUDIO_UNUSE:948|>": 103294, "<|AUDIO_UNUSE:949|>": 103295, "<|AUDIO_UNUSE:950|>": 103296, "<|AUDIO_UNUSE:951|>": 103297, "<|AUDIO_UNUSE:952|>": 103298, "<|AUDIO_UNUSE:953|>": 103299, "<|AUDIO_UNUSE:954|>": 103300, "<|AUDIO_UNUSE:955|>": 103301, "<|AUDIO_UNUSE:956|>": 103302, "<|AUDIO_UNUSE:957|>": 103303, 
"<|AUDIO_UNUSE:958|>": 103304, "<|AUDIO_UNUSE:959|>": 103305, "<|AUDIO_UNUSE:960|>": 103306, "<|AUDIO_UNUSE:961|>": 103307, "<|AUDIO_UNUSE:962|>": 103308, "<|AUDIO_UNUSE:963|>": 103309, "<|AUDIO_UNUSE:964|>": 103310, "<|AUDIO_UNUSE:965|>": 103311, "<|AUDIO_UNUSE:966|>": 103312, "<|AUDIO_UNUSE:967|>": 103313, "<|AUDIO_UNUSE:968|>": 103314, "<|AUDIO_UNUSE:969|>": 103315, "<|AUDIO_UNUSE:970|>": 103316, "<|AUDIO_UNUSE:971|>": 103317, "<|AUDIO_UNUSE:972|>": 103318, "<|AUDIO_UNUSE:973|>": 103319, "<|AUDIO_UNUSE:974|>": 103320, "<|AUDIO_UNUSE:975|>": 103321, "<|AUDIO_UNUSE:976|>": 103322, "<|AUDIO_UNUSE:977|>": 103323, "<|AUDIO_UNUSE:978|>": 103324, "<|AUDIO_UNUSE:979|>": 103325, "<|AUDIO_UNUSE:980|>": 103326, "<|AUDIO_UNUSE:981|>": 103327, "<|AUDIO_UNUSE:982|>": 103328, "<|AUDIO_UNUSE:983|>": 103329, "<|AUDIO_UNUSE:984|>": 103330, "<|AUDIO_UNUSE:985|>": 103331, "<|AUDIO_UNUSE:986|>": 103332, "<|AUDIO_UNUSE:987|>": 103333, "<|AUDIO_UNUSE:988|>": 103334, "<|AUDIO_UNUSE:989|>": 103335, "<|AUDIO_UNUSE:990|>": 103336, "<|AUDIO_UNUSE:991|>": 103337, "<|AUDIO_UNUSE:992|>": 103338, "<|AUDIO_UNUSE:993|>": 103339, "<|AUDIO_UNUSE:994|>": 103340, "<|AUDIO_UNUSE:995|>": 103341, "<|AUDIO_UNUSE:996|>": 103342, "<|AUDIO_UNUSE:997|>": 103343, "<|AUDIO_UNUSE:998|>": 103344, "<|AUDIO_UNUSE:999|>": 103345, "<|AUDIO_UNUSE:1000|>": 103346, "<|AUDIO_UNUSE:1001|>": 103347, "<|AUDIO_UNUSE:1002|>": 103348, "<|AUDIO_UNUSE:1003|>": 103349, "<|AUDIO_UNUSE:1004|>": 103350, "<|AUDIO_UNUSE:1005|>": 103351, "<|AUDIO_UNUSE:1006|>": 103352, "<|AUDIO_UNUSE:1007|>": 103353, "<|AUDIO_UNUSE:1008|>": 103354, "<|AUDIO_UNUSE:1009|>": 103355, "<|AUDIO_UNUSE:1010|>": 103356, "<|AUDIO_UNUSE:1011|>": 103357, "<|AUDIO_UNUSE:1012|>": 103358, "<|AUDIO_UNUSE:1013|>": 103359, "<|AUDIO_UNUSE:1014|>": 103360, "<|AUDIO_UNUSE:1015|>": 103361, "<|AUDIO_UNUSE:1016|>": 103362, "<|AUDIO_UNUSE:1017|>": 103363, "<|AUDIO_UNUSE:1018|>": 103364, "<|AUDIO_UNUSE:1019|>": 103365, "<|AUDIO_UNUSE:1020|>": 103366}
 
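Note: the special-token map removed above is laid out in contiguous id blocks per token family, so any id can be derived from its family's base offset. A minimal sketch (the helper name is illustrative, not part of the repo; offsets are read directly from the JSON shown above):

AUDIO_UNUSE_BASE = 102346   # id of "<|AUDIO_UNUSE:0|>" in the removed added_tokens.json

def audio_unuse_id(n: int) -> int:
    # Illustrative helper: id of <|AUDIO_UNUSE:n|> for 0 <= n <= 1020.
    return AUDIO_UNUSE_BASE + n

assert audio_unuse_id(1020) == 103366   # matches "<|AUDIO_UNUSE:1020|>" above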
 
chat_template.jinja ADDED
@@ -0,0 +1,81 @@
+
+{%- set image_count = namespace(value=0) -%}
+{%- set video_count = namespace(value=0) -%}
+{{- '<|begin_of_sentence|>' }}
+{%- for message in messages -%}
+    {%- if message.role in ['system', 'user'] -%}
+        {%- if message.role == 'user' -%}
+            {{- 'User: ' -}}
+        {%- endif -%}
+        {%- if message.content is string -%}
+            {{- message.content -}}
+        {%- else -%}
+            {%- for content_item in message.content -%}
+                {%- if content_item.type == 'text' -%}
+                    {{- content_item.text -}}
+                {%- elif content_item.type in ['image_url', 'image'] -%}
+                    {%- set image_count.value = image_count.value + 1 -%}
+                    Picture {{ image_count.value }}:<|IMAGE_START|><|IMAGE_PLACEHOLDER|><|IMAGE_END|>
+                {%- elif content_item.type in ['video_url', 'video'] -%}
+                    {%- set video_count.value = video_count.value + 1 -%}
+                    Video {{ video_count.value }}:<|VIDEO_START|><|VIDEO_PLACEHOLDER|><|VIDEO_END|>
+                {%- endif -%}
+            {%- endfor -%}
+        {%- endif -%}
+        {%- if message.role == 'system' -%}
+            {{- '
+' -}}
+        {%- endif -%}
+    {%- elif message.role == 'assistant' -%}
+        {%- macro extract_text_content(content_field) -%}
+            {%- if content_field is string -%}
+                {{- content_field -}}
+            {%- elif content_field is iterable and content_field is not string -%}
+                {%- set ns = namespace(text_parts=[]) -%}
+                {%- set text_parts = [] -%}
+                {%- for item in content_field -%}
+                    {%- if item.type == 'text' -%}
+                        {%- set ns.text_parts = ns.text_parts + [item.text] -%}
+                    {%- endif -%}
+                {%- endfor -%}
+                {{- ns.text_parts | join("") -}}
+            {%- else -%}
+                {{- '' -}}
+            {%- endif -%}
+        {%- endmacro -%}
+        {%- set reasoning_content = extract_text_content(message.reasoning_content) -%}
+        {%- set content = extract_text_content(message.content) -%}
+        {%- if '</think>' in content %}
+            {%- set reasoning_content = content.split('</think>')[0].rstrip('
+').split('<think>')[-1].lstrip('
+') %}
+            {%- set content = content.split('</think>')[-1].lstrip('
+') %}
+        {%- endif %}
+        {%- if reasoning_content %}
+            {{- '
+' + 'Assistant: ' + '<think>
+' + reasoning_content.strip('
+') + '
+</think>
+' + content.lstrip('
+') }}
+        {%- else %}
+            {{- '
+' + 'Assistant: ' + content }}
+        {%- endif %}
+        {{- '<|end_of_sentence|>' }}
+    {%- endif -%}
+{%- endfor -%}
+{%- if add_generation_prompt is not defined or add_generation_prompt is true %}
+    {{- '
+Assistant: ' -}}
+    {%- if (enable_thinking is defined and enable_thinking is false) or enable_thinking is not defined %}
+        {{- '<think>
+
+</think>
+
+' }}
+    {%- endif %}
+    {%- if enable_thinking is defined and enable_thinking is true %}{{- '<think>' }}{%- endif %}
+{%- endif %}
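The added template above is plain Jinja, so its behavior can be checked outside of transformers. A minimal sketch, assuming the file is saved locally as chat_template.jinja and jinja2 is installed; the example message content is illustrative:

# Minimal sketch (not part of the repo): render chat_template.jinja with plain
# Jinja2 to inspect the prompt string it produces.
from jinja2 import Template

with open("chat_template.jinja", encoding="utf-8") as f:
    template = Template(f.read())

messages = [
    {"role": "user", "content": [
        {"type": "image", "image_url": "https://example.com/cat.png"},
        {"type": "text", "text": "What is in this picture?"},
    ]},
]

# enable_thinking=False takes the branch at the end of the template that emits
# an empty <think>...</think> block before the assistant turn.
prompt = template.render(messages=messages, add_generation_prompt=True, enable_thinking=False)
print(prompt)
# Roughly: <|begin_of_sentence|>User: Picture 1:<|IMAGE_START|><|IMAGE_PLACEHOLDER|><|IMAGE_END|>What is in this picture?
#          Assistant: <think>\n\n</think>\n\n

In normal use the same rendering is done by tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True); recent transformers versions forward extra keyword arguments such as enable_thinking to the template.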
chat_template.json DELETED
@@ -1,3 +0,0 @@
- {
- "chat_template": "\n{%- set image_count = namespace(value=0) -%}\n{%- set video_count = namespace(value=0) -%}\n{{- '<|begin_of_sentence|>' }}\n{%- for message in messages -%}\n {%- if message.role in ['system', 'user'] -%}\n {%- if message.role == 'user' -%}\n {{- 'User: ' -}}\n {%- endif -%}\n {%- if message.content is string -%}\n {{- message.content -}}\n {%- else -%}\n {%- for content_item in message.content -%}\n {%- if content_item.type == 'text' -%}\n {{- content_item.text -}}\n {%- elif content_item.type in ['image_url', 'image'] -%}\n {%- set image_count.value = image_count.value + 1 -%}\n Picture {{ image_count.value }}:<|IMAGE_START|><|image@placeholder|><|IMAGE_END|>\n {%- elif content_item.type in ['video_url', 'video'] -%}\n {%- set video_count.value = video_count.value + 1 -%}\n Video {{ video_count.value }}:<|VIDEO_START|><|video@placeholder|><|VIDEO_END|>\n {%- endif -%}\n {%- endfor -%}\n {%- endif -%}\n {%- if message.role == 'system' -%}\n {{- '\n' -}}\n {%- endif -%}\n {%- elif message.role == 'assistant' -%}\n {%- macro extract_text_content(content_field) -%}\n {%- if content_field is string -%}\n {{- content_field -}}\n {%- elif content_field is iterable and content_field is not string -%}\n {%- set ns = namespace(text_parts=[]) -%}\n {%- set text_parts = [] -%}\n {%- for item in content_field -%}\n {%- if item.type == 'text' -%}\n {%- set ns.text_parts = ns.text_parts + [item.text] -%}\n {%- endif -%}\n {%- endfor -%}\n {{- ns.text_parts | join('') -}}\n {%- else -%}\n {{- '' -}}\n {%- endif -%}\n {%- endmacro -%}\n {%- set reasoning_content = extract_text_content(message.reasoning_content) -%}\n {%- set content = extract_text_content(message.content) -%}\n {%- if '</think>' in content %}\n {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}\n {%- set content = content.split('</think>')[-1].lstrip('\n') %}\n {%- endif %}\n {%- if reasoning_content %}\n {{- '\n' + 'Assistant: ' + '<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}\n {%- else %}\n {{- '\n' + 'Assistant: ' + content }}\n {%- endif %}\n {{- '<|end_of_sentence|>' }}\n {%- endif -%}\n{%- endfor -%}\n{%- if add_generation_prompt is not defined or add_generation_prompt is true %}\n {{- '\nAssistant: ' -}}\n {%- if enable_thinking is defined and enable_thinking is false %}\n {{- '<think>\n\n</think>\n\n' }}\n {%- endif %}\n {%- if enable_thinking is not defined or enable_thinking is true %}\n {{- '<think>' }}\n {%- endif %}\n{%- endif %}\n"
- }
 
 
 
 
config.json CHANGED
@@ -1,75 +1,88 @@
 {
-  "architectures": [
-    "Ernie4_5_VLMoeForConditionalGeneration"
-  ],
-  "auto_map": {
-    "AutoConfig": "configuration_ernie4_5_vl.Ernie4_5_VLMoEConfig",
-    "AutoModel": "modeling_ernie4_5_vl.Ernie4_5_VLMoeForConditionalGeneration",
-    "AutoModelForCausalLM": "modeling_ernie4_5_vl.Ernie4_5_VLMoeForConditionalGeneration",
-    "AutoProcessor": "processing_ernie4_5_vl.Ernie4_5_VLProcessor",
-    "AutoImageProcessor": "processing_ernie4_5_vl.Ernie4_5_VLImageProcessor"
-  },
-  "pad_token_id": 0,
-  "bos_token_id": 1,
-  "eos_token_id": 2,
-  "torch_dtype": "bfloat16",
-  "hidden_act": "silu",
-  "hidden_size": 2560,
-  "intermediate_size": 12288,
-  "im_patch_id": 100295,
-  "image_start_token_id": 101304,
   "image_end_token_id": 101305,
-  "video_start_token_id": 101306,
-  "video_end_token_id": 101307,
-  "max_position_embeddings": 131072,
-  "model_type": "ernie4_5_moe_vl",
-  "moe_capacity": [128, 128, 128],
-  "moe_gate": "topk",
-  "moe_intermediate_size": [1536, 512],
-  "moe_k": 6,
-  "moe_layer_end_index": [29, 28],
-  "moe_layer_interval": 1,
-  "moe_layer_start_index": [1, 1],
-  "moe_multimodal_dispatch_use_allgather": "v2-alltoall-unpad-text",
-  "moe_num_experts": [64, 64],
-  "moe_num_shared_experts": 2,
-  "moe_use_aux_free": true,
-  "num_attention_heads": 20,
-  "num_hidden_layers": 28,
-  "num_key_value_heads": 4,
-  "pixel_hidden_size": 1280,
-  "rms_norm_eps": 1e-05,
-  "rope_3d": true,
-  "rope_theta": 500000,
-  "spatial_conv_size": 2,
-  "temporal_conv_size": 2,
-  "vocab_size": 103424,
-  "tie_word_embeddings": true,
-  "use_cache": true,
-  "use_rmsnorm": true,
-  "use_bias": false,
-  "rope_scaling": {
-    "type": "default",
-    "mrope_section": [
-      22,
-      22,
-      20
-    ]
+  "image_start_token_id": 101304,
+  "image_token_id": 100295,
+  "model_type": "ernie4_5_vl_moe",
+  "text_config": {
+    "hidden_act": "silu",
+    "hidden_size": 2560,
+    "initializer_range": 0.02,
+    "intermediate_size": 12288,
+    "max_position_embeddings": 131072,
+    "mlp_layer_types": [
+      "dense",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse",
+      "sparse"
+    ],
+    "model_type": "ernie4_5_vl_moe_text",
+    "moe_intermediate_size": [
+      1536,
+      512
+    ],
+    "moe_k": 6,
+    "moe_norm_min": 1e-12,
+    "moe_num_experts": 64,
+    "moe_num_shared_experts": 2,
+    "num_attention_heads": 20,
+    "num_hidden_layers": 28,
+    "num_key_value_heads": 4,
+    "output_router_logits": false,
+    "rms_norm_eps": 1e-05,
+    "rope_parameters": {
+      "mrope_section": [
+        22,
+        22,
+        20
+      ],
+      "rope_theta": 500000.0,
+      "rope_type": "default"
+    },
+    "router_aux_loss_coef": 0.001,
+    "use_bias": false,
+    "use_cache": true,
+    "vocab_size": 103424
   },
+  "transformers_version": "5.0.0.dev0",
+  "video_end_token_id": 101307,
+  "video_start_token_id": 101306,
+  "video_token_id": 103367,
   "vision_config": {
-    "attn_implementation": "eager",
     "depth": 32,
-    "embed_dim": 1280,
     "hidden_act": "quick_gelu",
     "hidden_size": 1280,
     "in_channels": 3,
-    "in_chans": 3,
-    "mlp_ratio": 4,
+    "initializer_range": 0.02,
+    "intermediate_size": 5120,
+    "model_type": "ernie4_5_vl_moe_vision",
     "num_heads": 16,
     "patch_size": 14,
+    "rms_norm_eps": 1e-06,
     "spatial_merge_size": 2,
-    "spatial_patch_size": 14,
-    "vit_first_fwd_bsz": 128,
-    "attn_sep": true
+    "temporal_merge_size": 2
   }
 }
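For reference, the restructured config.json above can be inspected with nothing but the standard library. A minimal sketch, assuming a local checkout with config.json in the working directory; every field name below is taken from the diff:

# Minimal sketch (stdlib only): read the new nested config layout.
import json

with open("config.json", encoding="utf-8") as f:
    cfg = json.load(f)

text_cfg = cfg["text_config"]
layer_types = text_cfg["mlp_layer_types"]

print(cfg["model_type"])                                  # ernie4_5_vl_moe
print(len(layer_types))                                   # 28 decoder layers
print(layer_types.count("dense"), layer_types.count("sparse"))   # 1 dense, 27 sparse (MoE)
print(text_cfg["moe_num_experts"], text_cfg["moe_k"],
      text_cfg["moe_num_shared_experts"])                 # 64 experts, top-6 routing, 2 shared
print(cfg["vision_config"]["model_type"])                 # ernie4_5_vl_moe_vision

The explicit mlp_layer_types list replaces the old moe_layer_start_index / moe_layer_interval bookkeeping and encodes the same layout: layer 0 dense, layers 1-27 MoE.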
configuration_ernie4_5_vl.py DELETED
@@ -1,650 +0,0 @@
1
- # Copyright (c) 2025 Baidu, Inc. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
-
15
- """Ernie model configuration"""
16
- import copy
17
-
18
- from typing import List, Optional, Tuple, Union
19
-
20
- from transformers import PretrainedConfig
21
-
22
-
23
- __all__ = [
24
- "ERNIE_PRETRAINED_INIT_CONFIGURATION",
25
- "Ernie4_5_Config",
26
- "Ernie4_5_MoEConfig",
27
- "Ernie4_5_VLMoEConfig",
28
- ]
29
-
30
-
31
- class DFNRopeVisionTransformerConfig(PretrainedConfig):
32
- """
33
- Configuration class for DFNRopeVisionTransformer model.
34
- This class inherits from [`PretrainedConfig`] and can be used to control the model outputs. Read the
35
- documentation from [`PretrainedConfig`] for more information.
36
- """
37
-
38
- model_type = "DFNRope_vision_transformer"
39
- base_model_tp_plan = {}
40
-
41
- def __init__(
42
- self,
43
- depth=32,
44
- embed_dim=1280,
45
- hidden_size=3584,
46
- hidden_act="quick_gelu",
47
- mlp_ratio=4,
48
- num_heads=16,
49
- in_channels=3,
50
- patch_size=14,
51
- spatial_merge_size=2,
52
- attn_implementation="eager", # new added
53
- pp_data_balance=False,
54
- recompute=False,
55
- attn_sep=False,
56
- vit_first_fwd_bsz=128,
57
- vit_num_recompute_layers=10000,
58
- **kwargs,
59
- ):
60
- """
61
- Initialize DFNRopeVisionTransformer model configuration with default or specified parameters.
62
-
63
- Args:
64
- depth (int): Number of transformer layers in the model.
65
- embed_dim (int): Dimensionality of the embedding layer.
66
- hidden_size (int): Dimensionality of the feedforward network.
67
- hidden_act (str): Activation function for the feedforward network.
68
- mlp_ratio (float): Ratio between the number of input features and
69
- the number of output features in the feedforward network.
70
- num_heads (int): Number of attention heads in each attention layer.
71
- in_channels (int): Number of channels in the input image.
72
- patch_size (int):
73
- Size of patches in the input image. Defaults to 14.
74
- spatial_merge_size (int):
75
- Spatial merge size for the spatial transformer module. Defaults to 2.
76
- attn_implementation (str): Attention implementation type. Defaults to "eager".
77
- pp_data_balance (bool): Whether to balance data during preprocessing. Defaults to False.
78
- recompute (bool): Whether to use recompute. Defaults to False.
79
- attn_sep (bool): Whether to separate attention computation into two stages. Defaults to False.
80
- vit_first_fwd_bsz (int): First forward batch size for ViT. Defaults to 128.
81
- vit_num_recompute_layers (int): Number of recomputed layers for ViT. Defaults to 10000.
82
- """
83
- super().__init__(**kwargs)
84
-
85
- self.depth = depth
86
- self.embed_dim = embed_dim
87
- self.hidden_size = hidden_size
88
- self.hidden_act = hidden_act
89
- self.mlp_ratio = mlp_ratio
90
- self.num_heads = num_heads
91
- self.in_channels = in_channels
92
- self.patch_size = patch_size
93
- self.spatial_merge_size = spatial_merge_size
94
- self.attn_implementation = attn_implementation
95
- self.pp_data_balance = pp_data_balance
96
- self.recompute = recompute
97
- self.attn_sep = attn_sep
98
- self.vit_first_fwd_bsz = vit_first_fwd_bsz
99
- self.vit_num_recompute_layers = vit_num_recompute_layers
100
-
101
- def get(self, key, default=None):
102
- """get config value by key"""
103
- if hasattr(self, key):
104
- return getattr(self, key)
105
- else:
106
- return default
107
-
108
-
109
- ERNIE_PRETRAINED_INIT_CONFIGURATION = {
110
- "ernie/tiny-random-ernie": {
111
- "hidden_size": 768,
112
- "initializer_range": 0.02,
113
- "intermediate_size": 11008,
114
- "max_position_embeddings": 2048,
115
- "model_type": "ernie",
116
- "num_attention_heads": 2,
117
- "num_hidden_layers": 2,
118
- "rms_norm_eps": 1e-06,
119
- "vocab_size": 32000,
120
- "bos_token_id": 1,
121
- "eos_token_id": 2,
122
- "pad_token_id": 0,
123
- "use_cache": False,
124
- "recompute": False,
125
- "use_flash_attn": True,
126
- "use_pure_fp16": False,
127
- },
128
- }
129
-
130
-
131
- class Ernie4_5_Config(PretrainedConfig):
132
- """
133
- Configuration class for ERNIE model.
134
-
135
- This class stores the configuration of an ERNIE model, defining the model architecture.
136
- It inherits from PretrainedConfig and can be used to control model outputs.
137
- """
138
-
139
- model_type = "ernie"
140
- pretrained_init_configuration = ERNIE_PRETRAINED_INIT_CONFIGURATION
141
- base_model_tp_plan = {}
142
-
143
- def __init__(
144
- self,
145
- vocab_size=32000,
146
- hidden_size=768,
147
- intermediate_size=11008,
148
- max_position_embeddings=32768,
149
- num_hidden_layers=2,
150
- num_attention_heads=2,
151
- initializer_range=0.02, # no use
152
- rms_norm_eps=1e-6,
153
- use_cache=False,
154
- use_flash_attention=True,
155
- use_sparse_flash_attn=True,
156
- use_var_len_flash_attn=False,
157
- recompute=False,
158
- recompute_granularity="core_attn",
159
- recompute_use_reentrant=False,
160
- use_rmsnorm=True,
161
- fuse_rms_norm=False,
162
- fuse_ln=False,
163
- pad_token_id=0,
164
- bos_token_id=1,
165
- eos_token_id=2,
166
- fuse_swiglu=False,
167
- use_bias=False,
168
- rope_theta=10000,
169
- fuse_rope=False,
170
- fuse_softmax_mask=False,
171
- use_fast_ln=False,
172
- weight_share_add_bias=True,
173
- fuse_linear=False,
174
- max_sequence_length=None,
175
- ignored_index=-100,
176
- add_tail_layers=False,
177
- use_recompute_lm_head=False,
178
- use_recompute_loss_fn=False,
179
- refined_recompute=dict(),
180
- attention_probs_dropout_prob=0.0,
181
- hidden_dropout_prob=0.0,
182
- compression_ratio: float = 1.0,
183
- num_key_value_heads=None,
184
- use_sparse_head_and_loss_fn=False,
185
- micro_batch_size=-1,
186
- use_ep_comm_overlap=False,
187
- use_fused_head_and_loss_fn=False,
188
- token_balance_loss=False,
189
- token_balance_seqlen=False, # calculated based on batchsize and seqlen
190
- cachekv_quant: bool = False,
191
- pp_seg_method="layer:ErnieDecoderLayer|EmptyLayer",
192
- **kwargs,
193
- ):
194
- """
195
- Initialize ERNIE model configuration with default or specified parameters.
196
-
197
- Args:
198
- vocab_size (int): Size of the vocabulary (number of unique tokens)
199
- hidden_size (int): Dimensionality of the encoder layers and the pooler layer
200
- intermediate_size (int): Dimensionality of the "intermediate" (feed-forward) layer
201
- max_position_embeddings (int): Maximum sequence length the model can handle
202
- num_hidden_layers (int): Number of hidden layers in the Transformer encoder
203
- num_attention_heads (int): Number of attention heads for each attention layer
204
- rms_norm_eps (float): The epsilon used by the RMS normalization layers
205
- use_cache (bool): Whether to use caching for faster generation (decoding)
206
- use_flash_attention (bool): Whether to use FlashAttention for optimized attention computation
207
- use_sparse_flash_attn (bool): Whether to use sparse FlashAttention
208
- use_var_len_flash_attn (bool): Whether to use variable-length FlashAttention
209
- recompute (bool): Whether to use gradient checkpointing to save memory
210
- recompute_granularity (str): Granularity of recomputation ("core_attn", "full", etc.)
211
- recompute_use_reentrant (bool): Whether to use reentrant checkpointing
212
- use_rmsnorm (bool): Whether to use RMSNorm instead of LayerNorm
213
- fuse_rms_norm (bool): Whether to fuse RMSNorm operations for optimization
214
- fuse_ln (bool): Whether to fuse LayerNorm operations
215
- pad_token_id (int): Token ID used for padding sequences
216
- bos_token_id (int): Token ID used for beginning-of-sequence
217
- eos_token_id (int): Token ID used for end-of-sequence
218
- fuse_swiglu (bool): Whether to fuse SwiGLU operations
219
- use_bias (bool): Whether to use bias terms in linear layers
220
- rope_theta (float): The base period of the RoPE embeddings
221
- fuse_rope (bool): Whether to fuse RoPE operations
222
- use_fast_ln (bool): Whether to use optimized LayerNorm implementation
223
- weight_share_add_bias (bool): Whether to share bias weights in certain layers
224
- fuse_linear (bool): Whether to fuse linear operations
225
- max_sequence_length (int): Maximum sequence length for positional embeddings
226
- ignored_index (int): Target value that is ignored during loss computation
227
- add_tail_layers (bool): Whether to add additional layers at the end
228
- use_recompute_lm_head (bool): Whether to recompute gradients for language model head
229
- use_recompute_loss_fn (bool): Whether to recompute gradients for loss function
230
- refined_recompute (dict): Dictionary specifying refined recomputation settings
231
- attention_probs_dropout_prob (float): Dropout probability for attention weights
232
- hidden_dropout_prob (float): Dropout probability for hidden layers
233
- compression_ratio (float): Ratio for KV cache compression (1.0 = no compression)
234
- num_key_value_heads (int): Number of key/value heads (for Grouped Query Attention)
235
- use_sparse_head_and_loss_fn (bool): Whether to use sparse attention head and loss function
236
- micro_batch_size (int): Size of micro batches (-1 for automatic)
237
- use_ep_comm_overlap (bool): Whether to overlap communication with computation
238
- use_fused_head_loss_fn (bool): Whether to use fused head and loss function
239
- token_balance_loss (bool): Whether to balance loss by token count
240
- token_balance_seqlen (bool): Whether to balance sequence lengths
241
- cachekv_quant (bool): Whether to quantize key-value cache
242
- pp_seg_method (str): Method for pipeline parallel segmentation
243
- **kwargs: Additional keyword arguments passed to parent class
244
- """
245
-
246
- # Set default for tied embeddings if not specified.
247
- if "tie_word_embeddings" not in kwargs:
248
- kwargs["tie_word_embeddings"] = False
249
- super().__init__(
250
- pad_token_id=pad_token_id,
251
- bos_token_id=bos_token_id,
252
- eos_token_id=eos_token_id,
253
- **kwargs,
254
- )
255
- self.vocab_size = vocab_size
256
- self.hidden_size = hidden_size
257
- self.intermediate_size = intermediate_size
258
- self.max_position_embeddings = max_position_embeddings
259
- self.num_hidden_layers = num_hidden_layers
260
- self.num_attention_heads = num_attention_heads
261
- self.initializer_range = initializer_range
262
- self.rms_norm_eps = rms_norm_eps
263
- self.use_cache = use_cache
264
- self.recompute = recompute
265
- self.recompute_granularity = recompute_granularity
266
- self.use_flash_attention = use_flash_attention
267
- self.use_sparse_flash_attn = use_sparse_flash_attn
268
- self.recompute_use_reentrant = recompute_use_reentrant
269
- self.use_var_len_flash_attn = use_var_len_flash_attn
270
- self.pad_token_id = pad_token_id
271
- self.bos_token_id = bos_token_id
272
- self.eos_token_id = eos_token_id
273
- self.fuse_swiglu = fuse_swiglu
274
- self.fuse_rms_norm = fuse_rms_norm
275
- self.fuse_ln = fuse_ln
276
- self.use_rmsnorm = use_rmsnorm
277
- self.micro_batch_size = micro_batch_size
278
-
279
- self.max_sequence_length = max_sequence_length
280
- self.use_bias = use_bias
281
- self.weight_share_add_bias = weight_share_add_bias
282
- self.rope_theta = rope_theta
283
- self.fuse_rope = fuse_rope
284
- self.fuse_softmax_mask = fuse_softmax_mask
285
- self.use_fast_ln = use_fast_ln
286
-
287
- self.fuse_linear = fuse_linear
288
- self.ignored_index = ignored_index
289
- self.add_tail_layers = add_tail_layers
290
- self.use_recompute_lm_head = use_recompute_lm_head
291
- self.use_recompute_loss_fn = use_recompute_loss_fn
292
-
293
- self.refined_recompute = refined_recompute
294
- self.skip_recompute_ops = dict()
295
- """
296
- `refined_recompute` is a dictionary that specifies fine-grained gradient recomputation settings,
297
- which currently only takes effect in Pipeline Parallel (PP) mode.
298
-
299
- In PP mode, this dictionary populates `self.skip_recompute_ops` with the following structure:
300
- - Key (`op_name`): The operation name to configure, with possible values:
301
- * "mlp_row_ln" - MLP row-wise layer normalization
302
- * "flash_attn" - Flash attention operation
303
- * "attention_row_ln" - Attention row-wise layer normalization
304
- * "attention_column_ln" - Attention column-wise layer normalization
305
- * "mlp_column_ln" - MLP column-wise layer normalization
306
-
307
- - Value (`skip_num`): Controls how many times to skip recomputation:
308
- * 0: Never skip recomputation (minimum memory usage)
309
- * -1: Always skip recomputation (maximum memory usage)
310
- * [0,1,...,12]: Skip recomputation for specified number of times
311
- * ≥12: Equivalent to -1 (always skip recomputation)
312
-
313
- This allows precise control over memory/computation tradeoffs for different operations.
314
- """
315
- self.attention_probs_dropout_prob = attention_probs_dropout_prob
316
- self.hidden_dropout_prob = hidden_dropout_prob
317
- self.compression_ratio = compression_ratio
318
- self.num_key_value_heads = num_key_value_heads
319
- self.use_sparse_head_and_loss_fn = use_sparse_head_and_loss_fn
320
- self.use_ep_comm_overlap = use_ep_comm_overlap
321
- self.use_fused_head_and_loss_fn = use_fused_head_and_loss_fn
322
- self.token_balance_loss = token_balance_loss
323
- self.token_balance_seqlen = token_balance_seqlen
324
- self.cachekv_quant = cachekv_quant
325
- self.pp_seg_method = pp_seg_method
326
-
327
- def get(self, key, default=None):
328
- """get config value by key"""
329
- if hasattr(self, key):
330
- return getattr(self, key)
331
- else:
332
- return default
333
-
334
-
335
- class Ernie4_5_MoEConfig(Ernie4_5_Config):
336
- r"""
337
- Configuration class for ErnieMoE model architecture.
338
-
339
- This class stores the configuration for a [`~ErnieModel`] and is used to instantiate
340
- an ErnieMoE model according to the specified arguments. Inherits from [`PretrainedConfig`]
341
- and can control model outputs.
342
-
343
- Attributes:
344
- Inherits all attributes from Ernie4_5_Config and adds MoE-specific configurations.
345
- """
346
-
347
- model_type = "ernie"
348
- attribute_map = {
349
- "n_positions": "max_position_embeddings",
350
- "n_embd": "hidden_size",
351
- "n_layer": "num_hidden_layers",
352
- "n_head": "num_attention_heads",
353
- "n_inner": "intermediate_size",
354
- "activation_function": "hidden_act",
355
- }
356
- pretrained_init_configuration = ERNIE_PRETRAINED_INIT_CONFIGURATION
357
- base_model_tp_plan = {}
358
-
359
- def __init__(
360
- self,
361
- moe_num_experts: Union[int, list] = 0,
362
- use_recompute_moe=False,
363
- moe_capacity=(),
364
- moe_layer_interval=2,
365
- moe_layer_start_index=0,
366
- moe_layer_end_index=-1,
367
- moe_aux_loss_lambda=1e-2,
368
- moe_z_loss_lambda=1e-4,
369
- moe_orthogonal_loss_lambda=1e-2,
370
- sinkhorn_2gate=True,
371
- sinkhorn_temp=3e-2,
372
- global_aux_loss=False,
373
- moe_dropout_prob=0.0,
374
- moe_group="world",
375
- moe_gate="top2",
376
- moe_intermediate_size: Union[int, list] = 0,
377
- moe_num_shared_experts: int = 0,
378
- moe_reverse_token_drop: bool = False,
379
- moe_gate_act: str = "softmax",
380
- moe_norm_gate_logits=True,
381
- moe_all_to_all_dropout: float = 0.0,
382
- moe_k=2,
383
- moe_use_aux_free: bool = False,
384
- # `moe_group_experts` must be used with `moe_use_hard_gate=True`
385
- moe_group_experts: bool = False,
386
- moe_group_orthogonal_loss: bool = True,
387
- enable_delay_scale_loss: bool = True,
388
- num_acc_steps: int = 1,
389
- fuse_gate_detach_matmul: bool = False,
390
- dpo_config=None,
391
- moe_multimodal_dispatch_use_allgather: str = "",
392
- moe_use_hard_gate=False,
393
- moe_dense_experts_token_type_id=3,
394
- **kwargs,
395
- ):
396
- """
397
- Initialize ErnieMoE configuration with MoE-specific parameters.
398
-
399
- Args:
400
- moe_num_experts: Number of experts in MoE layers
401
- use_recompute_moe: Whether to use recomputation for MoE layers
402
- moe_capacity: Capacity configuration for MoE layers
403
- moe_layer_interval: Interval between MoE layers
404
- moe_layer_start_index: Starting layer index for MoE
405
- moe_layer_end_index: Ending layer index for MoE (-1 means last layer)
406
- moe_aux_loss_lambda: Weight for auxiliary loss
407
- moe_z_loss_lambda: Weight for z-loss
408
- moe_orthogonal_loss_lambda: Weight for orthogonal loss
409
- sinkhorn_2gate: Whether to use sinkhorn 2-gate routing
410
- sinkhorn_temp: Temperature for sinkhorn routing
411
- global_aux_loss: Whether to use global auxiliary loss
412
- moe_dropout_prob: Dropout probability for MoE layers
413
- moe_group: Group configuration for MoE experts
414
- moe_gate: Type of gating mechanism ('top2', etc.)
415
- moe_intermediate_size: Intermediate size for MoE layers
416
- moe_num_shared_experts: Number of shared experts
417
- moe_reverse_token_drop: Whether to use reverse token dropping
418
- moe_gate_act: Activation function for gating
419
- moe_norm_gate_logits: Whether to normalize gate logits
420
- moe_all_to_all_dropout: Dropout for all-to-all communication
421
- moe_k: Number of experts to route to
422
- moe_use_aux_free: Whether to use auxiliary-free routing
423
- moe_group_experts: Whether to group experts (requires hard gating)
424
- moe_group_orthogonal_loss: Whether to use group orthogonal loss
425
- enable_delay_scale_loss: Whether to enable delayed loss scaling
426
- num_acc_steps: Number of accumulation steps
427
- fuse_gate_detach_matmul: Whether to fuse gate detach matmul
428
- dpo_config: Optional DPO (Direct Preference Optimization) configuration; defaults to None
- moe_multimodal_dispatch_use_allgather: All-gather based dispatch mode for multimodal MoE tokens (empty string disables it)
- moe_use_hard_gate: Whether to use hard gating when routing tokens to experts
- moe_dense_experts_token_type_id: Token type id whose tokens are routed to the dense experts
- **kwargs: Additional base model configuration parameters
429
-
430
- Note:
431
- When use_recompute_moe is True, recompute_granularity will be changed to full_attn.
432
- """
433
-
434
- if use_recompute_moe:
435
- logger.warning(
436
- "set `use_recompute_moe`=True, disabling `recompute_granularity=full`, change to full_attn."
437
- )
438
- if kwargs.get("recompute") and kwargs.get("recompute_granularity") == "full":
439
- kwargs["recompute_granularity"] = "full_attn"
440
- super().__init__(**kwargs)
441
-
442
- self.moe_num_experts = moe_num_experts
443
- self.use_recompute_moe = use_recompute_moe
444
- self.moe_capacity = moe_capacity
445
- self.moe_aux_loss_lambda = moe_aux_loss_lambda
446
- self.moe_z_loss_lambda = moe_z_loss_lambda
447
- self.moe_orthogonal_loss_lambda = moe_orthogonal_loss_lambda
448
- self.global_aux_loss = global_aux_loss
449
- self.sinkhorn_2gate = sinkhorn_2gate
450
- self.sinkhorn_temp = sinkhorn_temp
451
- self.moe_layer_interval = moe_layer_interval
452
- self.moe_dropout_prob = moe_dropout_prob
453
- self.moe_group = moe_group
454
- self.moe_gate = moe_gate
455
- self.moe_intermediate_size = moe_intermediate_size
456
- self.moe_num_shared_experts = moe_num_shared_experts
457
- self.moe_reverse_token_drop = moe_reverse_token_drop
458
- self.moe_k = moe_k
459
- self.moe_all_to_all_dropout = moe_all_to_all_dropout
460
- self.moe_group_experts = moe_group_experts
461
- self.moe_group_orthogonal_loss = moe_group_orthogonal_loss
462
- self.enable_delay_scale_loss = enable_delay_scale_loss
463
- self.num_acc_steps = num_acc_steps
464
- self.moe_layer_start_index = moe_layer_start_index
465
- self.moe_layer_end_index = (
466
- self.num_hidden_layers - 1
467
- if moe_layer_end_index == -1
468
- else moe_layer_end_index
469
- )
470
- self.moe_gate_act = moe_gate_act
471
- self.moe_norm_gate_logits = moe_norm_gate_logits
472
- self.moe_use_aux_free = moe_use_aux_free
473
- self.fuse_gate_detach_matmul = fuse_gate_detach_matmul
474
- self.dpo_config = dpo_config
475
- self.moe_multimodal_dispatch_use_allgather = (
476
- moe_multimodal_dispatch_use_allgather
477
- )
478
- self.moe_use_hard_gate = moe_use_hard_gate
479
- self.moe_dense_experts_token_type_id = moe_dense_experts_token_type_id
480
-
481
- @property
482
- def multimodel_experts(self) -> bool:
483
- """multimodel experts."""
484
- return (
485
- isinstance(self.moe_num_experts, (tuple, list))
486
- and len(self.moe_num_experts) > 1
487
- )
488
-
489
- @property
490
- def use_moe(self) -> bool:
491
- """
492
- Check if model is using MoE architecture.
493
-
494
- Returns:
495
- bool: True if moe_num_experts > 0, False otherwise
496
- """
497
- return self.moe_num_experts > 0
498
-
499
-
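For orientation, here is a standalone sketch of how the layer-placement fields (`moe_layer_interval`, `moe_layer_start_index`, `moe_layer_end_index`) are typically combined to decide which decoder layers get MoE FFNs. The predicate below is an assumption for illustration only; the authoritative check lives in the (deleted) modeling file:

```python
def is_moe_layer(layer_idx, num_hidden_layers=28, interval=2, start_index=0, end_index=-1):
    """Hypothetical placement rule: every `interval`-th layer inside [start_index, end_index]."""
    end_index = num_hidden_layers - 1 if end_index == -1 else end_index  # mirrors the -1 handling above
    in_range = start_index <= layer_idx <= end_index
    return in_range and (layer_idx + 1) % interval == 0


moe_layers = [i for i in range(28) if is_moe_layer(i)]
print(moe_layers)  # [1, 3, 5, ..., 27] under these illustrative defaults
```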
500
- class Ernie4_5_VLMoEConfig(Ernie4_5_MoEConfig):
501
- """
502
- This is the configuration class to store the configuration of a [`~ErnieModel`]. It is used to instantiate an Ernie
503
- model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
504
- defaults will yield a similar configuration to that of the Ernie-7B.
505
- Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
506
- documentation from [`PretrainedConfig`] for more information.
507
- Args:
508
- vocab_size (`int`, *optional*, defaults to 32000):
509
- Vocabulary size of the Ernie model. Defines the number of different tokens that can be represented by the
510
- `inputs_ids` passed when calling [`~ErnieModel`] or [`~TFErnieModel`].
511
- hidden_size (`int`, *optional*, defaults to 4096):
512
- Dimension of the hidden representations.
513
- intermediate_size (`int`, *optional*, defaults to 11008):
514
- Dimension of the MLP representations.
515
- num_hidden_layers (`int`, *optional*, defaults to 32):
516
- Number of hidden layers in the Transformer encoder.
517
- num_attention_heads (`int`, *optional*, defaults to 32):
518
- Number of attention heads for each attention layer in the Transformer encoder.
519
- hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
520
- The non-linear activation function (function or string) in the decoder.
521
- initializer_range (`float`, *optional*, defaults to 0.02):
522
- The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
523
- rms_norm_eps (`float`, *optional*, defaults to 1e-12):
524
- The epsilon used by the rms normalization layers.
525
- use_cache (`bool`, *optional*, defaults to `True`):
526
- Whether or not the model should return the last key/values attentions (not used by all models). Only
527
- relevant if `config.is_decoder=True`.
528
- tie_word_embeddings(`bool`, *optional*, defaults to `False`):
529
- Whether to tie weight embeddings
530
- """
531
-
532
- model_type = "ernie4_5_moe_vl"
533
- attribute_map = {
534
- "n_positions": "max_position_embeddings",
535
- "n_embd": "hidden_size",
536
- "n_layer": "num_hidden_layers",
537
- "n_head": "num_attention_heads",
538
- "n_inner": "intermediate_size",
539
- "activation_function": "hidden_act",
540
- }
541
- base_model_tp_plan = {
542
- "model.layers.*.self_attn.q_proj": "colwise_rep",
543
- "model.layers.*.self_attn.k_proj": "colwise_rep",
544
- "model.layers.*.self_attn.v_proj": "colwise_rep",
545
- "model.layers.*.self_attn.o_proj": "rowwise_rep",
546
- "model.layers.*.mlp.experts.*.gate_proj": "colwise",
547
- "model.layers.*.mlp.experts.*.up_proj": "colwise",
548
- "model.layers.*.mlp.experts.*.down_proj": "rowwise",
549
- "model.layers.*.mlp_text.experts.*.gate_proj": "colwise",
550
- "model.layers.*.mlp_text.experts.*.up_proj": "colwise",
551
- "model.layers.*.mlp_text.experts.*.down_proj": "rowwise",
552
- "model.layers.*.mlp.gate_proj": "colwise",
553
- "model.layers.*.mlp.up_proj": "colwise",
554
- "model.layers.*.mlp.down_proj": "rowwise"
555
- }
556
-
557
- def __init__(
558
- self,
559
- vision_config=None,
560
- im_patch_id=None,
561
- pixel_hidden_size=None,
562
- modality_detach=False,
563
- temporal_conv_size=2,
564
- spatial_conv_size=2,
565
- mm_vocab_size=0,  # vocab size for multimodal special tokens
566
- max_text_id=None,
567
- use_temporal_conv=True,
568
- moe_use_size_all2all=False,
569
- moe_num_attn_experts=False,
570
- moe_dense_experts_token_type_id: int = 3,
571
- moe_use_hard_gate: bool = True,
572
- moe_fuse_experts: bool = False,
573
- moe_use_token_type_bias: bool = False,
574
- disable_ffn_model_parallel=False,
575
- fuse_attn_ffn=True,
576
- rope_3d=True,
577
- freq_allocation=20,
578
- using_precision_check=False,
579
- use_recompute_resampler=False,
580
- resampler_fuse_rms_norm=False,
581
- moe_layer_feed_fake_token=False,
582
- tensor_parallel_degree=1,
583
- **kwargs,
584
- ):
585
- super().__init__(**kwargs)
586
- if isinstance(vision_config, dict):
587
- self.vision_config = DFNRopeVisionTransformerConfig(**vision_config)
588
- else:
589
- self.vision_config = DFNRopeVisionTransformerConfig()
590
- self.im_patch_id = im_patch_id
591
- self.pixel_hidden_size = pixel_hidden_size
592
- self.modality_detach = modality_detach
593
- self.temporal_conv_size = temporal_conv_size
594
- self.spatial_conv_size = spatial_conv_size
595
- self.mm_vocab_size = mm_vocab_size
596
- self.max_text_id = max_text_id
597
- self.use_temporal_conv = use_temporal_conv
598
-
599
- self.moe_use_size_all2all = moe_use_size_all2all
600
- self.moe_num_attn_experts = moe_num_attn_experts
601
- self.moe_dense_experts_token_type_id = moe_dense_experts_token_type_id
602
- self.moe_use_hard_gate = moe_use_hard_gate
603
- self.moe_fuse_experts = moe_fuse_experts
604
- self.moe_use_token_type_bias = moe_use_token_type_bias
605
- self.disable_ffn_model_parallel = disable_ffn_model_parallel
606
-
607
- self.fuse_attn_ffn = fuse_attn_ffn
608
- self.rope_3d = rope_3d
609
- self.freq_allocation = freq_allocation
610
- self.using_precision_check = using_precision_check
611
- self.use_recompute_resampler = use_recompute_resampler
612
- self.resampler_fuse_rms_norm = resampler_fuse_rms_norm
613
- self.moe_layer_feed_fake_token = moe_layer_feed_fake_token
614
-
615
- self.tensor_parallel_degree = tensor_parallel_degree
616
-
617
- @property
618
- def multimodel_experts(self) -> bool:
619
- """Check if model is using more than 1 multimodel experts."""
620
- return (
621
- isinstance(self.moe_num_experts, (tuple, list))
622
- and len(self.moe_num_experts) > 1
623
- )
624
-
625
- @property
626
- def use_moe(self) -> bool:
627
- """
628
- Check if model is using MoE architecture.
629
-
630
- Returns:
631
- bool: True if moe_num_experts > 0, False otherwise
632
- """
633
- return (
634
- sum(self.moe_num_experts) > 0
635
- if self.multimodel_experts
636
- else self.moe_num_experts > 0
637
- )
638
-
639
- def to_dict(self, saving_file=False):
640
- """to_dict"""
641
- output = copy.deepcopy(self.__dict__)
642
- if self.vision_config:
643
- output["vision_config"] = (
644
- self.vision_config.to_dict()
645
- if isinstance(self.vision_config, (DFNRopeVisionTransformerConfig))
646
- else self.vision_config
647
- )
648
-
649
- output["model_type"] = self.__class__.model_type
650
- return output
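The VL variant lets `moe_num_experts` be a per-modality list (for example, separate text and vision expert pools); the `use_moe` and `multimodel_experts` properties above switch on that. A self-contained sketch mirroring the predicates shown in this file:

```python
from typing import List, Union


def multimodel_experts(moe_num_experts: Union[int, List[int]]) -> bool:
    """Mirror of Ernie4_5_VLMoEConfig.multimodel_experts."""
    return isinstance(moe_num_experts, (tuple, list)) and len(moe_num_experts) > 1


def use_moe(moe_num_experts: Union[int, List[int]]) -> bool:
    """Mirror of Ernie4_5_VLMoEConfig.use_moe: sum per-modality counts when a list is given."""
    if multimodel_experts(moe_num_experts):
        return sum(moe_num_experts) > 0
    return moe_num_experts > 0


print(use_moe(0))         # False: dense model
print(use_moe(64))        # True: single expert pool
print(use_moe([64, 64]))  # True: separate text and vision expert pools
```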
 
modeling_ernie4_5_vl.py DELETED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json DELETED
@@ -1,29 +0,0 @@
1
- {
2
- "crop_size": {
3
- "height": 224,
4
- "width": 224
5
- },
6
- "do_center_crop": false,
7
- "do_convert_rgb": true,
8
- "do_normalize": true,
9
- "do_rescale": true,
10
- "do_resize": true,
11
- "image_mean": [
12
- 0.48145466,
13
- 0.4578275,
14
- 0.40821073
15
- ],
16
- "image_std": [
17
- 0.26862954,
18
- 0.26130258,
19
- 0.27577711
20
- ],
21
- "resample": 3,
22
- "rescale_factor": 0.00392156862745098,
23
- "size": {
24
- "height": 224,
25
- "width": 224
26
- },
27
- "min_pixels": 3136,
28
- "max_pixels": 4816896
29
- }
 
processing_ernie4_5_vl.py DELETED
@@ -1,1822 +0,0 @@
1
- # Copyright (c) 2025 Baidu, Inc. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
-
15
- """Tokenization classes and Image processor class, Processor class for Ernie_45T_VL."""
16
-
17
- import copy
18
- import io
19
- import os
20
- import math
21
- import random
22
- import requests
23
- import base64
24
- import datetime
25
- import hashlib
26
- import threading
27
- import uuid
28
- import decord
29
- from shutil import copyfile
30
- from typing import Any, Dict, List, Optional, Tuple, Union
31
-
32
- import numpy as np
33
- import torch
34
- from PIL import Image, ImageDraw, ImageFont
35
- from PIL.ExifTags import TAGS
36
- from collections import defaultdict
37
- from pathlib import Path
38
- from tempfile import NamedTemporaryFile as ntf
39
-
40
- try:
41
- # moviepy 1.0
42
- import moviepy.editor as mp
43
- except ImportError:
44
- # moviepy 2.0
45
- import moviepy as mp
46
-
47
- import sentencepiece as spm
48
- from transformers.tokenization_utils import PreTrainedTokenizer
49
- from transformers.tokenization_utils_base import (
50
- PaddingStrategy,
51
- TextInput,
52
- )
53
- from transformers.utils import TensorType, logging
54
- from transformers.video_utils import VideoInput
55
- from transformers.processing_utils import ProcessorMixin
56
- from transformers.feature_extraction_utils import BatchFeature
57
- from transformers.image_processing_utils import BaseImageProcessor, BatchFeature
58
- from transformers.image_transforms import (
59
- convert_to_rgb,
60
- normalize,
61
- rescale,
62
- resize,
63
- to_channel_dimension_format,
64
- )
65
- from transformers.image_utils import (
66
- OPENAI_CLIP_MEAN,
67
- OPENAI_CLIP_STD,
68
- ChannelDimension,
69
- ImageInput,
70
- PILImageResampling,
71
- get_image_size,
72
- infer_channel_dimension_format,
73
- is_valid_image,
74
- make_list_of_images,
75
- to_numpy_array,
76
- valid_images,
77
- )
78
-
79
- logger = logging.get_logger(__name__)
80
-
81
-
82
- class Ernie4_5_VLTokenizer(PreTrainedTokenizer):
83
- """
84
- SentencePiece-based tokenizer for Ernie4_5_VL.
85
- """
86
-
87
- vocab_files_names = {
88
- "vocab_file": "tokenizer.model",
89
- }
90
- # Model input names expected by the tokenizer
91
- model_input_names = ["input_ids", "position_ids", "attention_mask", "labels"]
92
- # Padding side (where to add padding tokens)
93
- padding_side = "right"
94
-
95
- def __init__(
96
- self,
97
- vocab_file,
98
- bos_token="<s>",
99
- cls_token="<cls>",
100
- eos_token="</s>",
101
- mask_token="<mask:0>",
102
- pad_token="<pad>",
103
- sep_token="<sep>",
104
- unk_token="<unk>",
105
- additional_special_tokens=None,
106
- **kwargs,
107
- ):
108
- """
109
- Initialize the Ernie4_5_VLTokenizer
110
-
111
- Args:
112
- vocab_file (str): Path to the tokenizer vocabulary model.
113
- bos_token (str, optional): The beginning of sequence token. Defaults to `"<s>"`.
114
- cls_token (str, optional): The classifier token. Defaults to `"<cls>"`.
115
- eos_token (str, optional): The end of sequence token. Defaults to `"</s>"`.
116
- mask_token (str, optional): The masking token. Defaults to `"<mask:0>"`.
117
- pad_token (str, optional): The padding token. Defaults to `"<pad>"`.
118
- sep_token (str, optional): The separation token. Defaults to `"<sep>"`.
119
- unk_token (str, optional): The unknown tokens symbol. Defaults to `"<unk>"`.
120
- additional_special_tokens (List[str], optional): Additional special tokens to use.
121
- Defaults to `["<mask:1>", "<mask:7>"]`.
122
- **kwargs (dict): Additional keyword arguments passed along to the superclass.
123
- """
124
-
125
- # Store vocabulary file path
126
- self.vocab_file = vocab_file
127
- # Initialize SentencePiece processor
128
- self.sp_model = spm.SentencePieceProcessor()
129
- # Load the vocabulary model
130
- self.sp_model.Load(vocab_file)
131
-
132
- # Set default additional special tokens if none provided
133
- if additional_special_tokens is None:
134
- additional_special_tokens = ["<mask:1>", "<mask:7>"]
135
- super().__init__(
136
- bos_token=bos_token,
137
- cls_token=cls_token,
138
- eos_token=eos_token,
139
- mask_token=mask_token,
140
- pad_token=pad_token,
141
- sep_token=sep_token,
142
- unk_token=unk_token,
143
- additional_special_tokens=additional_special_tokens,
144
- **kwargs,
145
- )
146
-
147
- @property
148
- def space_token(self):
149
- """Return the space token"""
150
- return "<mask:1>"
151
-
152
- @property
153
- def space_token_id(self):
154
- """Return the ID of the space token"""
155
- return self.sp_model.piece_to_id("<mask:1>")
156
-
157
- @property
158
- def gend_token(self):
159
- """Return the gender token"""
160
- return "<mask:7>"
161
-
162
- @property
163
- def gend_token_id(self):
164
- """Return the ID of the gender token"""
165
- return self.sp_model.piece_to_id("<mask:7>")
166
-
167
- @property
168
- def im_start_id(self):
169
- """Return the ID of the image start token"""
170
- return self.sp_model.piece_to_id("<|im_start|>")
171
-
172
- @property
173
- def im_end_id(self):
174
- """Return the ID of the image end token"""
175
- return self.sp_model.piece_to_id("<|im_end|>")
176
-
177
- @property
178
- def vocab_size(self):
179
- """Return the size of the vocabulary"""
180
- return self.sp_model.vocab_size()
181
-
182
- def get_vocab(self):
183
- """Return the vocabulary as a dictionary mapping tokens to IDs"""
184
- vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}
185
- vocab.update(self.added_tokens_encoder)
186
- return vocab
187
-
188
- def _tokenize(self, text):
189
- """Tokenize the input text into pieces"""
190
- return self.sp_model.encode_as_pieces(text)
191
-
192
- def _convert_token_to_id(self, token):
193
- """Convert a token to its corresponding ID"""
194
- return self.sp_model.piece_to_id(token)
195
-
196
- def _convert_id_to_token(self, id):
197
- """Convert an ID to its corresponding token"""
198
- return self.sp_model.id_to_piece(id)
199
-
200
- def convert_tokens_to_string(self, tokens):
201
- """Convert a sequence of tokens back to a string"""
202
- current_sub_tokens = []
203
- out_string = ""
204
-
205
- for token in tokens:
206
- # Handle special tokens differently
207
- if token in self.all_special_tokens:
208
- out_string += self.sp_model.decode(current_sub_tokens) + token
209
- current_sub_tokens = []
210
- else:
211
- current_sub_tokens.append(token)
212
-
213
- # Add any remaining sub-tokens
214
- out_string += self.sp_model.decode(current_sub_tokens)
215
- return out_string
216
-
217
- def prepare_for_model(self, *args, **kwargs):
218
- """Prepare the tokenized inputs for the model"""
219
- # Remove add_special_tokens if present (not supported)
220
- if "add_special_tokens" in kwargs:
221
- kwargs.pop("add_special_tokens")
222
- return super().prepare_for_model(*args, **kwargs)
223
-
224
- def save_vocabulary(
225
- self, save_directory, filename_prefix: Optional[str] = None
226
- ) -> Tuple[str]:
227
- """
228
- Save the vocabulary and special tokens file to a directory.
229
-
230
- Args:
231
- save_directory (`str`): The directory to save the vocabulary to
232
- filename_prefix (`str`, optional): Prefix to add to the filename
233
-
234
- Returns:
235
- `Tuple(str)`: Paths to the saved files
236
- """
237
- if not os.path.isdir(save_directory):
238
- logger.error(f"Vocabulary path ({save_directory}) should be a directory")
239
- return
240
-
241
- # Construct output vocabulary file path
242
- out_vocab_file = os.path.join(
243
- save_directory,
244
- (filename_prefix + "-" if filename_prefix else "")
245
- + self.vocab_files_names["vocab_file"],
246
- )
247
-
248
- # Copy or create vocabulary file
249
- if os.path.abspath(self.vocab_file) != os.path.abspath(
250
- out_vocab_file
251
- ) and os.path.isfile(self.vocab_file):
252
- copyfile(self.vocab_file, out_vocab_file)
253
- elif not os.path.isfile(self.vocab_file):
254
- with open(out_vocab_file, "wb") as fi:
255
- content_spiece_model = self.sp_model.serialized_model_proto()
256
- fi.write(content_spiece_model)
257
-
258
- return (out_vocab_file,)
259
-
260
- def _decode(self, *args, **kwargs):
261
- """Decode token_id back to text"""
262
- # Remove some parameters that aren't used
263
- kwargs.pop("clean_up_tokenization_spaces", None)
264
- kwargs.pop("spaces_between_special_tokens", None)
265
-
266
- # Call parent decode method with specific parameters
267
- return super()._decode(
268
- *args,
269
- **kwargs,
270
- clean_up_tokenization_spaces=False,
271
- spaces_between_special_tokens=False,
272
- )
273
-
274
- def _pad(
275
- self,
276
- encoded_inputs: Dict,
277
- max_length: Optional[int] = None,
278
- padding_strategy=PaddingStrategy.DO_NOT_PAD,
279
- pad_to_multiple_of: Optional[int] = None,
280
- return_attention_mask: Optional[bool] = None,
281
- **kwargs
282
- ) -> dict:
283
- """Pad the encoded inputs to the specified length"""
284
- if return_attention_mask is None:
285
- return_attention_mask = "attention_mask" in self.model_input_names
286
- if return_attention_mask:
287
- required_input = encoded_inputs[self.model_input_names[0]]
288
- if padding_strategy == PaddingStrategy.LONGEST:
289
- max_length = len(required_input)
290
-
291
- # Adjust max_length if needed for multiple of padding
292
- if (
293
- max_length is not None
294
- and pad_to_multiple_of is not None
295
- and (max_length % pad_to_multiple_of != 0)
296
- ):
297
- max_length = (
298
- (max_length // pad_to_multiple_of) + 1
299
- ) * pad_to_multiple_of
300
-
301
- # Check if padding is needed
302
- needs_to_be_padded = (
303
- padding_strategy != PaddingStrategy.DO_NOT_PAD
304
- and len(required_input) != max_length
305
- )
306
-
307
- # Handle attention mask if present
308
- if (
309
- "attention_mask" in encoded_inputs
310
- and encoded_inputs["attention_mask"] is not None
311
- ):
312
- attention_mask = encoded_inputs.pop("attention_mask")
313
- if isinstance(attention_mask, torch.Tensor):
314
- attention_mask = attention_mask.numpy()
315
- elif isinstance(attention_mask, list):
316
- attention_mask = np.array(attention_mask)
317
- elif not isinstance(attention_mask, np.ndarray):
318
- raise ValueError(
319
- f"Unexpected type {type(attention_mask)} of attention_mask, "
320
- )
321
- else:
322
- # Create default attention mask if none provided
323
- attention_mask = np.tril(
324
- np.ones((len(required_input), len(required_input)), dtype=np.int64)
325
- )
326
- attention_mask = np.expand_dims(attention_mask, axis=0)
327
-
328
- # Perform padding if needed
329
- if needs_to_be_padded:
330
- difference = max_length - len(required_input)
331
- if self.padding_side == "right":
332
- if attention_mask.ndim == 1:
333
- pad_width = [(0, difference)]
334
- else:
335
- pad_width = [(0, 0), (0, difference), (0, difference)]
336
- elif self.padding_side == "left":
337
- if attention_mask.ndim == 1:
338
- pad_width = [(difference, 0)]
339
- else:
340
- pad_width = [(0, 0), (difference, 0), (difference, 0)]
341
- else:
342
- raise ValueError(
343
- "Invalid padding strategy:" + str(self.padding_side)
344
- )
345
-
346
- attention_mask = np.pad(
347
- attention_mask,
348
- pad_width=pad_width,
349
- mode="constant",
350
- constant_values=0,
351
- )
352
-
353
- # Call parent padding method
354
- encoded_inputs = super()._pad(
355
- encoded_inputs,
356
- max_length,
357
- padding_strategy=padding_strategy,
358
- pad_to_multiple_of=pad_to_multiple_of,
359
- return_attention_mask=False,
360
- )
361
-
362
- # Add attention mask back if needed
363
- if return_attention_mask:
364
- encoded_inputs["attention_mask"] = attention_mask.tolist()
365
-
366
- return encoded_inputs
367
-
368
-
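The `_pad` override above synthesizes a causal (lower-triangular) attention mask when none is supplied and pads it together with the inputs. A small numpy sketch of just that mask handling, independent of the tokenizer:

```python
import numpy as np

seq_len, max_length = 3, 5

# default mask: lower-triangular causal mask with a leading batch dimension, as in _pad
attention_mask = np.expand_dims(np.tril(np.ones((seq_len, seq_len), dtype=np.int64)), axis=0)

# right-padding: grow the last two dimensions by the length difference, filling with zeros
difference = max_length - seq_len
padded = np.pad(
    attention_mask,
    pad_width=[(0, 0), (0, difference), (0, difference)],
    mode="constant",
    constant_values=0,
)
print(padded.shape)  # (1, 5, 5); the padded rows and columns are all zeros
```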
369
- def round_by_factor(number: int, factor: int) -> int:
370
- """Returns the closest integer to 'number' that is divisible by 'factor'."""
371
- return round(number / factor) * factor
372
-
373
-
374
- def ceil_by_factor(number: int, factor: int) -> int:
375
- """Returns the smallest integer greater than or equal to 'number' that is divisible by 'factor'."""
376
- return math.ceil(number / factor) * factor
377
-
378
-
379
- def floor_by_factor(number: int, factor: int) -> int:
380
- """Returns the largest integer less than or equal to 'number' that is divisible by 'factor'."""
381
- return math.floor(number / factor) * factor
382
-
383
-
384
- def smart_resize(
385
- height: int,
386
- width: int,
387
- factor: int = 28,
388
- min_pixels: int = 4 * 28 * 28,
389
- max_pixels: int = 16384 * 28 * 28,
390
- ):
391
- """
392
- Rescales the image so that the following conditions are met:
393
-
394
- 1. Both dimensions (height and width) are divisible by 'factor'.
395
-
396
- 2. The total number of pixels is within the range ['min_pixels', 'max_pixels'].
397
-
398
- 3. The aspect ratio of the image is maintained as closely as possible.
399
- """
400
- MAX_RATIO = 200
401
- if max(height, width) / min(height, width) > MAX_RATIO:
402
- if height > width:
403
- new_width = max(factor, round_by_factor(width, factor))
404
- new_height = floor_by_factor(new_width * MAX_RATIO, factor)
405
- else:
406
- new_height = max(factor, round_by_factor(height, factor))
407
- new_width = floor_by_factor(new_height * MAX_RATIO, factor)
408
-
409
- logger.info(
410
- f"absolute aspect ratio must be smaller than {MAX_RATIO}, got {max(height, width) / min(height, width)},\
411
- resize to {max(new_height, new_width) / min(new_height, new_width)}"
412
- )
413
-
414
- height = new_height
415
- width = new_width
416
-
417
- h_bar = max(factor, round_by_factor(height, factor))
418
- w_bar = max(factor, round_by_factor(width, factor))
419
- if h_bar * w_bar > max_pixels:
420
- beta = math.sqrt((height * width) / max_pixels)
421
- h_bar = floor_by_factor(height / beta, factor)
422
- w_bar = floor_by_factor(width / beta, factor)
423
- elif h_bar * w_bar < min_pixels:
424
- beta = math.sqrt(min_pixels / (height * width))
425
- h_bar = ceil_by_factor(height * beta, factor)
426
- w_bar = ceil_by_factor(width * beta, factor)
427
-
428
- if min_pixels > h_bar * w_bar or h_bar * w_bar > max_pixels:
429
- raise ValueError(f"encounter invalid h_bar: {h_bar}, w_bar: {w_bar}")
430
-
431
- return h_bar, w_bar
432
-
433
-
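A quick numeric check of the main `smart_resize` path for a 1080x1920 frame with the default factor of 28; the snippet reproduces the rounding and pixel-budget clamping inline so it runs on its own:

```python
import math

height, width, factor = 1080, 1920, 28
min_pixels, max_pixels = 4 * 28 * 28, 16384 * 28 * 28

# round both sides to the nearest multiple of `factor` (at least one factor)
h_bar = max(factor, round(height / factor) * factor)
w_bar = max(factor, round(width / factor) * factor)

# clamp into the pixel budget while keeping divisibility by `factor`
if h_bar * w_bar > max_pixels:
    beta = math.sqrt((height * width) / max_pixels)
    h_bar = math.floor(height / beta / factor) * factor
    w_bar = math.floor(width / beta / factor) * factor
elif h_bar * w_bar < min_pixels:
    beta = math.sqrt(min_pixels / (height * width))
    h_bar = math.ceil(height * beta / factor) * factor
    w_bar = math.ceil(width * beta / factor) * factor

print(h_bar, w_bar)  # 1092 1932 -> both divisible by 28, ~2.1M pixels, inside the budget
```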
434
- def is_scaled_image(image: np.ndarray) -> bool:
435
- """
436
- Checks to see whether the pixel values have already been rescaled to [0, 1].
437
- """
438
- if image.dtype == np.uint8:
439
- return False
440
-
441
- # It's possible the image has pixel values in [0, 255] but is of floating type
442
- return np.min(image) >= 0 and np.max(image) <= 1
443
-
444
-
445
- def make_batched_images(images) -> List[List[ImageInput]]:
446
- """
447
- Accepts images in list or nested list format, and makes a list of images for preprocessing.
448
-
449
- Args:
450
- images (`Union[List[List[ImageInput]], List[ImageInput], ImageInput]`):
451
- The input image.
452
-
453
- Returns:
454
- list: A list of images.
455
- """
456
- if (
457
- isinstance(images, (list, tuple))
458
- and isinstance(images[0], (list, tuple))
459
- and is_valid_image(images[0][0])
460
- ):
461
- return [img for img_list in images for img in img_list]
462
-
463
- elif isinstance(images, (list, tuple)) and is_valid_image(images[0]):
464
- return images
465
-
466
- elif is_valid_image(images):
467
- return [images]
468
-
469
- raise ValueError(f"Could not make batched images from {images}")
470
-
471
-
472
- # Copied from transformers.models.llava_next_video.image_processing_llava_next_video.make_batched_videos
473
- def make_batched_videos(videos) -> List[VideoInput]:
474
- """dummy"""
475
- if (
476
- isinstance(videos, (list, tuple))
477
- and isinstance(videos[0], (list, tuple))
478
- and is_valid_image(videos[0][0])
479
- ):
480
- return videos
481
-
482
- elif isinstance(videos, (list, tuple)) and is_valid_image(videos[0]):
483
- if isinstance(videos[0], Image.Image):
484
- return [videos]
485
- elif len(videos[0].shape) == 4:
486
- return [list(video) for video in videos]
487
-
488
- elif is_valid_image(videos) and len(videos.shape) == 4:
489
- return [list(videos)]
490
-
491
- raise ValueError(f"Could not make batched video from {videos}")
492
-
493
-
494
- class Ernie4_5_VLImageProcessor(BaseImageProcessor):
495
- r"""
496
- Constructs an adaptive image processor that dynamically resizes images based on the original image dimensions.
497
-
498
- Args:
499
- do_resize (`bool`, *optional*, defaults to `True`):
500
- Whether to resize the image's (height, width) dimensions.
501
- resample (`PILImageResampling`, *optional*, defaults to `Resampling.BICUBIC`):
502
- Resampling filter to use when resizing the image.
503
- do_rescale (`bool`, *optional*, defaults to `True`):
504
- Whether to rescale the image by the specified scale `rescale_factor`.
505
- rescale_factor (`int` or `float`, *optional*, defaults to `1/255`):
506
- Scale factor to use if rescaling the image.
507
- do_normalize (`bool`, *optional*, defaults to `True`):
508
- Whether to normalize the image.
509
- image_mean (`float` or `List[float]`, *optional*, defaults to `[0.48145466, 0.4578275, 0.40821073]`):
510
- Mean to use if normalizing the image. This is a float or list of floats for each channel in the image.
511
- image_std (`float` or `List[float]`, *optional*, defaults to `[0.26862954, 0.26130258, 0.27577711]`):
512
- Standard deviation to use if normalizing the image. This is a float or list of floats for each channel
513
- in the image.
514
- do_convert_rgb (`bool`, *optional*, defaults to `True`):
515
- Whether to convert the image to RGB.
516
- min_pixels (`int`, *optional*, defaults to `56 * 56`):
517
- The min pixels of the image to resize the image.
518
- max_pixels (`int`, *optional*, defaults to `28 * 28 * 1280`):
519
- The max pixels of the image to resize the image.
520
- patch_size (`int`, *optional*, defaults to 14):
521
- The spatial patch size of the vision encoder.
522
- temporal_conv_size (`int`, *optional*, defaults to 2):
523
- The temporal convolution size in the resampler.
524
- merge_size (`int`, *optional*, defaults to 2):
525
- The spatial merge factor applied between the vision encoder and the LLM.
526
- """
527
-
528
- model_input_names = [
529
- "pixel_values",
530
- "image_grid_thw",
531
- "pixel_values_videos",
532
- "video_grid_thw",
533
- ]
534
-
535
- def __init__(
536
- self,
537
- do_resize: bool = True,
538
- resample: PILImageResampling = PILImageResampling.BICUBIC,
539
- do_rescale: bool = True,
540
- rescale_factor: Union[float, List[float]] = 1 / 255,
541
- do_normalize: bool = True,
542
- image_mean: Optional[Union[float, List[float]]] = None,
543
- image_std: Optional[Union[float, List[float]]] = None,
544
- do_convert_rgb: bool = True,
545
- min_pixels: int = 56 * 56,
546
- max_pixels: int = 28 * 28 * 1280,
547
- patch_size: int = 14,
548
- temporal_conv_size: int = 2,
549
- merge_size: int = 2,
550
- **kwargs,
551
- ) -> None:
552
- """init"""
553
- super().__init__(**kwargs)
554
- self.do_resize = do_resize
555
- self.resample = resample
556
- self.do_rescale = do_rescale
557
- self.rescale_factor = rescale_factor
558
- self.do_normalize = do_normalize
559
- self.image_mean = image_mean if image_mean is not None else OPENAI_CLIP_MEAN
560
- self.image_std = image_std if image_std is not None else OPENAI_CLIP_STD
561
- self.min_pixels = min_pixels
562
- self.max_pixels = max_pixels
563
- self.patch_size = patch_size
564
- self.temporal_conv_size = temporal_conv_size
565
- self.merge_size = merge_size
566
- self.size = {"min_pixels": min_pixels, "max_pixels": max_pixels}
567
- self.do_convert_rgb = do_convert_rgb
568
-
569
- def set_pixels(self, min_pixels=None, max_pixels=None, msg=""):
570
- """set_pixels"""
571
- if min_pixels is not None:
572
- assert (
573
- isinstance(min_pixels, int) and min_pixels >= 0
574
- ), "min_pixels must be positive int"
575
- logger.info(
576
- f"{msg} Ernie4_5_VLImageProcessor set min_pixels = {min_pixels}"
577
- )
578
- self.min_pixels = min_pixels
579
- self.size["min_pixels"] = int(min_pixels)
580
- if max_pixels is not None:
581
- assert (
582
- isinstance(max_pixels, int) and max_pixels > 0
583
- ), "max_pixels must be positive int"
584
- logger.info(
585
- f"{msg} Ernie4_5_VLImageProcessor set max_pixels = {max_pixels}"
586
- )
587
- self.max_pixels = max_pixels
588
- self.size["max_pixels"] = int(max_pixels)
589
-
590
- def get_smarted_resize(self, height, width, min_pixels=None, max_pixels=None):
591
- """dummy"""
592
- actual_min_pixels = min_pixels if min_pixels is not None else self.min_pixels
593
- actual_max_pixels = max_pixels if max_pixels is not None else self.max_pixels
594
- resized_height, resized_width = smart_resize(
595
- height,
596
- width,
597
- factor=self.patch_size * self.merge_size,
598
- min_pixels=actual_min_pixels,
599
- max_pixels=actual_max_pixels,
600
- )
601
- return (resized_height, resized_width), (
602
- resized_height // self.patch_size,
603
- resized_width // self.patch_size,
604
- )
605
-
606
- def _preprocess(
607
- self,
608
- images: Union[ImageInput, VideoInput],
609
- do_resize: bool = True,
610
- resample: PILImageResampling = None,
611
- do_rescale: bool = True,
612
- rescale_factor: float = 1 / 255,
613
- do_normalize: bool = True,
614
- image_mean: Optional[Union[float, List[float]]] = None,
615
- image_std: Optional[Union[float, List[float]]] = None,
616
- do_convert_rgb: bool = False,
617
- data_format: Optional[ChannelDimension] = ChannelDimension.FIRST,
618
- input_data_format: Optional[Union[str, ChannelDimension]] = None,
619
- predetermined_grid_thw=None,
620
- ):
621
- """
622
- Preprocess an image or batch of images. Copy of the `preprocess` method from `CLIPImageProcessor`.
623
-
624
- Args:
625
- images (`ImageInput` or `VideoInput`):
626
- Image or batch of images to preprocess. Expects pixel values ranging from 0 to 255.
627
- If pixel values range from 0 to 1, set `do_rescale=False`.
628
- do_resize (`bool`, *optional*, defaults to `self.do_resize`):
629
- Whether to resize the image.
630
- resample (`PILImageResampling`, *optional*, defaults to `self.resample`):
631
- Resampling filter to use if resizing the image. This can be one of the `PILImageResampling` enums.
632
- do_rescale (`bool`, *optional*, defaults to `self.do_rescale`):
633
- Whether to rescale the image.
634
- rescale_factor (`float`, *optional*, defaults to `self.rescale_factor`):
635
- Scale factor to use if rescaling the image.
636
- do_normalize (`bool`, *optional*, defaults to `self.do_normalize`):
637
- Whether to normalize the image.
638
- image_mean (`float` or `List[float]`, *optional*, defaults to `self.image_mean`):
639
- Mean to use if normalizing the image.
640
- Can be a float or a list of floats corresponding to the number of channels in the image.
641
- image_std (`float` or `List[float]`, *optional*, defaults to `self.image_std`):
642
- Standard deviation to use if normalizing the image.
643
- Can be a float or a list of floats corresponding to the number of channels in the image.
644
- do_convert_rgb (`bool`, *optional*, defaults to `self.do_convert_rgb`):
645
- Whether to convert the image to RGB.
646
- data_format (`ChannelDimension`, *optional*, defaults to `ChannelDimension.FIRST`):
647
- The channel dimension format for the output image. Can be one of:
648
- - `"channels_first"` or `ChannelDimension.FIRST`: image in (num_channels, height, width) format.
649
- - `"channels_last"` or `ChannelDimension.LAST`: image in (height, width, num_channels) format.
650
- - Unset: Use the channel dimension format of the input image.
651
- input_data_format (`ChannelDimension` or `str`, *optional*):
652
- The channel dimension format for the input image. Can be one of:
653
- - `"channels_first"` or `ChannelDimension.FIRST`: image in (num_channels, height, width) format.
654
- - `"channels_last"` or `ChannelDimension.LAST`: image in (height, width, num_channels) format.
655
- - `"none"` or `ChannelDimension.NONE`: image in (height, width) format.
657
- """
658
- images = make_list_of_images(images)
659
-
660
- if do_convert_rgb:
661
- images = [convert_to_rgb(image) for image in images]
662
-
663
- # All transformations expect numpy arrays.
664
- images = [to_numpy_array(image) for image in images]
665
-
666
- if is_scaled_image(images[0]) and do_rescale:
667
- logger.warning_once(
668
- "It looks like you are trying to rescale already rescaled images. If the input"
669
- " images have pixel values between 0 and 1, set `do_rescale=False` to avoid rescaling them again."
670
- )
671
- if input_data_format is None:
672
- # We assume that all images have the same channel dimension format.
673
- input_data_format = infer_channel_dimension_format(images[0])
674
-
675
- height, width = get_image_size(images[0], channel_dim=input_data_format)
676
- resized_height, resized_width = height, width
677
- processed_images = []
678
-
679
- if predetermined_grid_thw is not None:
680
- assert len(predetermined_grid_thw) == len(
681
- images
682
- ), f"len(predetermined_grid_thw) {len(predetermined_grid_thw)} == len(images) {len(images)}"
683
-
684
- for img_idx, image in enumerate(images):
685
- if do_resize:
686
- if predetermined_grid_thw is not None:
687
- (resized_height, resized_width) = predetermined_grid_thw[img_idx]
688
- resized_height *= self.patch_size
689
- resized_width *= self.patch_size
690
- else:
691
- resized_height, resized_width = smart_resize(
692
- height,
693
- width,
694
- factor=self.patch_size * self.merge_size,
695
- min_pixels=self.min_pixels,
696
- max_pixels=self.max_pixels,
697
- )
698
-
699
- image = resize(
700
- image,
701
- size=(resized_height, resized_width),
702
- resample=resample,
703
- data_format=input_data_format,
704
- )
705
- if do_rescale:
706
- image = rescale(
707
- image, scale=rescale_factor, data_format=input_data_format
708
- )
709
-
710
- if do_normalize:
711
- image = normalize(
712
- image=image,
713
- mean=image_mean,
714
- std=image_std,
715
- data_format=input_data_format,
716
- )
717
-
718
- image = to_channel_dimension_format(
719
- image, data_format, input_channel_dim=input_data_format
720
- ) # [C, H, W]
721
-
722
- processed_images.append(image)
723
- patches = np.array(processed_images)
724
- if data_format == ChannelDimension.LAST:
725
- patches = patches.transpose([0, 3, 1, 2])
726
-
727
- channel = patches.shape[1] # [time, C, H, W]
728
- grid_t = patches.shape[0]
729
- grid_h, grid_w = (
730
- resized_height // self.patch_size,
731
- resized_width // self.patch_size,
732
- )
733
- patches = patches.reshape(
734
- [
735
- grid_t,
736
- channel,
737
- grid_h // self.merge_size,
738
- self.merge_size,
739
- self.patch_size,
740
- grid_w // self.merge_size,
741
- self.merge_size,
742
- self.patch_size,
743
- ]
744
- )
745
- # [grid_t, grid_h/merge_size, grid_w/merge_size, merge_size, merge_size, C, psz, psz]
746
- patches = patches.transpose([0, 2, 5, 3, 6, 1, 4, 7])
747
-
748
- flatten_patches = patches.reshape(
749
- [grid_t * grid_h * grid_w, channel * self.patch_size * self.patch_size]
750
- ) # [grid_t * grid_h * grid_w, C * psz * psz]
751
-
752
- return flatten_patches, (grid_t, grid_h, grid_w)
753
-
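The reshape/transpose at the end of `_preprocess` turns each frame into merge-window-ordered flat patches. A dummy-data numpy sketch that follows the same axis order and only checks the resulting shape:

```python
import numpy as np

grid_t, channel, patch_size, merge_size = 1, 3, 14, 2
grid_h, grid_w = 8, 6  # resized_height // patch_size, resized_width // patch_size

patches = np.zeros((grid_t, channel, grid_h * patch_size, grid_w * patch_size))
patches = patches.reshape(
    grid_t, channel,
    grid_h // merge_size, merge_size, patch_size,
    grid_w // merge_size, merge_size, patch_size,
)
# group each merge_size x merge_size window of patches together, channel innermost
patches = patches.transpose(0, 2, 5, 3, 6, 1, 4, 7)
flat = patches.reshape(grid_t * grid_h * grid_w, channel * patch_size * patch_size)
print(flat.shape)  # (48, 588) == (grid_t * grid_h * grid_w, C * psz * psz)
```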
754
- def preprocess(
755
- self,
756
- images: ImageInput,
757
- videos: VideoInput = None,
758
- do_resize: bool = True,
759
- size: Optional[Union[int, List[int]]] = None,
760
- resample: PILImageResampling = None,
761
- do_rescale: bool = True,
762
- rescale_factor: float = 1 / 255,
763
- do_normalize: bool = True,
764
- image_mean: Optional[Union[float, List[float]]] = None,
765
- image_std: Optional[Union[float, List[float]]] = None,
766
- do_convert_rgb: bool = False,
767
- return_tensors: Optional[Union[str, TensorType]] = None,
768
- data_format: Optional[ChannelDimension] = ChannelDimension.FIRST,
769
- input_data_format: Optional[Union[str, ChannelDimension]] = None,
770
- predetermined_grid_thw=None,
771
- ):
772
- """
773
- Args:
774
- images (`ImageInput`):
775
- Image to preprocess. Expects a single or batch of images with pixel values ranging from 0 to 255. If
776
- passing in images with pixel values between 0 and 1, set `do_rescale=False`.
777
- videos (`VideoInput`):
778
- Video to preprocess. Expects a single or batch of videos with pixel values ranging from 0 to 255. If
779
- passing in videos with pixel values between 0 and 1, set `do_rescale=False`.
780
- do_resize (`bool`, *optional*, defaults to `self.do_resize`):
781
- Whether to resize the image.
782
- size (`Dict[str, int]`, *optional*, defaults to `self.size`):
783
- Size of the image after resizing. Shortest edge of the image is resized to size["shortest_edge"], with
784
- the longest edge resized to keep the input aspect ratio.
785
- resample (`int`, *optional*, defaults to `self.resample`):
786
- Resampling filter to use if resizing the image. This can be one of the enum `PILImageResampling`. Only
787
- has an effect if `do_resize` is set to `True`.
788
- do_rescale (`bool`, *optional*, defaults to `self.do_rescale`):
789
- Whether to rescale the image.
790
- rescale_factor (`float`, *optional*, defaults to `self.rescale_factor`):
791
- Rescale factor to rescale the image by if `do_rescale` is set to `True`.
792
- do_normalize (`bool`, *optional*, defaults to `self.do_normalize`):
793
- Whether to normalize the image.
794
- image_mean (`float` or `List[float]`, *optional*, defaults to `self.image_mean`):
795
- Image mean to use for normalization. Only has an effect if `do_normalize` is set to `True`.
796
- image_std (`float` or `List[float]`, *optional*, defaults to `self.image_std`):
797
- Image standard deviation to use for normalization. Only has an effect if `do_normalize` is set to
798
- `True`.
799
- do_convert_rgb (`bool`, *optional*, defaults to `self.do_convert_rgb`):
800
- Whether to convert the image to RGB.
801
- return_tensors (`str` or `TensorType`, *optional*):
802
- The type of tensors to return. Can be one of:
803
- - Unset: Return a list of `np.ndarray`.
804
- - `TensorType.PYTORCH` or `'pt'`: Return a batch of type `torch.Tensor`.
805
- - `TensorType.NUMPY` or `'np'`: Return a batch of type `np.ndarray`.
806
- data_format (`ChannelDimension` or `str`, *optional*, defaults to `ChannelDimension.FIRST`):
807
- The channel dimension format for the output image. Can be one of:
808
- - `"channels_first"` or `ChannelDimension.FIRST`: image in (num_channels, height, width) format.
809
- - `"channels_last"` or `ChannelDimension.LAST`: image in (height, width, num_channels) format.
810
- - Unset: Use the channel dimension format of the input image.
811
- input_data_format (`ChannelDimension` or `str`, *optional*):
812
- The channel dimension format for the input image. If unset, the channel dimension format is inferred
813
- from the input image. Can be one of:
814
- - `"channels_first"` or `ChannelDimension.FIRST`: image in (num_channels, height, width) format.
815
- - `"channels_last"` or `ChannelDimension.LAST`: image in (height, width, num_channels) format.
816
- - `"none"` or `ChannelDimension.NONE`: image in (height, width) format.
817
-
818
- """
819
- do_resize = do_resize if do_resize is not None else self.do_resize
820
- size = size if size is not None else self.size
821
- resample = resample if resample is not None else self.resample
822
- do_rescale = do_rescale if do_rescale is not None else self.do_rescale
823
- rescale_factor = (
824
- rescale_factor if rescale_factor is not None else self.rescale_factor
825
- )
826
- do_normalize = do_normalize if do_normalize is not None else self.do_normalize
827
- image_mean = image_mean if image_mean is not None else self.image_mean
828
- image_std = image_std if image_std is not None else self.image_std
829
- do_convert_rgb = (
830
- do_convert_rgb if do_convert_rgb is not None else self.do_convert_rgb
831
- )
832
-
833
- if images is not None:
834
- images = make_batched_images(images)
835
-
836
- if images is not None and not valid_images(images):
837
- raise ValueError(
838
- "Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, "
839
- "torch.Tensor."
840
- )
841
-
842
- data = {}
843
- if images is not None:
844
- pixel_values, vision_grid_thws = [], []
845
- for img_idx, image in enumerate(images):
846
- if predetermined_grid_thw is not None:
847
- predetermined_grid_thw_one = [predetermined_grid_thw[img_idx]]
848
- else:
849
- predetermined_grid_thw_one = None
850
- patches, image_grid_thw = self._preprocess(
851
- image,
852
- do_resize=do_resize,
853
- resample=resample,
854
- do_rescale=do_rescale,
855
- rescale_factor=rescale_factor,
856
- do_normalize=do_normalize,
857
- image_mean=image_mean,
858
- image_std=image_std,
859
- data_format=data_format,
860
- do_convert_rgb=do_convert_rgb,
861
- input_data_format=input_data_format,
862
- predetermined_grid_thw=predetermined_grid_thw_one,
863
- )
864
- pixel_values.extend(patches)
865
- vision_grid_thws.append(image_grid_thw)
866
- pixel_values = np.array(pixel_values)
867
- vision_grid_thws = np.array(vision_grid_thws)
868
- data.update(
869
- {"pixel_values": pixel_values, "image_grid_thw": vision_grid_thws}
870
- )
871
-
872
- if videos is not None:
873
- videos = make_batched_videos(videos)
874
- pixel_values, vision_grid_thws = [], []
875
- for images in videos:
876
- patches, video_grid_thw = self._preprocess(
877
- images,
878
- do_resize=do_resize,
879
- resample=resample,
880
- do_rescale=do_rescale,
881
- rescale_factor=rescale_factor,
882
- do_normalize=do_normalize,
883
- image_mean=image_mean,
884
- image_std=image_std,
885
- data_format=data_format,
886
- do_convert_rgb=do_convert_rgb,
887
- input_data_format=input_data_format,
888
- predetermined_grid_thw=predetermined_grid_thw,
889
- )
890
- pixel_values.extend(patches)
891
- vision_grid_thws.append(video_grid_thw)
892
- pixel_values = np.array(pixel_values)
893
- vision_grid_thws = np.array(vision_grid_thws)
894
-
895
- data.update(
896
- {
897
- "pixel_values_videos": pixel_values,
898
- "video_grid_thw": vision_grid_thws,
899
- }
900
- )
901
-
902
- return BatchFeature(data=data, tensor_type=return_tensors)
903
-
904
-
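A hedged usage sketch of the image processor's `preprocess` entry point. It assumes the pre-deletion module is still importable as `processing_ernie4_5_vl` (for example when loading the model with `trust_remote_code`); adjust the import to your setup:

```python
import numpy as np
from PIL import Image

# assumption: this (deleted) file is available on the path under its original name
from processing_ernie4_5_vl import Ernie4_5_VLImageProcessor

processor = Ernie4_5_VLImageProcessor()
image = Image.fromarray(np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8))

out = processor.preprocess(images=image, do_convert_rgb=True, return_tensors="pt")
print(out["pixel_values"].shape)  # (num_patches, C * patch_size * patch_size)
print(out["image_grid_thw"])      # one (t, grid_h, grid_w) row per image
```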
905
- RAW_VIDEO_DIR = "./download_tmp/raw_video/"
906
- RAW_IMAGE_DIR = "./download_tmp/raw_images/"
907
- EXTRACTED_FRAME_DIR = "./download_tmp/extracted_frames/"
908
- TMP_DIR = "./download_tmp/upload_tmp/"
909
-
910
- FONT_PATH = os.path.join(Path(__file__).parent.absolute(), "Roboto-Regular.ttf")
911
- if not os.path.exists(FONT_PATH):
912
- ttf = requests.get("https://paddlenlp.bj.bcebos.com/vision-language-models/materials/Roboto-Regular.ttf")
913
- Path(FONT_PATH).write_bytes(ttf.content)
914
-
915
-
916
- def is_gif(data: bytes) -> bool:
917
- """
918
- Check whether a byte string is a GIF based on its magic header.
919
- """
920
- return data[:6] in (b"GIF87a", b"GIF89a")
921
-
922
-
923
- class VideoReaderWrapper(decord.VideoReader):
924
- """
925
- Solving memory leak bug
926
-
927
- https://github.com/dmlc/decord/issues/208
928
- """
929
-
930
- def __init__(self, video_path, *args, **kwargs):
931
- with ntf(delete=True, suffix=".gif") as gif_file:
932
- gif_input = None
933
- self.original_file = None
934
- if isinstance(video_path, str):
935
- self.original_file = video_path
936
- if video_path.lower().endswith(".gif"):
937
- gif_input = video_path
938
- elif isinstance(video_path, bytes):
939
- if is_gif(video_path):
940
- gif_file.write(video_path)
941
- gif_input = gif_file.name
942
- elif isinstance(video_path, io.BytesIO):
943
- video_path.seek(0)
944
- tmp_bytes = video_path.read()
945
- video_path.seek(0)
946
- if is_gif(tmp_bytes):
947
- gif_file.write(tmp_bytes)
948
- gif_input = gif_file.name
949
-
950
- if gif_input is not None:
951
- clip = mp.VideoFileClip(gif_input)
952
- mp4_file = ntf(delete=False, suffix=".mp4")
953
- clip.write_videofile(mp4_file.name, verbose=False, logger=None)
954
- clip.close()
955
- video_path = mp4_file.name
956
- self.original_file = video_path
957
-
958
- super().__init__(video_path, *args, **kwargs)
959
- self.seek(0)
960
-
961
- def __getitem__(self, key):
962
- frames = super().__getitem__(key)
963
- self.seek(0)
964
- return frames
965
-
966
- def __del__(self):
967
- if self.original_file and os.path.exists(self.original_file):
968
- os.remove(self.original_file)
969
-
970
-
971
- def get_filename(url=None):
972
- """
973
- Get Filename
974
- """
975
- if url is None:
976
- return str(uuid.uuid4()).replace("-", "")
977
- t = datetime.datetime.now()
978
- if not isinstance(url, bytes):
979
- url = url.encode("utf-8")
980
-
981
- md5_hash = hashlib.md5(url).hexdigest()
982
- pid = os.getpid()
983
- tid = threading.get_ident()
984
-
985
- # Remove the suffix to prevent save-jpg from reporting errors
986
- image_filename = f"{t.year}-{t.month:02d}-{t.day:02d}-{pid}-{tid}-{md5_hash}"
987
- return image_filename
988
-
989
-
990
- def file_download(url, download_dir, save_to_disk=False, retry=0, retry_interval=3):
991
- """
992
- Download `url`; if `url` is already a PIL image or a VideoReaderWrapper, it is returned directly.
994
- Args:
995
- url (str, PIL.Image.Image): http(s) URL, local file path, base64 string, or io.BytesIO byte stream
996
- download_dir: directory used for the saved file when save_to_disk=True
997
- save_to_disk: whether to save the downloaded bytes to disk and return the local path
997
- """
998
-
999
- if isinstance(url, Image.Image):
1000
- return url
1001
- elif isinstance(url, VideoReaderWrapper):
1002
- return url
1003
- elif url.startswith("http"):
1004
- response = requests.get(url)
1005
- bytes_data = response.content
1006
- elif os.path.isfile(url):
1007
- if save_to_disk:
1008
- return url
1009
- bytes_data = open(url, "rb").read()
1010
- else:
1011
- bytes_data = base64.b64decode(url)
1012
- if not save_to_disk:
1013
- return bytes_data
1014
-
1015
- download_path = os.path.join(download_dir, get_filename(url))
1016
- Path(download_path).parent.mkdir(parents=True, exist_ok=True)
1017
- with open(download_path, "wb") as f:
1018
- f.write(bytes_data)
1019
- return download_path
1020
-
1021
-
1022
- def get_downloadable(
1023
- url, download_dir=RAW_VIDEO_DIR, save_to_disk=False, retry=0, retry_interval=3
1024
- ):
1025
- """download video and store it in the disk
1026
-
1027
- return downloaded **path** if save_to_disk is set to true
1028
- return downloaded **bytes** if save_to_disk is set to false
1029
- """
1030
-
1031
- if not os.path.exists(download_dir):
1032
- os.makedirs(download_dir)
1033
- downloaded_path = file_download(
1034
- url,
1035
- download_dir,
1036
- save_to_disk=save_to_disk,
1037
- retry=retry,
1038
- retry_interval=retry_interval,
1039
- )
1040
- return downloaded_path
1041
-
1042
-
1043
- def get_downloadable_image(
1044
- download_path, need_exif_info, retry_max_time=0, retry_interval=3
1045
- ):
1046
- """
1047
- Get downloadable with exif info and image processing
1048
- """
1049
-
1050
- def get_image_exif(image):
1051
- exif_data = image._getexif()
1052
- exif_info = {}
1053
- if exif_data is not None:
1054
- for tag, value in exif_data.items():
1055
- tag_name = TAGS.get(tag, tag)
1056
- exif_info[tag_name] = value.strip()
1057
- return exif_info
1058
-
1059
- def has_transparent_background(img):
1060
- """has_transparent_background"""
1061
- if img.mode in ("RGBA", "LA") or (
1062
- img.mode == "P" and "transparency" in img.info
1063
- ):
1064
- # Check for any pixel with alpha channel less than 255 (fully opaque)
1065
- alpha = img.convert("RGBA").split()[-1]
1066
- if alpha.getextrema()[0] < 255:
1067
- return True
1068
- return False
1069
-
1070
- def add_white_background(img):
1071
- """
1072
- Add a white background to a transparent background image
1073
- """
1074
- if img.mode != "RGBA":
1075
- img = img.convert("RGBA")
1076
- # Create an image with a white background and the same size as the original image
1077
- img_white_background = Image.new("RGBA", img.size, (255, 255, 255))
1078
-
1079
- # Paste the original image onto a white background
1080
- img_white_background.paste(img, (0, 0), img)
1081
-
1082
- return img_white_background
1083
-
1084
- def change_I16_to_L(img):
1085
- """
1086
- Convert image from I;16 mode to L mode
1087
- """
1088
- # Since the point function in I mode only supports addition, subtraction, and multiplication,
1089
- # the following * (1 / 256) cannot be changed to division.
1090
- return img.point(lambda i: i * (1 / 256)).convert("L")
1091
-
1092
- image = get_downloadable(
1093
- download_path,
1094
- save_to_disk=False,
1095
- retry=retry_max_time,
1096
- retry_interval=retry_interval,
1097
- )
1098
- if isinstance(image, Image.Image):
1099
- pil_image = image
1100
- else:
1101
- pil_image = Image.open(io.BytesIO(image))
1102
- if need_exif_info:
1103
- try:
1104
- exif_info = get_image_exif(pil_image)
1105
- except Exception as why:
1106
- exif_info = {}
1107
- else:
1108
- exif_info = {}
1109
-
1110
- try:
1111
- if pil_image.mode == "I;16":
1112
- pil_image = change_I16_to_L(pil_image)
1113
- if has_transparent_background(pil_image):
1114
- pil_image = add_white_background(pil_image)
1115
- except Exception as e:
1116
- pass
1117
-
1118
- return pil_image.convert("RGB"), exif_info
1119
-
1120
-
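The transparency handling in `get_downloadable_image` composites RGBA inputs onto a white canvas before the final RGB conversion. A standalone PIL sketch of that step:

```python
from PIL import Image

# a half-transparent red square, standing in for a downloaded RGBA image
img = Image.new("RGBA", (64, 64), (255, 0, 0, 128))

# paste onto an opaque white canvas, using the image's own alpha band as the mask
background = Image.new("RGBA", img.size, (255, 255, 255, 255))
background.paste(img, (0, 0), img)
rgb = background.convert("RGB")
print(rgb.getpixel((0, 0)))  # roughly (255, 127, 127): red blended with white
```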
1121
- def read_video_decord(video_path, save_to_disk):
1122
- """get reader and meta by decord"""
1123
- video_path = get_downloadable(video_path, save_to_disk=save_to_disk)
1124
- if isinstance(video_path, VideoReaderWrapper):
1125
- video_reader = video_path
1126
- else:
1127
- if isinstance(video_path, bytes):
1128
- video_path = io.BytesIO(video_path)
1129
- video_reader = VideoReaderWrapper(video_path, num_threads=1)
1130
- vlen = len(video_reader)
1131
- fps = video_reader.get_avg_fps()
1132
- duration = vlen / float(fps)
1133
-
1134
- video_meta = {"fps": fps, "duration": duration, "num_of_frame": vlen}
1135
-
1136
- return video_reader, video_meta, video_path
1137
-
1138
-
1139
- def get_frame_indices(
1140
- vlen,
1141
- target_frames=-1,
1142
- target_fps=-1,
1143
- frames_sample="middle",
1144
- fix_start=None,
1145
- input_fps=-1,
1146
- ):
1147
- """get_frame_indices"""
1148
- assert frames_sample in ["rand", "middle", "leading"]
1149
- if target_frames > 0:
1150
- assert target_fps <= 0, "target_fps must not be positive if target_frames is given."
1151
- if target_frames > vlen:
1152
- acc_samples = vlen
1153
- logger.info(
1154
- f"target_frames={target_frames} is larger than video length {vlen}, "
1155
- f"will sample {acc_samples} frames."
1156
- )
1157
- else:
1158
- acc_samples = target_frames
1159
- logger.debug(
1160
- f"sampling at target_frames={target_frames}, frames_sample={frames_sample}"
1161
- )
1162
-
1163
- # split the video into `acc_samples` intervals, and sample from each interval.
1164
- intervals = np.linspace(start=0, stop=vlen, num=acc_samples + 1).astype(int)
1165
- ranges = []
1166
- for idx, interv in enumerate(intervals[:-1]):
1167
- ranges.append((interv, intervals[idx + 1] - 1))
1168
- if frames_sample == "rand":
1169
- try:
1170
- frame_indices = [random.choice(range(x[0], x[1])) for x in ranges]
1171
- except Exception as e:
1172
- frame_indices = np.random.permutation(vlen)[:acc_samples]
1173
- frame_indices.sort()
1174
- frame_indices = list(frame_indices)
1175
- elif fix_start is not None:
1176
- frame_indices = [x[0] + fix_start for x in ranges]
1177
- elif frames_sample == "leading":
1178
- frame_indices = [x[0] for x in ranges]
1179
- elif frames_sample == "middle":
1180
- frame_indices = [(x[0] + x[1]) // 2 for x in ranges]
1181
- else:
1182
- raise NotImplementedError
1183
-
1184
- elif target_fps > 0:
1185
- assert (
1186
- target_frames <= 0
1187
- ), "target_frames must be negative if target_fps is given."
1188
- assert input_fps > 0, "input_fps must be provided if target_fps is given."
1189
- logger.info(f"sampling at fps={target_fps}, frames_sample={frames_sample}")
1190
- duration = float(vlen) / input_fps
1191
- delta = (
1192
- 1 / target_fps
1193
- ) # gap between frames, this is also the clip length each frame represents
1194
- if frames_sample == "middle":
1195
- frame_seconds = np.arange(0 + delta / 2, duration + delta / 2, delta)
1196
- elif frames_sample == "leading":
1197
- frame_seconds = np.arange(0, duration, delta)
1198
- if frames_sample == "rand":
1199
- frame_seconds = np.arange(0 + delta / 2, duration + delta / 2, delta)
1200
- rand_offset = np.random.rand(*(frame_seconds.shape)) - 0.5
1201
- frame_seconds += rand_offset * delta
1202
- frame_indices = np.around(frame_seconds * input_fps).astype(int)
1203
- frame_indices = [e for e in frame_indices if e < vlen]
1204
-
1205
- else:
1206
- raise ValueError(
1207
- "Must provide either positive target_fps or positive target_frames."
1208
- )
1209
-
1210
- return frame_indices
1211
-
1212
-
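To make the fixed-frame branch above concrete: with frames_sample="middle" the clip is split into target_frames equal intervals and the midpoint of each interval is taken. A standalone sketch with illustrative numbers (a 300-frame clip sampled down to 6 frames):

    import numpy as np

    def middle_sample(vlen, target_frames):
        # Split [0, vlen) into target_frames intervals and take each midpoint,
        # mirroring the frames_sample="middle" branch of get_frame_indices.
        intervals = np.linspace(start=0, stop=vlen, num=target_frames + 1).astype(int)
        ranges = [(intervals[i], intervals[i + 1] - 1) for i in range(target_frames)]
        return [int(lo + hi) // 2 for lo, hi in ranges]

    print(middle_sample(300, 6))  # [24, 74, 124, 174, 224, 274]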
1213
- def read_frames_decord(
1214
- video_path,
1215
- video_reader,
1216
- video_meta,
1217
- target_frames=-1,
1218
- target_fps=-1,
1219
- frames_sample="middle",
1220
- fix_start=None,
1221
- save_to_disk=False,
1222
- cache_dir=EXTRACTED_FRAME_DIR,
1223
- frame_indices=None,
1224
- tol=10,
1225
- ):
1226
- """get frames by decord"""
1227
-
1228
- if frame_indices is None:
1229
- frame_indices = get_frame_indices(
1230
- video_meta["num_of_frame"],
1231
- target_frames=target_frames,
1232
- target_fps=target_fps,
1233
- frames_sample=frames_sample,
1234
- fix_start=fix_start,
1235
- input_fps=video_meta["fps"],
1236
- )
1237
-
1238
- frames = []
1239
- for frame_indice_index in range(0, len(frame_indices)):
1240
- frame_indice = frame_indices[frame_indice_index]
1241
- try:
1242
- frames.append(video_reader[frame_indice].asnumpy()) # (T, H, W, C)
1243
- except Exception as e:
1244
- logger.debug(f"encounter error when get frame: {frame_indice}, error: {e}")
1245
- previous_counter = 1
1246
- later_counter = 1
1247
- previous_after_flag = True
1248
- if frame_indice == 0 or frame_indice == len(video_reader) - 1:
1249
- cur_tol = tol * 2
1250
- else:
1251
- cur_tol = tol
1252
- while previous_counter < cur_tol or later_counter < cur_tol:
1253
- if previous_after_flag:
1254
- if frame_indice - previous_counter < 0:
1255
- previous_counter += 1
1256
- previous_after_flag = not previous_after_flag
1257
- continue
1258
- try:
1259
- frames.append(
1260
- video_reader[frame_indice - previous_counter].asnumpy()
1261
- )
1262
- logger.info(
1263
- f"replace {frame_indice}-th frame with {frame_indice-previous_counter}-th frame"
1264
- )
1265
- frame_indices[frame_indice_index] = (
1266
- frame_indice - previous_counter
1267
- )
1268
- break
1269
- except Exception as e:
1270
- previous_counter += 1
1271
- else:
1272
- if frame_indice + later_counter >= len(video_reader):
1273
- later_counter += 1
1274
- previous_after_flag = not previous_after_flag
1275
- continue
1276
- try:
1277
- frames.append(
1278
- video_reader[frame_indice + later_counter].asnumpy()
1279
- )
1280
- logger.info(
1281
- f"replace {frame_indice}-th frame with {frame_indice+later_counter}-th frame"
1282
- )
1283
- frame_indices[frame_indice_index] = frame_indice + later_counter
1284
- break
1285
- except Exception as e:
1286
- later_counter += 1
1287
- previous_after_flag = not previous_after_flag
1288
-
1289
- frames = np.stack(frames, axis=0)
1290
- assert len(frames) == len(
1291
- frame_indices
1292
- ), f"len(frames): {len(frames)} != len(frame_indices): {len(frame_indices)}"
1293
-
1294
- ret = []
1295
-
1296
- url_sha1 = get_filename()
1297
- for idx, frame in enumerate(frames):
1298
- tmp = Image.fromarray(frame, "RGB")
1299
- if save_to_disk:
1300
- save_path = os.path.join(cache_dir, f"{url_sha1}", f"{idx}.png")
1301
- if not os.path.exists(os.path.dirname(save_path)):
1302
- os.makedirs(os.path.dirname(save_path))
1303
- tmp.save(save_path)
1304
- tmp = save_path
1305
- ret.append(tmp)
1306
-
1307
- time_stamps = [
1308
- frame_idx * video_meta["duration"] / video_meta["num_of_frame"]
1309
- for frame_idx in frame_indices
1310
- ]
1311
-
1312
- return ret, frame_indices, time_stamps
1313
-
1314
-
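The time_stamps returned above are plain proportions of the clip duration (frame_idx * duration / num_of_frame); for a 10-second, 300-frame clip, frame 150 therefore maps to 5.0 s. A one-liner with those illustrative values:

    duration, num_frames = 10.0, 300  # illustrative clip metadata
    print([i * duration / num_frames for i in (0, 150, 299)])  # [0.0, 5.0, 9.966666666666667]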
1315
- def render_single_image_with_timestamp(
1316
- image: Image, number: str, rate: float, font_path: str = FONT_PATH
1317
- ):
1318
- """
1319
- Function: Renders a timestamp to the image of pil.image
1320
- The timestamp size is the rate of min(width, height)
1321
- The font color is black, the outline is white, and the outline size is 10% of the font
1322
- Returns an Image object
1323
- """
1324
- draw = ImageDraw.Draw(image)
1325
- width, height = image.size
1326
- font_size = int(min(width, height) * rate)
1327
- outline_size = int(font_size * 0.1)
1328
- font = ImageFont.truetype(font_path, font_size)
1329
- x = 0
1330
- y = 0
1331
-
1332
- # Draw a black timestamp with a white border
1333
- draw.text(
1334
- (x, y),
1335
- number,
1336
- font=font,
1337
- fill=(0, 0, 0),
1338
- stroke_width=outline_size,
1339
- stroke_fill=(255, 255, 255),
1340
- )
1341
-
1342
- return image
1343
-
1344
-
1345
- def timestamp_converting(time_stamp_in_seconds):
1346
- """
1347
- Convert a timestamp from seconds to hh:mm:ss format.
1348
- """
1349
- # get hours
1350
- hours = 0
1351
- while time_stamp_in_seconds >= 3600:
1352
- hours += 1
1353
- time_stamp_in_seconds -= 3600
1354
- # get minutes
1355
- mins = 0
1356
- while time_stamp_in_seconds >= 60:
1357
- mins += 1
1358
- time_stamp_in_seconds -= 60
1359
- time_hours = f"{int(hours):02d}"
1360
- time_mins = f"{int(mins):02d}"
1361
- time_secs = f"{time_stamp_in_seconds:05.02f}"
1362
- fi_time_stamp = time_hours + ":" + time_mins + ":" + time_secs
1363
-
1364
- return fi_time_stamp
1365
-
1366
-
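The loop-based conversion above can also be written with divmod; an equivalent sketch for reference:

    def to_hms(seconds):
        # Same output format as timestamp_converting, e.g. 125.5 -> "00:02:05.50".
        hours, rem = divmod(seconds, 3600)
        mins, secs = divmod(rem, 60)
        return f"{int(hours):02d}:{int(mins):02d}:{secs:05.2f}"

    print(to_hms(125.5), to_hms(3723.25))  # 00:02:05.50 01:02:03.25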
1367
- def render_frame_timestamp(frame, timestamp, font_rate=0.1):
1368
- """
1369
- Function, given a frame, render the index in order
1370
- Logic: render the index to the upper left corner of the image
1371
- frame: frame, PIL.Image object
1372
- timestamp: timestamp, in seconds
1373
- font_rate: the ratio of font size to min(wi, hei)
1374
- """
1375
- time_stamp = "time: " + timestamp_converting(timestamp)
1376
- new_frame = render_single_image_with_timestamp(frame, time_stamp, font_rate)
1377
-
1378
- return new_frame
1379
-
1380
-
1381
- IDS_TYPE_FLAG = {"text": 0, "image": 1, "video": 2, "audio": 3}
1382
-
1383
-
1384
- class Ernie4_5_VLProcessor(ProcessorMixin):
1385
- """
1386
- Processes multimodal chat messages into model-ready inputs,
1387
- handling text, images, and videos with 3D positional embeddings.
1388
- """
1389
-
1390
- attributes = ["image_processor", "tokenizer"]
1391
- valid_kwargs = [
1392
- "chat_template",
1393
- "spatial_conv_size",
1394
- "temporal_conv_size",
1395
- "image_min_pixels",
1396
- "image_max_pixels",
1397
- "video_min_pixels",
1398
- "video_max_pixels",
1399
- "video_target_frames",
1400
- "video_frames_sample",
1401
- "video_max_frames",
1402
- "video_min_frames",
1403
- "video_fps",
1404
- ]
1405
- image_processor_class = "AutoImageProcessor"
1406
- tokenizer_class = "AutoTokenizer"
1407
-
1408
- CLS_TOKEN = "<|begin_of_sentence|>"
1409
- SEP_TOKEN = "<|end_of_sentence|>"
1410
- IMG_START = "<|IMAGE_START|>"
1411
- IMG_END = "<|IMAGE_END|>"
1412
- VID_START = "<|VIDEO_START|>"
1413
- VID_END = "<|VIDEO_END|>"
1414
-
1415
- def __init__(
1416
- self,
1417
- image_processor=None,
1418
- tokenizer=None,
1419
- chat_template=None,
1420
- spatial_conv_size: int = 2,
1421
- temporal_conv_size: int = 2,
1422
- image_min_pixels: int = 4 * 28 * 28,
1423
- image_max_pixels: int = 6177 * 28 * 28,
1424
- video_min_pixels: int = 299 * 28 * 28,
1425
- video_max_pixels: int = 1196 * 28 * 28,
1426
- video_target_frames: int = -1,
1427
- video_frames_sample: str = "leading",
1428
- video_max_frames: int = 180,
1429
- video_min_frames: int = 16,
1430
- video_fps: int = 2,
1431
- **kwargs,
1432
- ):
1433
- super().__init__(image_processor, tokenizer, chat_template=chat_template)
1434
- self.tokenizer.ignored_index = -100
1435
-
1436
- # Convolution sizes for patch aggregation
1437
- self.spatial_conv_size = spatial_conv_size
1438
- self.temporal_conv_size = temporal_conv_size
1439
-
1440
- # Pixel constraints
1441
- self.image_min_pixels = image_min_pixels
1442
- self.image_max_pixels = image_max_pixels
1443
- self.video_min_pixels = video_min_pixels
1444
- self.video_max_pixels = video_max_pixels
1445
-
1446
- # Video sampling parameters
1447
- self.target_frames = video_target_frames
1448
- self.frames_sample = video_frames_sample
1449
- self.max_frames = video_max_frames
1450
- self.min_frames = video_min_frames
1451
- self.fps = video_fps
1452
-
1453
- # Special tokens and IDs
1454
- self.cls_token = self.CLS_TOKEN
1455
- self.sep_token = self.SEP_TOKEN
1456
- self.image_start = self.IMG_START
1457
- self.image_end = self.IMG_END
1458
- self.video_start = self.VID_START
1459
- self.video_end = self.VID_END
1460
- self.image_patch_id = self.tokenizer.convert_tokens_to_ids(
1461
- "<|IMAGE_PLACEHOLDER|>"
1462
- )
1463
-
1464
- self.token_type_mapping = self._build_token_type_mapping()
1465
- self.is_training = True
1466
- self.role_prefixes = {"system": "", "user": "User: ", "bot": "Assistant: "}
1467
-
1468
- def _build_token_type_mapping(self) -> Dict[Any, int]:
1469
- mapping = defaultdict(lambda: IDS_TYPE_FLAG["text"])
1470
- for token in (self.IMG_START, self.IMG_END, self.VID_START, self.VID_END):
1471
- mapping[token] = IDS_TYPE_FLAG["image"]
1472
- mapping[self.image_patch_id] = IDS_TYPE_FLAG["image"]
1473
- return mapping
1474
-
1475
- def train(self) -> None:
1476
- """Enable training mode (produces labels)."""
1477
- self.is_training = True
1478
-
1479
- def eval(self) -> None:
1480
- """Enable evaluation mode (doesn't produce labels)."""
1481
- self.is_training = False
1482
-
1483
- def _download_image(
1484
- self,
1485
- item: Dict,
1486
- ):
1487
- """Download image from url and resize it to the specified size."""
1488
- url_info = item.get("image_url", {})
1489
- url = url_info.get("url")
1490
- w = url_info.get("image_width", None)
1491
- h = url_info.get("image_height", None)
1492
- data = get_downloadable(url, download_dir=RAW_IMAGE_DIR, save_to_disk=False)
1493
-
1494
- img = Image.open(io.BytesIO(data) if isinstance(data, bytes) else data)
1495
- if w and h:
1496
- img = img.resize((w, h))
1497
- return img
1498
-
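A dependency-light equivalent of this download path, using requests in place of the repository's get_downloadable helper (the URL below is a placeholder, not taken from the repo):

    import io
    import requests
    from PIL import Image

    def fetch_image(url, width=None, height=None):
        # Download the raw bytes, decode with PIL, and optionally resize,
        # as _download_image does for "image_url" content items.
        data = requests.get(url, timeout=30).content
        img = Image.open(io.BytesIO(data))
        if width and height:
            img = img.resize((width, height))
        return img

    # img = fetch_image("https://example.com/cat.png", 448, 448)  # placeholder URL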
1499
- def _download_video(self, item: Dict):
1500
- """Download video from url and resize it to the specified size."""
1501
- url_info = item.get("video_url", {})
1502
- url = url_info.get("url")
1503
-
1504
- frames = self._load_and_process_video(url, item)
1505
-
1506
- pixel_stack = np.stack([np.array(f.convert("RGB")) for f in frames], axis=0)
1507
- return pixel_stack
1508
-
1509
- def process_vision_info(self, messages: List[Dict[str, Any]]):
1510
- """Preprocess messages into lists of text, images, and videos."""
1511
- images = []
1512
- videos = []
1513
-
1514
- for msg in messages:
1515
- content_items = msg.get("content")
1516
- if not isinstance(content_items, list):
1517
- content_items = [content_items]
1518
-
1519
- for item in content_items:
1520
- if item.get("type") == "image_url":
1521
- img = self._download_image(item)
1522
- images.append(img)
1523
- elif item.get("type") == "video_url":
1524
- pixel_stack = self._download_video(item)
1525
- videos.append(pixel_stack)
1526
-
1527
- return images, videos
1528
-
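For reference, the messages structure that process_vision_info walks is a list of role/content dicts whose content items carry a "type" key; a made-up example of that shape (the text entry is an assumption here, since only image_url and video_url items are consumed):

    messages = [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What is in this picture?"},
                {"type": "image_url", "image_url": {"url": "https://example.com/dog.jpg"}},  # placeholder URL
            ],
        }
    ]
    # Each "image_url" item is downloaded into `images`; each "video_url" item
    # is decoded into a (T, H, W, C) pixel stack and appended to `videos`.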
1529
- def __call__(
1530
- self,
1531
- text: Union[str, List[str]],
1532
- images: List[Image.Image] = None,
1533
- videos: List[List[Image.Image]] = None,
1534
- **kwargs,
1535
- ) -> BatchFeature:
1536
- """
1537
- Convert chat messages into model inputs.
1538
- Returns a dict with input_ids, token_type_ids, position_ids, images, grid_thw, image_type_ids, labels.
1539
- """
1540
- outputs = {
1541
- "input_ids": [],
1542
- "token_type_ids": [],
1543
- "position_ids": [],
1544
- "images": [],
1545
- "grid_thw": [],
1546
- "image_type_ids": [],
1547
- "cur_position": 0,
1548
- "pic_cnt": 0,
1549
- "video_cnt": 0,
1550
- }
1551
- if images is None:
1552
- images = []
1553
- if videos is None:
1554
- videos = []
1555
- if not isinstance(text, list):
1556
- text = [text]
1557
-
1558
- texts = text[0]
1559
-
1560
- new_video_seg = True
1561
- for text_with_image in texts.split(self.VID_START + "<|video@placeholder|>" + self.VID_END):
1562
- new_text_seg = True
1563
- if not new_video_seg:
1564
- self._add_video(videos[outputs["video_cnt"]], outputs)
1565
- for text in text_with_image.split(self.IMG_START + "<|image@placeholder|>" + self.IMG_END):
1566
- if not new_text_seg:
1567
- self._add_image(images[outputs["pic_cnt"]], outputs)
1568
- self._add_text(text, outputs)
1569
- new_text_seg = False
1570
- new_video_seg = False
1571
-
1572
- for key in ["cur_position", "pic_cnt", "video_cnt"]:
1573
- outputs.pop(key, None)
1574
-
1575
- outputs = self._pack_outputs(outputs)
1576
- for key in outputs.keys():
1577
- if isinstance(outputs[key], np.ndarray):
1578
- if key in ["images", "grid_thw"]:
1579
- outputs[key] = torch.tensor(np.array(outputs[key]))
1580
- else:
1581
- outputs[key] = torch.tensor(np.array([outputs[key]]))
1582
-
1583
- return BatchFeature(data=outputs)
1584
-
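To see how __call__ interleaves text with media, note that it simply splits the prompt on the literal placeholder blocks and inserts one image (or video) at every split boundary. A toy check using the class's token strings and a made-up prompt:

    IMG_START, IMG_END = "<|IMAGE_START|>", "<|IMAGE_END|>"
    prompt = f"Describe this: {IMG_START}<|image@placeholder|>{IMG_END} in one sentence."

    pieces = prompt.split(IMG_START + "<|image@placeholder|>" + IMG_END)
    print(pieces)           # ['Describe this: ', ' in one sentence.']
    print(len(pieces) - 1)  # 1 image slot, matching images[0] in the loop above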
1585
- def _add_special_token(self, token: Union[str, int], outputs: Dict) -> None:
1586
- """add special token to outputs"""
1587
- token_id = (
1588
- token
1589
- if isinstance(token, int)
1590
- else self.tokenizer.convert_tokens_to_ids(token)
1591
- )
1592
- outputs["input_ids"].append(token_id)
1593
- outputs["token_type_ids"].append(self.token_type_mapping[token])
1594
- pos = outputs["cur_position"]
1595
- outputs["position_ids"].append([pos] * 3)
1596
- outputs["cur_position"] += 1
1597
-
1598
- def _add_text(self, text: str, outputs: Dict) -> None:
1599
- """add text to outputs"""
1600
- tokens = self.tokenizer.convert_tokens_to_ids(self.tokenizer.tokenize(text))
1601
- outputs["input_ids"].extend(tokens)
1602
- outputs["token_type_ids"].extend([IDS_TYPE_FLAG["text"]] * len(tokens))
1603
-
1604
- start = outputs["cur_position"]
1605
- for i in range(len(tokens)):
1606
- outputs["position_ids"].append([start + i] * 3)
1607
- outputs["cur_position"] += len(tokens)
1608
-
1609
- def _add_image(self, img: Image.Image, outputs: Dict) -> None:
1610
- """add image to outputs"""
1611
- outputs["pic_cnt"] += 1
1612
- self._add_special_token(self.IMG_START, outputs)
1613
-
1614
- patches_h, patches_w = self.image_processor.get_smarted_resize(
1615
- img.height,
1616
- img.width,
1617
- min_pixels=self.image_min_pixels,
1618
- max_pixels=self.image_max_pixels,
1619
- )[1]
1620
- num_tokens = (patches_h * patches_w) // (self.spatial_conv_size**2)
1621
-
1622
- outputs["input_ids"].extend([self.image_patch_id] * num_tokens)
1623
- outputs["token_type_ids"].extend([IDS_TYPE_FLAG["image"]] * num_tokens)
1624
-
1625
- pos_ids = self._compute_3d_positions(
1626
- 1, patches_h, patches_w, outputs["cur_position"]
1627
- )
1628
- outputs["position_ids"].extend(pos_ids)
1629
- outputs["cur_position"] = np.max(pos_ids) + 1
1630
-
1631
- # Preprocess pixels
1632
- ret = self.image_processor.preprocess(
1633
- images=[img.convert("RGB")],
1634
- do_normalize=False,
1635
- do_rescale=False,
1636
- predetermined_grid_thw=np.array([[patches_h, patches_w]]),
1637
- do_convert_rgb=True,
1638
- input_data_format=ChannelDimension.LAST,
1639
- )
1640
- outputs["images"].append(ret["pixel_values"])
1641
- outputs["grid_thw"].append(ret["image_grid_thw"])
1642
- outputs["image_type_ids"].append(0)
1643
-
1644
- self._add_special_token(self.IMG_END, outputs)
1645
-
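The number of <|IMAGE_PLACEHOLDER|> tokens inserted per image is just the patch grid divided by the squared spatial merge; e.g. a 28x28-patch image with spatial_conv_size=2 contributes (28*28)//4 = 196 tokens (grid values here are illustrative):

    patches_h, patches_w, spatial_conv_size = 28, 28, 2  # illustrative patch grid
    num_tokens = (patches_h * patches_w) // (spatial_conv_size ** 2)
    print(num_tokens)  # 196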
1646
- def _add_video(
1647
- self, pixel_stack: np.ndarray, outputs: Dict
1648
- ) -> None:
1649
- outputs["video_cnt"] += 1
1650
- self._add_special_token(self.VID_START, outputs)
1651
-
1652
- patches_h, patches_w = self.image_processor.get_smarted_resize(
1653
- pixel_stack.shape[1],
1654
- pixel_stack.shape[2],
1655
- min_pixels=self.video_min_pixels,
1656
- max_pixels=self.video_max_pixels,
1657
- )[1]
1658
- num_frames = pixel_stack.shape[0]
1659
- num_tokens = (num_frames * patches_h * patches_w) // (
1660
- self.spatial_conv_size**2 * self.temporal_conv_size
1661
- )
1662
-
1663
- ret = self.image_processor.preprocess(
1664
- images=None,
1665
- videos=pixel_stack,
1666
- do_normalize=False,
1667
- do_rescale=False,
1668
- predetermined_grid_thw=np.array([[patches_h, patches_w]] * num_frames),
1669
- do_convert_rgb=True,
1670
- input_data_format=ChannelDimension.LAST,
1671
- )
1672
- outputs["images"].append(ret["pixel_values_videos"])
1673
- outputs["grid_thw"].append(ret["video_grid_thw"])
1674
- outputs["image_type_ids"].extend([1] * num_frames)
1675
-
1676
- outputs["input_ids"].extend([self.image_patch_id] * num_tokens)
1677
- outputs["token_type_ids"].extend([IDS_TYPE_FLAG["video"]] * num_tokens)
1678
-
1679
- pos_ids = self._compute_3d_positions(
1680
- num_frames, patches_h, patches_w, outputs["cur_position"]
1681
- )
1682
- outputs["position_ids"].extend(pos_ids)
1683
- outputs["cur_position"] = np.max(pos_ids) + 1
1684
-
1685
- self._add_special_token(self.VID_END, outputs)
1686
-
1687
- def _load_and_process_video(self, url: str, item: Dict) -> List[Image.Image]:
1688
- reader, meta, path = read_video_decord(url, save_to_disk=False)
1689
-
1690
- video_frame_args = dict()
1691
- video_frame_args["fps"] = item.get("fps", self.fps)
1692
- video_frame_args["min_frames"] = item.get("min_frames", self.min_frames)
1693
- video_frame_args["max_frames"] = item.get("max_frames", self.max_frames)
1694
- video_frame_args["target_frames"] = item.get(
1695
- "target_frames", self.target_frames
1696
- )
1697
- video_frame_args["frames_sample"] = item.get(
1698
- "frames_sample", self.frames_sample
1699
- )
1700
-
1701
- video_frame_args = self._set_video_frame_args(video_frame_args, meta)
1702
-
1703
- frames_data, _, timestamps = read_frames_decord(
1704
- path,
1705
- reader,
1706
- meta,
1707
- target_frames=video_frame_args["target_frames"],
1708
- target_fps=video_frame_args["fps"],
1709
- frames_sample=video_frame_args["frames_sample"],
1710
- save_to_disk=False,
1711
- )
1712
-
1713
- frames: List[Image.Image] = []
1714
- for img_array, ts in zip(frames_data, timestamps):
1715
- frames.append(render_frame_timestamp(img_array, ts))
1716
- # Ensure even number of frames for temporal conv
1717
- if len(frames) % 2 != 0:
1718
- frames.append(copy.deepcopy(frames[-1]))
1719
- return frames
1720
-
1721
- def _set_video_frame_args(self, video_frame_args, video_meta):
1722
- """
1723
- Set the final frame extraction parameters based on known parameters and priorities
1724
- """
1725
- # Priority: video_target_frames > (video_min_frames, video_max_frames) > video_fps
1726
- if video_frame_args["target_frames"] > 0:
1727
- if video_frame_args["fps"] >= 0:
1728
- raise ValueError("fps must be negative if target_frames is given")
1729
- if (
1730
- video_frame_args["min_frames"] > 0
1731
- and video_frame_args["target_frames"] < video_frame_args["min_frames"]
1732
- ):
1733
- raise ValueError("target_frames must be larger than min_frames")
1734
- if (
1735
- video_frame_args["max_frames"] > 0
1736
- and video_frame_args["target_frames"] > video_frame_args["max_frames"]
1737
- ):
1738
- raise ValueError("target_frames must be smaller than max_frames")
1739
- else:
1740
- if video_frame_args["fps"] < 0:
1741
- raise ValueError(
1742
- "Must provide either positive target_fps or positive target_frames."
1743
- )
1744
- # First calculate the number of frames extracted under video_fps
1745
- frames_to_extract = int(video_meta["duration"] * video_frame_args["fps"])
1746
- # If that count falls outside [min_frames, max_frames], clamp it by setting target_frames to the violated bound
1747
- if (
1748
- video_frame_args["min_frames"] > 0
1749
- and video_frame_args["max_frames"] > 0
1750
- and video_frame_args["min_frames"] > video_frame_args["max_frames"]
1751
- ):
1752
- raise ValueError("min_frames must be smaller than max_frames")
1753
- if (
1754
- video_frame_args["min_frames"] > 0
1755
- and frames_to_extract < video_frame_args["min_frames"]
1756
- ):
1757
- video_frame_args["target_frames"] = video_frame_args["min_frames"]
1758
- video_frame_args["fps"] = -1
1759
- if (
1760
- video_frame_args["max_frames"] > 0
1761
- and frames_to_extract > video_frame_args["max_frames"]
1762
- ):
1763
- video_frame_args["target_frames"] = video_frame_args["max_frames"]
1764
- video_frame_args["fps"] = -1
1765
-
1766
- return video_frame_args
1767
-
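When no target_frames is given, the rules above reduce to computing duration * fps and clamping it into [min_frames, max_frames]; with the class defaults (fps=2, min 16, max 180) a 100-second clip yields 200 candidate frames, clamped to 180. A condensed restatement of that rule (a simplification, not the exact control flow above):

    duration, fps = 100.0, 2          # illustrative clip length and default fps
    min_frames, max_frames = 16, 180  # class defaults

    frames_to_extract = int(duration * fps)                       # 200
    target = min(max(frames_to_extract, min_frames), max_frames)  # clamp into [16, 180]
    print(frames_to_extract, target)                              # 200 180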
1768
- def _compute_3d_positions(
1769
- self, t: int, h: int, w: int, start_idx: int
1770
- ) -> List[List[int]]:
1771
- # Downsample time if needed
1772
- t_eff = t // self.temporal_conv_size if t != 1 else 1
1773
- gh, gw = h // self.spatial_conv_size, w // self.spatial_conv_size
1774
- time_idx = np.repeat(np.arange(t_eff), gh * gw)
1775
- h_idx = np.tile(np.repeat(np.arange(gh), gw), t_eff)
1776
- w_idx = np.tile(np.arange(gw), t_eff * gh)
1777
-
1778
- coords = list(zip(time_idx, h_idx, w_idx))
1779
- return [
1780
- [start_idx + ti, start_idx + hi, start_idx + wi] for ti, hi, wi in coords
1781
- ]
1782
-
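A self-contained sketch of the position scheme implemented by _compute_3d_positions, run on an illustrative 4x6 patch grid for a single image (t=1) with the default conv sizes:

    import numpy as np

    def positions_3d(t, h, w, start, spatial=2, temporal=2):
        # Collapse the patch grid by the conv sizes, then emit
        # (time, height, width) indices offset by the running position,
        # mirroring _compute_3d_positions.
        t_eff = t // temporal if t != 1 else 1
        gh, gw = h // spatial, w // spatial
        time_idx = np.repeat(np.arange(t_eff), gh * gw)
        h_idx = np.tile(np.repeat(np.arange(gh), gw), t_eff)
        w_idx = np.tile(np.arange(gw), t_eff * gh)
        return [[int(start + ti), int(start + hi), int(start + wi)]
                for ti, hi, wi in zip(time_idx, h_idx, w_idx)]

    pos = positions_3d(t=1, h=4, w=6, start=10)
    print(len(pos))  # 6 tokens: (4 // 2) * (6 // 2)
    print(pos[:3])   # [[10, 10, 10], [10, 10, 11], [10, 10, 12]]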
1783
- def _pack_outputs(self, outs: Dict) -> Dict[str, Any]:
1784
- # Stack or nullify image-related fields
1785
- if not outs["images"]:
1786
- outs["images"] = None
1787
- outs["grid_thw"] = None
1788
- outs["image_type_ids"] = None
1789
- else:
1790
- outs["images"] = np.vstack(outs["images"])
1791
- outs["grid_thw"] = np.vstack(outs["grid_thw"])
1792
- outs["image_type_ids"] = np.array(outs["image_type_ids"])
1793
-
1794
- # Convert lists to arrays
1795
- outs["input_ids"] = np.array(outs["input_ids"], dtype=np.int64)
1796
- outs["token_type_ids"] = np.array(outs["token_type_ids"], dtype=np.int64)
1797
- outs["position_ids"] = np.array(outs["position_ids"], dtype=np.int64)
1798
- return outs
1799
-
1800
- def batch_decode(self, *args, **kwargs):
1801
- """
1802
- This method forwards all its arguments to Ernie4_5_VLTokenizer's [`~PreTrainedTokenizer.batch_decode`]. Please
1803
- refer to the docstring of this method for more information.
1804
- """
1805
- return self.tokenizer.batch_decode(*args, **kwargs)
1806
-
1807
- def decode(self, *args, **kwargs):
1808
- """
1809
- This method forwards all its arguments to Ernie4_5_VLTokenizer's [`~PreTrainedTokenizer.decode`].
1810
- Please refer to the docstring of this method for more information.
1811
- """
1812
- return self.tokenizer.decode(*args, **kwargs)
1813
-
1814
- @property
1815
- def model_input_names(self):
1816
- """get model input names"""
1817
- tokenizer_input_names = self.tokenizer.model_input_names
1818
- image_processor_input_names = self.image_processor.model_input_names
1819
- return list(tokenizer_input_names) + list(image_processor_input_names)
1820
-
1821
-
1822
- __all__ = ["Ernie4_5_VLTokenizer", "Ernie4_5_VLImageProcessor", "Ernie4_5_VLProcessor"]
processor_config.json ADDED
@@ -0,0 +1,63 @@
1
+ {
2
+ "image_processor": {
3
+ "data_format": "channels_first",
4
+ "do_convert_rgb": true,
5
+ "do_normalize": true,
6
+ "do_rescale": true,
7
+ "do_resize": true,
8
+ "image_mean": [
9
+ 0.48145466,
10
+ 0.4578275,
11
+ 0.40821073
12
+ ],
13
+ "image_processor_type": "Ernie4_5_VL_MoeImageProcessorFast",
14
+ "image_std": [
15
+ 0.26862954,
16
+ 0.26130258,
17
+ 0.27577711
18
+ ],
19
+ "merge_size": 2,
20
+ "patch_size": 14,
21
+ "resample": 3,
22
+ "rescale_factor": 0.00392156862745098,
23
+ "size": {
24
+ "longest_edge": 4842768,
25
+ "shortest_edge": 3136
26
+ }
27
+ },
28
+ "processor_class": "Ernie4_5_VL_MoeProcessor",
29
+ "video_processor": {
30
+ "data_format": "channels_first",
31
+ "default_to_square": true,
32
+ "do_convert_rgb": true,
33
+ "do_normalize": true,
34
+ "do_rescale": true,
35
+ "do_resize": true,
36
+ "do_sample_frames": true,
37
+ "draw_on_frames": true,
38
+ "font": "Roboto-Regular.ttf",
39
+ "image_mean": [
40
+ 0.48145466,
41
+ 0.4578275,
42
+ 0.40821073
43
+ ],
44
+ "image_std": [
45
+ 0.26862954,
46
+ 0.26130258,
47
+ 0.27577711
48
+ ],
49
+ "max_frames": 180,
50
+ "merge_size": 2,
51
+ "min_frames": 16,
52
+ "patch_size": 14,
53
+ "resample": 3,
54
+ "rescale_factor": 0.00392156862745098,
55
+ "return_metadata": false,
56
+ "size": {
57
+ "longest_edge": 937664,
58
+ "shortest_edge": 234416
59
+ },
60
+ "temporal_patch_size": 2,
61
+ "video_processor_type": "Ernie4_5_VL_MoeVideoProcessor"
62
+ }
63
+ }
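The size limits in this config are the pixel budgets from the removed processor defaults written out in absolute pixels: 4*28*28 = 3136 and 6177*28*28 = 4842768 for images, 299*28*28 = 234416 and 1196*28*28 = 937664 for video frames. A quick check:

    unit = 28 * 28  # pixels in one 28x28 block (patch_size 14 with merge_size 2)
    print(4 * unit, 6177 * unit)    # 3136 4842768  -> image shortest/longest edge
    print(299 * unit, 1196 * unit)  # 234416 937664 -> video shortest/longest edge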
special_tokens_map.json DELETED
@@ -1 +0,0 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "<|end_of_sentence|>", "pad_token": "<unk>", "cls_token": "<|begin_of_sentence|>", "mask_token": "<mask:1>", "sys_start_token": "<mask:4>", "sys_end_token": "<mask:5>", "header_start_token": "<mask:6>", "header_end_token": "<mask:7>", "additional_special_tokens": ["<|IMAGE_PLACEHOLDER|>", "<|AUDIO_PLACEHOLDER|>", "<|LOC_0|>", "<|LOC_1|>", "<|LOC_2|>", "<|LOC_3|>", "<|LOC_4|>", "<|LOC_5|>", "<|LOC_6|>", "<|LOC_7|>", "<|LOC_8|>", "<|LOC_9|>", "<|LOC_10|>", "<|LOC_11|>", "<|LOC_12|>", "<|LOC_13|>", "<|LOC_14|>", "<|LOC_15|>", "<|LOC_16|>", "<|LOC_17|>", "<|LOC_18|>", "<|LOC_19|>", "<|LOC_20|>", "<|LOC_21|>", "<|LOC_22|>", "<|LOC_23|>", "<|LOC_24|>", "<|LOC_25|>", "<|LOC_26|>", "<|LOC_27|>", "<|LOC_28|>", "<|LOC_29|>", "<|LOC_30|>", "<|LOC_31|>", "<|LOC_32|>", "<|LOC_33|>", "<|LOC_34|>", "<|LOC_35|>", "<|LOC_36|>", "<|LOC_37|>", "<|LOC_38|>", "<|LOC_39|>", "<|LOC_40|>", "<|LOC_41|>", "<|LOC_42|>", "<|LOC_43|>", "<|LOC_44|>", "<|LOC_45|>", "<|LOC_46|>", "<|LOC_47|>", "<|LOC_48|>", "<|LOC_49|>", "<|LOC_50|>", "<|LOC_51|>", "<|LOC_52|>", "<|LOC_53|>", "<|LOC_54|>", "<|LOC_55|>", "<|LOC_56|>", "<|LOC_57|>", "<|LOC_58|>", "<|LOC_59|>", "<|LOC_60|>", "<|LOC_61|>", "<|LOC_62|>", "<|LOC_63|>", "<|LOC_64|>", "<|LOC_65|>", "<|LOC_66|>", "<|LOC_67|>", "<|LOC_68|>", "<|LOC_69|>", "<|LOC_70|>", "<|LOC_71|>", "<|LOC_72|>", "<|LOC_73|>", "<|LOC_74|>", "<|LOC_75|>", "<|LOC_76|>", "<|LOC_77|>", "<|LOC_78|>", "<|LOC_79|>", "<|LOC_80|>", "<|LOC_81|>", "<|LOC_82|>", "<|LOC_83|>", "<|LOC_84|>", "<|LOC_85|>", "<|LOC_86|>", "<|LOC_87|>", "<|LOC_88|>", "<|LOC_89|>", "<|LOC_90|>", "<|LOC_91|>", "<|LOC_92|>", "<|LOC_93|>", "<|LOC_94|>", "<|LOC_95|>", "<|LOC_96|>", "<|LOC_97|>", "<|LOC_98|>", "<|LOC_99|>", "<|LOC_100|>", "<|LOC_101|>", "<|LOC_102|>", "<|LOC_103|>", "<|LOC_104|>", "<|LOC_105|>", "<|LOC_106|>", "<|LOC_107|>", "<|LOC_108|>", "<|LOC_109|>", "<|LOC_110|>", "<|LOC_111|>", "<|LOC_112|>", "<|LOC_113|>", "<|LOC_114|>", "<|LOC_115|>", "<|LOC_116|>", "<|LOC_117|>", "<|LOC_118|>", "<|LOC_119|>", "<|LOC_120|>", "<|LOC_121|>", "<|LOC_122|>", "<|LOC_123|>", "<|LOC_124|>", "<|LOC_125|>", "<|LOC_126|>", "<|LOC_127|>", "<|LOC_128|>", "<|LOC_129|>", "<|LOC_130|>", "<|LOC_131|>", "<|LOC_132|>", "<|LOC_133|>", "<|LOC_134|>", "<|LOC_135|>", "<|LOC_136|>", "<|LOC_137|>", "<|LOC_138|>", "<|LOC_139|>", "<|LOC_140|>", "<|LOC_141|>", "<|LOC_142|>", "<|LOC_143|>", "<|LOC_144|>", "<|LOC_145|>", "<|LOC_146|>", "<|LOC_147|>", "<|LOC_148|>", "<|LOC_149|>", "<|LOC_150|>", "<|LOC_151|>", "<|LOC_152|>", "<|LOC_153|>", "<|LOC_154|>", "<|LOC_155|>", "<|LOC_156|>", "<|LOC_157|>", "<|LOC_158|>", "<|LOC_159|>", "<|LOC_160|>", "<|LOC_161|>", "<|LOC_162|>", "<|LOC_163|>", "<|LOC_164|>", "<|LOC_165|>", "<|LOC_166|>", "<|LOC_167|>", "<|LOC_168|>", "<|LOC_169|>", "<|LOC_170|>", "<|LOC_171|>", "<|LOC_172|>", "<|LOC_173|>", "<|LOC_174|>", "<|LOC_175|>", "<|LOC_176|>", "<|LOC_177|>", "<|LOC_178|>", "<|LOC_179|>", "<|LOC_180|>", "<|LOC_181|>", "<|LOC_182|>", "<|LOC_183|>", "<|LOC_184|>", "<|LOC_185|>", "<|LOC_186|>", "<|LOC_187|>", "<|LOC_188|>", "<|LOC_189|>", "<|LOC_190|>", "<|LOC_191|>", "<|LOC_192|>", "<|LOC_193|>", "<|LOC_194|>", "<|LOC_195|>", "<|LOC_196|>", "<|LOC_197|>", "<|LOC_198|>", "<|LOC_199|>", "<|LOC_200|>", "<|LOC_201|>", "<|LOC_202|>", "<|LOC_203|>", "<|LOC_204|>", "<|LOC_205|>", "<|LOC_206|>", "<|LOC_207|>", "<|LOC_208|>", "<|LOC_209|>", "<|LOC_210|>", "<|LOC_211|>", "<|LOC_212|>", "<|LOC_213|>", "<|LOC_214|>", "<|LOC_215|>", "<|LOC_216|>", "<|LOC_217|>", 
"<|LOC_218|>", "<|LOC_219|>", "<|LOC_220|>", "<|LOC_221|>", "<|LOC_222|>", "<|LOC_223|>", "<|LOC_224|>", "<|LOC_225|>", "<|LOC_226|>", "<|LOC_227|>", "<|LOC_228|>", "<|LOC_229|>", "<|LOC_230|>", "<|LOC_231|>", "<|LOC_232|>", "<|LOC_233|>", "<|LOC_234|>", "<|LOC_235|>", "<|LOC_236|>", "<|LOC_237|>", "<|LOC_238|>", "<|LOC_239|>", "<|LOC_240|>", "<|LOC_241|>", "<|LOC_242|>", "<|LOC_243|>", "<|LOC_244|>", "<|LOC_245|>", "<|LOC_246|>", "<|LOC_247|>", "<|LOC_248|>", "<|LOC_249|>", "<|LOC_250|>", "<|LOC_251|>", "<|LOC_252|>", "<|LOC_253|>", "<|LOC_254|>", "<|LOC_255|>", "<|LOC_256|>", "<|LOC_257|>", "<|LOC_258|>", "<|LOC_259|>", "<|LOC_260|>", "<|LOC_261|>", "<|LOC_262|>", "<|LOC_263|>", "<|LOC_264|>", "<|LOC_265|>", "<|LOC_266|>", "<|LOC_267|>", "<|LOC_268|>", "<|LOC_269|>", "<|LOC_270|>", "<|LOC_271|>", "<|LOC_272|>", "<|LOC_273|>", "<|LOC_274|>", "<|LOC_275|>", "<|LOC_276|>", "<|LOC_277|>", "<|LOC_278|>", "<|LOC_279|>", "<|LOC_280|>", "<|LOC_281|>", "<|LOC_282|>", "<|LOC_283|>", "<|LOC_284|>", "<|LOC_285|>", "<|LOC_286|>", "<|LOC_287|>", "<|LOC_288|>", "<|LOC_289|>", "<|LOC_290|>", "<|LOC_291|>", "<|LOC_292|>", "<|LOC_293|>", "<|LOC_294|>", "<|LOC_295|>", "<|LOC_296|>", "<|LOC_297|>", "<|LOC_298|>", "<|LOC_299|>", "<|LOC_300|>", "<|LOC_301|>", "<|LOC_302|>", "<|LOC_303|>", "<|LOC_304|>", "<|LOC_305|>", "<|LOC_306|>", "<|LOC_307|>", "<|LOC_308|>", "<|LOC_309|>", "<|LOC_310|>", "<|LOC_311|>", "<|LOC_312|>", "<|LOC_313|>", "<|LOC_314|>", "<|LOC_315|>", "<|LOC_316|>", "<|LOC_317|>", "<|LOC_318|>", "<|LOC_319|>", "<|LOC_320|>", "<|LOC_321|>", "<|LOC_322|>", "<|LOC_323|>", "<|LOC_324|>", "<|LOC_325|>", "<|LOC_326|>", "<|LOC_327|>", "<|LOC_328|>", "<|LOC_329|>", "<|LOC_330|>", "<|LOC_331|>", "<|LOC_332|>", "<|LOC_333|>", "<|LOC_334|>", "<|LOC_335|>", "<|LOC_336|>", "<|LOC_337|>", "<|LOC_338|>", "<|LOC_339|>", "<|LOC_340|>", "<|LOC_341|>", "<|LOC_342|>", "<|LOC_343|>", "<|LOC_344|>", "<|LOC_345|>", "<|LOC_346|>", "<|LOC_347|>", "<|LOC_348|>", "<|LOC_349|>", "<|LOC_350|>", "<|LOC_351|>", "<|LOC_352|>", "<|LOC_353|>", "<|LOC_354|>", "<|LOC_355|>", "<|LOC_356|>", "<|LOC_357|>", "<|LOC_358|>", "<|LOC_359|>", "<|LOC_360|>", "<|LOC_361|>", "<|LOC_362|>", "<|LOC_363|>", "<|LOC_364|>", "<|LOC_365|>", "<|LOC_366|>", "<|LOC_367|>", "<|LOC_368|>", "<|LOC_369|>", "<|LOC_370|>", "<|LOC_371|>", "<|LOC_372|>", "<|LOC_373|>", "<|LOC_374|>", "<|LOC_375|>", "<|LOC_376|>", "<|LOC_377|>", "<|LOC_378|>", "<|LOC_379|>", "<|LOC_380|>", "<|LOC_381|>", "<|LOC_382|>", "<|LOC_383|>", "<|LOC_384|>", "<|LOC_385|>", "<|LOC_386|>", "<|LOC_387|>", "<|LOC_388|>", "<|LOC_389|>", "<|LOC_390|>", "<|LOC_391|>", "<|LOC_392|>", "<|LOC_393|>", "<|LOC_394|>", "<|LOC_395|>", "<|LOC_396|>", "<|LOC_397|>", "<|LOC_398|>", "<|LOC_399|>", "<|LOC_400|>", "<|LOC_401|>", "<|LOC_402|>", "<|LOC_403|>", "<|LOC_404|>", "<|LOC_405|>", "<|LOC_406|>", "<|LOC_407|>", "<|LOC_408|>", "<|LOC_409|>", "<|LOC_410|>", "<|LOC_411|>", "<|LOC_412|>", "<|LOC_413|>", "<|LOC_414|>", "<|LOC_415|>", "<|LOC_416|>", "<|LOC_417|>", "<|LOC_418|>", "<|LOC_419|>", "<|LOC_420|>", "<|LOC_421|>", "<|LOC_422|>", "<|LOC_423|>", "<|LOC_424|>", "<|LOC_425|>", "<|LOC_426|>", "<|LOC_427|>", "<|LOC_428|>", "<|LOC_429|>", "<|LOC_430|>", "<|LOC_431|>", "<|LOC_432|>", "<|LOC_433|>", "<|LOC_434|>", "<|LOC_435|>", "<|LOC_436|>", "<|LOC_437|>", "<|LOC_438|>", "<|LOC_439|>", "<|LOC_440|>", "<|LOC_441|>", "<|LOC_442|>", "<|LOC_443|>", "<|LOC_444|>", "<|LOC_445|>", "<|LOC_446|>", "<|LOC_447|>", "<|LOC_448|>", "<|LOC_449|>", "<|LOC_450|>", "<|LOC_451|>", "<|LOC_452|>", "<|LOC_453|>", "<|LOC_454|>", 
"<|LOC_455|>", "<|LOC_456|>", "<|LOC_457|>", "<|LOC_458|>", "<|LOC_459|>", "<|LOC_460|>", "<|LOC_461|>", "<|LOC_462|>", "<|LOC_463|>", "<|LOC_464|>", "<|LOC_465|>", "<|LOC_466|>", "<|LOC_467|>", "<|LOC_468|>", "<|LOC_469|>", "<|LOC_470|>", "<|LOC_471|>", "<|LOC_472|>", "<|LOC_473|>", "<|LOC_474|>", "<|LOC_475|>", "<|LOC_476|>", "<|LOC_477|>", "<|LOC_478|>", "<|LOC_479|>", "<|LOC_480|>", "<|LOC_481|>", "<|LOC_482|>", "<|LOC_483|>", "<|LOC_484|>", "<|LOC_485|>", "<|LOC_486|>", "<|LOC_487|>", "<|LOC_488|>", "<|LOC_489|>", "<|LOC_490|>", "<|LOC_491|>", "<|LOC_492|>", "<|LOC_493|>", "<|LOC_494|>", "<|LOC_495|>", "<|LOC_496|>", "<|LOC_497|>", "<|LOC_498|>", "<|LOC_499|>", "<|LOC_500|>", "<|LOC_501|>", "<|LOC_502|>", "<|LOC_503|>", "<|LOC_504|>", "<|LOC_505|>", "<|LOC_506|>", "<|LOC_507|>", "<|LOC_508|>", "<|LOC_509|>", "<|LOC_510|>", "<|LOC_511|>", "<|LOC_512|>", "<|LOC_513|>", "<|LOC_514|>", "<|LOC_515|>", "<|LOC_516|>", "<|LOC_517|>", "<|LOC_518|>", "<|LOC_519|>", "<|LOC_520|>", "<|LOC_521|>", "<|LOC_522|>", "<|LOC_523|>", "<|LOC_524|>", "<|LOC_525|>", "<|LOC_526|>", "<|LOC_527|>", "<|LOC_528|>", "<|LOC_529|>", "<|LOC_530|>", "<|LOC_531|>", "<|LOC_532|>", "<|LOC_533|>", "<|LOC_534|>", "<|LOC_535|>", "<|LOC_536|>", "<|LOC_537|>", "<|LOC_538|>", "<|LOC_539|>", "<|LOC_540|>", "<|LOC_541|>", "<|LOC_542|>", "<|LOC_543|>", "<|LOC_544|>", "<|LOC_545|>", "<|LOC_546|>", "<|LOC_547|>", "<|LOC_548|>", "<|LOC_549|>", "<|LOC_550|>", "<|LOC_551|>", "<|LOC_552|>", "<|LOC_553|>", "<|LOC_554|>", "<|LOC_555|>", "<|LOC_556|>", "<|LOC_557|>", "<|LOC_558|>", "<|LOC_559|>", "<|LOC_560|>", "<|LOC_561|>", "<|LOC_562|>", "<|LOC_563|>", "<|LOC_564|>", "<|LOC_565|>", "<|LOC_566|>", "<|LOC_567|>", "<|LOC_568|>", "<|LOC_569|>", "<|LOC_570|>", "<|LOC_571|>", "<|LOC_572|>", "<|LOC_573|>", "<|LOC_574|>", "<|LOC_575|>", "<|LOC_576|>", "<|LOC_577|>", "<|LOC_578|>", "<|LOC_579|>", "<|LOC_580|>", "<|LOC_581|>", "<|LOC_582|>", "<|LOC_583|>", "<|LOC_584|>", "<|LOC_585|>", "<|LOC_586|>", "<|LOC_587|>", "<|LOC_588|>", "<|LOC_589|>", "<|LOC_590|>", "<|LOC_591|>", "<|LOC_592|>", "<|LOC_593|>", "<|LOC_594|>", "<|LOC_595|>", "<|LOC_596|>", "<|LOC_597|>", "<|LOC_598|>", "<|LOC_599|>", "<|LOC_600|>", "<|LOC_601|>", "<|LOC_602|>", "<|LOC_603|>", "<|LOC_604|>", "<|LOC_605|>", "<|LOC_606|>", "<|LOC_607|>", "<|LOC_608|>", "<|LOC_609|>", "<|LOC_610|>", "<|LOC_611|>", "<|LOC_612|>", "<|LOC_613|>", "<|LOC_614|>", "<|LOC_615|>", "<|LOC_616|>", "<|LOC_617|>", "<|LOC_618|>", "<|LOC_619|>", "<|LOC_620|>", "<|LOC_621|>", "<|LOC_622|>", "<|LOC_623|>", "<|LOC_624|>", "<|LOC_625|>", "<|LOC_626|>", "<|LOC_627|>", "<|LOC_628|>", "<|LOC_629|>", "<|LOC_630|>", "<|LOC_631|>", "<|LOC_632|>", "<|LOC_633|>", "<|LOC_634|>", "<|LOC_635|>", "<|LOC_636|>", "<|LOC_637|>", "<|LOC_638|>", "<|LOC_639|>", "<|LOC_640|>", "<|LOC_641|>", "<|LOC_642|>", "<|LOC_643|>", "<|LOC_644|>", "<|LOC_645|>", "<|LOC_646|>", "<|LOC_647|>", "<|LOC_648|>", "<|LOC_649|>", "<|LOC_650|>", "<|LOC_651|>", "<|LOC_652|>", "<|LOC_653|>", "<|LOC_654|>", "<|LOC_655|>", "<|LOC_656|>", "<|LOC_657|>", "<|LOC_658|>", "<|LOC_659|>", "<|LOC_660|>", "<|LOC_661|>", "<|LOC_662|>", "<|LOC_663|>", "<|LOC_664|>", "<|LOC_665|>", "<|LOC_666|>", "<|LOC_667|>", "<|LOC_668|>", "<|LOC_669|>", "<|LOC_670|>", "<|LOC_671|>", "<|LOC_672|>", "<|LOC_673|>", "<|LOC_674|>", "<|LOC_675|>", "<|LOC_676|>", "<|LOC_677|>", "<|LOC_678|>", "<|LOC_679|>", "<|LOC_680|>", "<|LOC_681|>", "<|LOC_682|>", "<|LOC_683|>", "<|LOC_684|>", "<|LOC_685|>", "<|LOC_686|>", "<|LOC_687|>", "<|LOC_688|>", "<|LOC_689|>", "<|LOC_690|>", "<|LOC_691|>", 
"<|LOC_692|>", "<|LOC_693|>", "<|LOC_694|>", "<|LOC_695|>", "<|LOC_696|>", "<|LOC_697|>", "<|LOC_698|>", "<|LOC_699|>", "<|LOC_700|>", "<|LOC_701|>", "<|LOC_702|>", "<|LOC_703|>", "<|LOC_704|>", "<|LOC_705|>", "<|LOC_706|>", "<|LOC_707|>", "<|LOC_708|>", "<|LOC_709|>", "<|LOC_710|>", "<|LOC_711|>", "<|LOC_712|>", "<|LOC_713|>", "<|LOC_714|>", "<|LOC_715|>", "<|LOC_716|>", "<|LOC_717|>", "<|LOC_718|>", "<|LOC_719|>", "<|LOC_720|>", "<|LOC_721|>", "<|LOC_722|>", "<|LOC_723|>", "<|LOC_724|>", "<|LOC_725|>", "<|LOC_726|>", "<|LOC_727|>", "<|LOC_728|>", "<|LOC_729|>", "<|LOC_730|>", "<|LOC_731|>", "<|LOC_732|>", "<|LOC_733|>", "<|LOC_734|>", "<|LOC_735|>", "<|LOC_736|>", "<|LOC_737|>", "<|LOC_738|>", "<|LOC_739|>", "<|LOC_740|>", "<|LOC_741|>", "<|LOC_742|>", "<|LOC_743|>", "<|LOC_744|>", "<|LOC_745|>", "<|LOC_746|>", "<|LOC_747|>", "<|LOC_748|>", "<|LOC_749|>", "<|LOC_750|>", "<|LOC_751|>", "<|LOC_752|>", "<|LOC_753|>", "<|LOC_754|>", "<|LOC_755|>", "<|LOC_756|>", "<|LOC_757|>", "<|LOC_758|>", "<|LOC_759|>", "<|LOC_760|>", "<|LOC_761|>", "<|LOC_762|>", "<|LOC_763|>", "<|LOC_764|>", "<|LOC_765|>", "<|LOC_766|>", "<|LOC_767|>", "<|LOC_768|>", "<|LOC_769|>", "<|LOC_770|>", "<|LOC_771|>", "<|LOC_772|>", "<|LOC_773|>", "<|LOC_774|>", "<|LOC_775|>", "<|LOC_776|>", "<|LOC_777|>", "<|LOC_778|>", "<|LOC_779|>", "<|LOC_780|>", "<|LOC_781|>", "<|LOC_782|>", "<|LOC_783|>", "<|LOC_784|>", "<|LOC_785|>", "<|LOC_786|>", "<|LOC_787|>", "<|LOC_788|>", "<|LOC_789|>", "<|LOC_790|>", "<|LOC_791|>", "<|LOC_792|>", "<|LOC_793|>", "<|LOC_794|>", "<|LOC_795|>", "<|LOC_796|>", "<|LOC_797|>", "<|LOC_798|>", "<|LOC_799|>", "<|LOC_800|>", "<|LOC_801|>", "<|LOC_802|>", "<|LOC_803|>", "<|LOC_804|>", "<|LOC_805|>", "<|LOC_806|>", "<|LOC_807|>", "<|LOC_808|>", "<|LOC_809|>", "<|LOC_810|>", "<|LOC_811|>", "<|LOC_812|>", "<|LOC_813|>", "<|LOC_814|>", "<|LOC_815|>", "<|LOC_816|>", "<|LOC_817|>", "<|LOC_818|>", "<|LOC_819|>", "<|LOC_820|>", "<|LOC_821|>", "<|LOC_822|>", "<|LOC_823|>", "<|LOC_824|>", "<|LOC_825|>", "<|LOC_826|>", "<|LOC_827|>", "<|LOC_828|>", "<|LOC_829|>", "<|LOC_830|>", "<|LOC_831|>", "<|LOC_832|>", "<|LOC_833|>", "<|LOC_834|>", "<|LOC_835|>", "<|LOC_836|>", "<|LOC_837|>", "<|LOC_838|>", "<|LOC_839|>", "<|LOC_840|>", "<|LOC_841|>", "<|LOC_842|>", "<|LOC_843|>", "<|LOC_844|>", "<|LOC_845|>", "<|LOC_846|>", "<|LOC_847|>", "<|LOC_848|>", "<|LOC_849|>", "<|LOC_850|>", "<|LOC_851|>", "<|LOC_852|>", "<|LOC_853|>", "<|LOC_854|>", "<|LOC_855|>", "<|LOC_856|>", "<|LOC_857|>", "<|LOC_858|>", "<|LOC_859|>", "<|LOC_860|>", "<|LOC_861|>", "<|LOC_862|>", "<|LOC_863|>", "<|LOC_864|>", "<|LOC_865|>", "<|LOC_866|>", "<|LOC_867|>", "<|LOC_868|>", "<|LOC_869|>", "<|LOC_870|>", "<|LOC_871|>", "<|LOC_872|>", "<|LOC_873|>", "<|LOC_874|>", "<|LOC_875|>", "<|LOC_876|>", "<|LOC_877|>", "<|LOC_878|>", "<|LOC_879|>", "<|LOC_880|>", "<|LOC_881|>", "<|LOC_882|>", "<|LOC_883|>", "<|LOC_884|>", "<|LOC_885|>", "<|LOC_886|>", "<|LOC_887|>", "<|LOC_888|>", "<|LOC_889|>", "<|LOC_890|>", "<|LOC_891|>", "<|LOC_892|>", "<|LOC_893|>", "<|LOC_894|>", "<|LOC_895|>", "<|LOC_896|>", "<|LOC_897|>", "<|LOC_898|>", "<|LOC_899|>", "<|LOC_900|>", "<|LOC_901|>", "<|LOC_902|>", "<|LOC_903|>", "<|LOC_904|>", "<|LOC_905|>", "<|LOC_906|>", "<|LOC_907|>", "<|LOC_908|>", "<|LOC_909|>", "<|LOC_910|>", "<|LOC_911|>", "<|LOC_912|>", "<|LOC_913|>", "<|LOC_914|>", "<|LOC_915|>", "<|LOC_916|>", "<|LOC_917|>", "<|LOC_918|>", "<|LOC_919|>", "<|LOC_920|>", "<|LOC_921|>", "<|LOC_922|>", "<|LOC_923|>", "<|LOC_924|>", "<|LOC_925|>", "<|LOC_926|>", "<|LOC_927|>", "<|LOC_928|>", 
"<|LOC_929|>", "<|LOC_930|>", "<|LOC_931|>", "<|LOC_932|>", "<|LOC_933|>", "<|LOC_934|>", "<|LOC_935|>", "<|LOC_936|>", "<|LOC_937|>", "<|LOC_938|>", "<|LOC_939|>", "<|LOC_940|>", "<|LOC_941|>", "<|LOC_942|>", "<|LOC_943|>", "<|LOC_944|>", "<|LOC_945|>", "<|LOC_946|>", "<|LOC_947|>", "<|LOC_948|>", "<|LOC_949|>", "<|LOC_950|>", "<|LOC_951|>", "<|LOC_952|>", "<|LOC_953|>", "<|LOC_954|>", "<|LOC_955|>", "<|LOC_956|>", "<|LOC_957|>", "<|LOC_958|>", "<|LOC_959|>", "<|LOC_960|>", "<|LOC_961|>", "<|LOC_962|>", "<|LOC_963|>", "<|LOC_964|>", "<|LOC_965|>", "<|LOC_966|>", "<|LOC_967|>", "<|LOC_968|>", "<|LOC_969|>", "<|LOC_970|>", "<|LOC_971|>", "<|LOC_972|>", "<|LOC_973|>", "<|LOC_974|>", "<|LOC_975|>", "<|LOC_976|>", "<|LOC_977|>", "<|LOC_978|>", "<|LOC_979|>", "<|LOC_980|>", "<|LOC_981|>", "<|LOC_982|>", "<|LOC_983|>", "<|LOC_984|>", "<|LOC_985|>", "<|LOC_986|>", "<|LOC_987|>", "<|LOC_988|>", "<|LOC_989|>", "<|LOC_990|>", "<|LOC_991|>", "<|LOC_992|>", "<|LOC_993|>", "<|LOC_994|>", "<|LOC_995|>", "<|LOC_996|>", "<|LOC_997|>", "<|LOC_998|>", "<|LOC_999|>", "<|LOC_1000|>", "<|LOC_BEGIN|>", "<|LOC_END|>", "<|LOC_SEP|>", "<|CROP_COL_SEP|>", "<|CROP_ROW_SEP|>", "<|IMAGE_SEP|>", "<|IMAGE_START|>", "<|IMAGE_END|>", "<|VIDEO_START|>", "<|VIDEO_END|>", "<|ASR_START|>", "<|ASR_END|>", "<|IMAGE_UNUSE:6|>", "<|IMAGE_UNUSE:7|>", "<|IMAGE_UNUSE:8|>", "<|IMAGE_UNUSE:9|>", "<|IMAGE_UNUSE:10|>", "<|IMAGE_UNUSE:11|>", "<|IMAGE_UNUSE:12|>", "<|IMAGE_UNUSE:13|>", "<|IMAGE_UNUSE:14|>", "<|IMAGE_UNUSE:15|>", "<|IMAGE_UNUSE:16|>", "<|IMAGE_UNUSE:17|>", "<|IMAGE_UNUSE:18|>", "<|IMAGE_UNUSE:19|>", "<|IMAGE_UNUSE:20|>", "<|IMAGE_UNUSE:21|>", "<|IMAGE_UNUSE:22|>", "<|IMAGE_UNUSE:23|>", "<|IMAGE_UNUSE:24|>", "<|IMAGE_UNUSE:25|>", "<|IMAGE_UNUSE:26|>", "<|IMAGE_UNUSE:27|>", "<|IMAGE_UNUSE:28|>", "<|IMAGE_UNUSE:29|>", "<|IMAGE_UNUSE:30|>", "<|IMAGE_UNUSE:31|>", "<|IMAGE_UNUSE:32|>", "<|IMAGE_UNUSE:33|>", "<|IMAGE_UNUSE:34|>", "<|IMAGE_UNUSE:35|>", "<|IMAGE_UNUSE:36|>", "<|IMAGE_UNUSE:37|>", "<|IMAGE_UNUSE:38|>", "<|IMAGE_UNUSE:39|>", "<|IMAGE_UNUSE:40|>", "<|IMAGE_UNUSE:41|>", "<|IMAGE_UNUSE:42|>", "<|IMAGE_UNUSE:43|>", "<|IMAGE_UNUSE:44|>", "<|IMAGE_UNUSE:45|>", "<|IMAGE_UNUSE:46|>", "<|IMAGE_UNUSE:47|>", "<|IMAGE_UNUSE:48|>", "<|IMAGE_UNUSE:49|>", "<|IMAGE_UNUSE:50|>", "<|IMAGE_UNUSE:51|>", "<|IMAGE_UNUSE:52|>", "<|IMAGE_UNUSE:53|>", "<|IMAGE_UNUSE:54|>", "<|IMAGE_UNUSE:55|>", "<|IMAGE_UNUSE:56|>", "<|IMAGE_UNUSE:57|>", "<|IMAGE_UNUSE:58|>", "<|IMAGE_UNUSE:59|>", "<|IMAGE_UNUSE:60|>", "<|IMAGE_UNUSE:61|>", "<|IMAGE_UNUSE:62|>", "<|IMAGE_UNUSE:63|>", "<|IMAGE_UNUSE:64|>", "<|IMAGE_UNUSE:65|>", "<|IMAGE_UNUSE:66|>", "<|IMAGE_UNUSE:67|>", "<|IMAGE_UNUSE:68|>", "<|IMAGE_UNUSE:69|>", "<|IMAGE_UNUSE:70|>", "<|IMAGE_UNUSE:71|>", "<|IMAGE_UNUSE:72|>", "<|IMAGE_UNUSE:73|>", "<|IMAGE_UNUSE:74|>", "<|IMAGE_UNUSE:75|>", "<|IMAGE_UNUSE:76|>", "<|IMAGE_UNUSE:77|>", "<|IMAGE_UNUSE:78|>", "<|IMAGE_UNUSE:79|>", "<|IMAGE_UNUSE:80|>", "<|IMAGE_UNUSE:81|>", "<|IMAGE_UNUSE:82|>", "<|IMAGE_UNUSE:83|>", "<|IMAGE_UNUSE:84|>", "<|IMAGE_UNUSE:85|>", "<|IMAGE_UNUSE:86|>", "<|IMAGE_UNUSE:87|>", "<|IMAGE_UNUSE:88|>", "<|IMAGE_UNUSE:89|>", "<|IMAGE_UNUSE:90|>", "<|IMAGE_UNUSE:91|>", "<|IMAGE_UNUSE:92|>", "<|IMAGE_UNUSE:93|>", "<|IMAGE_UNUSE:94|>", "<|IMAGE_UNUSE:95|>", "<|IMAGE_UNUSE:96|>", "<|IMAGE_UNUSE:97|>", "<|IMAGE_UNUSE:98|>", "<|IMAGE_UNUSE:99|>", "<|IMAGE_UNUSE:100|>", "<|IMAGE_UNUSE:101|>", "<|IMAGE_UNUSE:102|>", "<|IMAGE_UNUSE:103|>", "<|IMAGE_UNUSE:104|>", "<|IMAGE_UNUSE:105|>", "<|IMAGE_UNUSE:106|>", "<|IMAGE_UNUSE:107|>", 
"<|IMAGE_UNUSE:108|>", "<|IMAGE_UNUSE:109|>", "<|IMAGE_UNUSE:110|>", "<|IMAGE_UNUSE:111|>", "<|IMAGE_UNUSE:112|>", "<|IMAGE_UNUSE:113|>", "<|IMAGE_UNUSE:114|>", "<|IMAGE_UNUSE:115|>", "<|IMAGE_UNUSE:116|>", "<|IMAGE_UNUSE:117|>", "<|IMAGE_UNUSE:118|>", "<|IMAGE_UNUSE:119|>", "<|IMAGE_UNUSE:120|>", "<|IMAGE_UNUSE:121|>", "<|IMAGE_UNUSE:122|>", "<|IMAGE_UNUSE:123|>", "<|IMAGE_UNUSE:124|>", "<|IMAGE_UNUSE:125|>", "<|IMAGE_UNUSE:126|>", "<|IMAGE_UNUSE:127|>", "<|IMAGE_UNUSE:128|>", "<|IMAGE_UNUSE:129|>", "<|IMAGE_UNUSE:130|>", "<|IMAGE_UNUSE:131|>", "<|IMAGE_UNUSE:132|>", "<|IMAGE_UNUSE:133|>", "<|IMAGE_UNUSE:134|>", "<|IMAGE_UNUSE:135|>", "<|IMAGE_UNUSE:136|>", "<|IMAGE_UNUSE:137|>", "<|IMAGE_UNUSE:138|>", "<|IMAGE_UNUSE:139|>", "<|IMAGE_UNUSE:140|>", "<|IMAGE_UNUSE:141|>", "<|IMAGE_UNUSE:142|>", "<|IMAGE_UNUSE:143|>", "<|IMAGE_UNUSE:144|>", "<|IMAGE_UNUSE:145|>", "<|IMAGE_UNUSE:146|>", "<|IMAGE_UNUSE:147|>", "<|IMAGE_UNUSE:148|>", "<|IMAGE_UNUSE:149|>", "<|IMAGE_UNUSE:150|>", "<|IMAGE_UNUSE:151|>", "<|IMAGE_UNUSE:152|>", "<|IMAGE_UNUSE:153|>", "<|IMAGE_UNUSE:154|>", "<|IMAGE_UNUSE:155|>", "<|IMAGE_UNUSE:156|>", "<|IMAGE_UNUSE:157|>", "<|IMAGE_UNUSE:158|>", "<|IMAGE_UNUSE:159|>", "<|IMAGE_UNUSE:160|>", "<|IMAGE_UNUSE:161|>", "<|IMAGE_UNUSE:162|>", "<|IMAGE_UNUSE:163|>", "<|IMAGE_UNUSE:164|>", "<|IMAGE_UNUSE:165|>", "<|IMAGE_UNUSE:166|>", "<|IMAGE_UNUSE:167|>", "<|IMAGE_UNUSE:168|>", "<|IMAGE_UNUSE:169|>", "<|IMAGE_UNUSE:170|>", "<|IMAGE_UNUSE:171|>", "<|IMAGE_UNUSE:172|>", "<|IMAGE_UNUSE:173|>", "<|IMAGE_UNUSE:174|>", "<|IMAGE_UNUSE:175|>", "<|IMAGE_UNUSE:176|>", "<|IMAGE_UNUSE:177|>", "<|IMAGE_UNUSE:178|>", "<|IMAGE_UNUSE:179|>", "<|IMAGE_UNUSE:180|>", "<|IMAGE_UNUSE:181|>", "<|IMAGE_UNUSE:182|>", "<|IMAGE_UNUSE:183|>", "<|IMAGE_UNUSE:184|>", "<|IMAGE_UNUSE:185|>", "<|IMAGE_UNUSE:186|>", "<|IMAGE_UNUSE:187|>", "<|IMAGE_UNUSE:188|>", "<|IMAGE_UNUSE:189|>", "<|IMAGE_UNUSE:190|>", "<|IMAGE_UNUSE:191|>", "<|IMAGE_UNUSE:192|>", "<|IMAGE_UNUSE:193|>", "<|IMAGE_UNUSE:194|>", "<|IMAGE_UNUSE:195|>", "<|IMAGE_UNUSE:196|>", "<|IMAGE_UNUSE:197|>", "<|IMAGE_UNUSE:198|>", "<|IMAGE_UNUSE:199|>", "<|IMAGE_UNUSE:200|>", "<|IMAGE_UNUSE:201|>", "<|IMAGE_UNUSE:202|>", "<|IMAGE_UNUSE:203|>", "<|IMAGE_UNUSE:204|>", "<|IMAGE_UNUSE:205|>", "<|IMAGE_UNUSE:206|>", "<|IMAGE_UNUSE:207|>", "<|IMAGE_UNUSE:208|>", "<|IMAGE_UNUSE:209|>", "<|IMAGE_UNUSE:210|>", "<|IMAGE_UNUSE:211|>", "<|IMAGE_UNUSE:212|>", "<|IMAGE_UNUSE:213|>", "<|IMAGE_UNUSE:214|>", "<|IMAGE_UNUSE:215|>", "<|IMAGE_UNUSE:216|>", "<|IMAGE_UNUSE:217|>", "<|IMAGE_UNUSE:218|>", "<|IMAGE_UNUSE:219|>", "<|IMAGE_UNUSE:220|>", "<|IMAGE_UNUSE:221|>", "<|IMAGE_UNUSE:222|>", "<|IMAGE_UNUSE:223|>", "<|IMAGE_UNUSE:224|>", "<|IMAGE_UNUSE:225|>", "<|IMAGE_UNUSE:226|>", "<|IMAGE_UNUSE:227|>", "<|IMAGE_UNUSE:228|>", "<|IMAGE_UNUSE:229|>", "<|IMAGE_UNUSE:230|>", "<|IMAGE_UNUSE:231|>", "<|IMAGE_UNUSE:232|>", "<|IMAGE_UNUSE:233|>", "<|IMAGE_UNUSE:234|>", "<|IMAGE_UNUSE:235|>", "<|IMAGE_UNUSE:236|>", "<|IMAGE_UNUSE:237|>", "<|IMAGE_UNUSE:238|>", "<|IMAGE_UNUSE:239|>", "<|IMAGE_UNUSE:240|>", "<|IMAGE_UNUSE:241|>", "<|IMAGE_UNUSE:242|>", "<|IMAGE_UNUSE:243|>", "<|IMAGE_UNUSE:244|>", "<|IMAGE_UNUSE:245|>", "<|IMAGE_UNUSE:246|>", "<|IMAGE_UNUSE:247|>", "<|IMAGE_UNUSE:248|>", "<|IMAGE_UNUSE:249|>", "<|IMAGE_UNUSE:250|>", "<|IMAGE_UNUSE:251|>", "<|IMAGE_UNUSE:252|>", "<|IMAGE_UNUSE:253|>", "<|IMAGE_UNUSE:254|>", "<|IMAGE_UNUSE:255|>", "<|IMAGE_UNUSE:256|>", "<|IMAGE_UNUSE:257|>", "<|IMAGE_UNUSE:258|>", "<|IMAGE_UNUSE:259|>", "<|IMAGE_UNUSE:260|>", "<|IMAGE_UNUSE:261|>", 
"<|IMAGE_UNUSE:262|>", "<|IMAGE_UNUSE:263|>", "<|IMAGE_UNUSE:264|>", "<|IMAGE_UNUSE:265|>", "<|IMAGE_UNUSE:266|>", "<|IMAGE_UNUSE:267|>", "<|IMAGE_UNUSE:268|>", "<|IMAGE_UNUSE:269|>", "<|IMAGE_UNUSE:270|>", "<|IMAGE_UNUSE:271|>", "<|IMAGE_UNUSE:272|>", "<|IMAGE_UNUSE:273|>", "<|IMAGE_UNUSE:274|>", "<|IMAGE_UNUSE:275|>", "<|IMAGE_UNUSE:276|>", "<|IMAGE_UNUSE:277|>", "<|IMAGE_UNUSE:278|>", "<|IMAGE_UNUSE:279|>", "<|IMAGE_UNUSE:280|>", "<|IMAGE_UNUSE:281|>", "<|IMAGE_UNUSE:282|>", "<|IMAGE_UNUSE:283|>", "<|IMAGE_UNUSE:284|>", "<|IMAGE_UNUSE:285|>", "<|IMAGE_UNUSE:286|>", "<|IMAGE_UNUSE:287|>", "<|IMAGE_UNUSE:288|>", "<|IMAGE_UNUSE:289|>", "<|IMAGE_UNUSE:290|>", "<|IMAGE_UNUSE:291|>", "<|IMAGE_UNUSE:292|>", "<|IMAGE_UNUSE:293|>", "<|IMAGE_UNUSE:294|>", "<|IMAGE_UNUSE:295|>", "<|IMAGE_UNUSE:296|>", "<|IMAGE_UNUSE:297|>", "<|IMAGE_UNUSE:298|>", "<|IMAGE_UNUSE:299|>", "<|IMAGE_UNUSE:300|>", "<|IMAGE_UNUSE:301|>", "<|IMAGE_UNUSE:302|>", "<|IMAGE_UNUSE:303|>", "<|IMAGE_UNUSE:304|>", "<|IMAGE_UNUSE:305|>", "<|IMAGE_UNUSE:306|>", "<|IMAGE_UNUSE:307|>", "<|IMAGE_UNUSE:308|>", "<|IMAGE_UNUSE:309|>", "<|IMAGE_UNUSE:310|>", "<|IMAGE_UNUSE:311|>", "<|IMAGE_UNUSE:312|>", "<|IMAGE_UNUSE:313|>", "<|IMAGE_UNUSE:314|>", "<|IMAGE_UNUSE:315|>", "<|IMAGE_UNUSE:316|>", "<|IMAGE_UNUSE:317|>", "<|IMAGE_UNUSE:318|>", "<|IMAGE_UNUSE:319|>", "<|IMAGE_UNUSE:320|>", "<|IMAGE_UNUSE:321|>", "<|IMAGE_UNUSE:322|>", "<|IMAGE_UNUSE:323|>", "<|IMAGE_UNUSE:324|>", "<|IMAGE_UNUSE:325|>", "<|IMAGE_UNUSE:326|>", "<|IMAGE_UNUSE:327|>", "<|IMAGE_UNUSE:328|>", "<|IMAGE_UNUSE:329|>", "<|IMAGE_UNUSE:330|>", "<|IMAGE_UNUSE:331|>", "<|IMAGE_UNUSE:332|>", "<|IMAGE_UNUSE:333|>", "<|IMAGE_UNUSE:334|>", "<|IMAGE_UNUSE:335|>", "<|IMAGE_UNUSE:336|>", "<|IMAGE_UNUSE:337|>", "<|IMAGE_UNUSE:338|>", "<|IMAGE_UNUSE:339|>", "<|IMAGE_UNUSE:340|>", "<|IMAGE_UNUSE:341|>", "<|IMAGE_UNUSE:342|>", "<|IMAGE_UNUSE:343|>", "<|IMAGE_UNUSE:344|>", "<|IMAGE_UNUSE:345|>", "<|IMAGE_UNUSE:346|>", "<|IMAGE_UNUSE:347|>", "<|IMAGE_UNUSE:348|>", "<|IMAGE_UNUSE:349|>", "<|IMAGE_UNUSE:350|>", "<|IMAGE_UNUSE:351|>", "<|IMAGE_UNUSE:352|>", "<|IMAGE_UNUSE:353|>", "<|IMAGE_UNUSE:354|>", "<|IMAGE_UNUSE:355|>", "<|IMAGE_UNUSE:356|>", "<|IMAGE_UNUSE:357|>", "<|IMAGE_UNUSE:358|>", "<|IMAGE_UNUSE:359|>", "<|IMAGE_UNUSE:360|>", "<|IMAGE_UNUSE:361|>", "<|IMAGE_UNUSE:362|>", "<|IMAGE_UNUSE:363|>", "<|IMAGE_UNUSE:364|>", "<|IMAGE_UNUSE:365|>", "<|IMAGE_UNUSE:366|>", "<|IMAGE_UNUSE:367|>", "<|IMAGE_UNUSE:368|>", "<|IMAGE_UNUSE:369|>", "<|IMAGE_UNUSE:370|>", "<|IMAGE_UNUSE:371|>", "<|IMAGE_UNUSE:372|>", "<|IMAGE_UNUSE:373|>", "<|IMAGE_UNUSE:374|>", "<|IMAGE_UNUSE:375|>", "<|IMAGE_UNUSE:376|>", "<|IMAGE_UNUSE:377|>", "<|IMAGE_UNUSE:378|>", "<|IMAGE_UNUSE:379|>", "<|IMAGE_UNUSE:380|>", "<|IMAGE_UNUSE:381|>", "<|IMAGE_UNUSE:382|>", "<|IMAGE_UNUSE:383|>", "<|IMAGE_UNUSE:384|>", "<|IMAGE_UNUSE:385|>", "<|IMAGE_UNUSE:386|>", "<|IMAGE_UNUSE:387|>", "<|IMAGE_UNUSE:388|>", "<|IMAGE_UNUSE:389|>", "<|IMAGE_UNUSE:390|>", "<|IMAGE_UNUSE:391|>", "<|IMAGE_UNUSE:392|>", "<|IMAGE_UNUSE:393|>", "<|IMAGE_UNUSE:394|>", "<|IMAGE_UNUSE:395|>", "<|IMAGE_UNUSE:396|>", "<|IMAGE_UNUSE:397|>", "<|IMAGE_UNUSE:398|>", "<|IMAGE_UNUSE:399|>", "<|IMAGE_UNUSE:400|>", "<|IMAGE_UNUSE:401|>", "<|IMAGE_UNUSE:402|>", "<|IMAGE_UNUSE:403|>", "<|IMAGE_UNUSE:404|>", "<|IMAGE_UNUSE:405|>", "<|IMAGE_UNUSE:406|>", "<|IMAGE_UNUSE:407|>", "<|IMAGE_UNUSE:408|>", "<|IMAGE_UNUSE:409|>", "<|IMAGE_UNUSE:410|>", "<|IMAGE_UNUSE:411|>", "<|IMAGE_UNUSE:412|>", "<|IMAGE_UNUSE:413|>", "<|IMAGE_UNUSE:414|>", "<|IMAGE_UNUSE:415|>", 
"<|IMAGE_UNUSE:416|>", "<|IMAGE_UNUSE:417|>", "<|IMAGE_UNUSE:418|>", "<|IMAGE_UNUSE:419|>", "<|IMAGE_UNUSE:420|>", "<|IMAGE_UNUSE:421|>", "<|IMAGE_UNUSE:422|>", "<|IMAGE_UNUSE:423|>", "<|IMAGE_UNUSE:424|>", "<|IMAGE_UNUSE:425|>", "<|IMAGE_UNUSE:426|>", "<|IMAGE_UNUSE:427|>", "<|IMAGE_UNUSE:428|>", "<|IMAGE_UNUSE:429|>", "<|IMAGE_UNUSE:430|>", "<|IMAGE_UNUSE:431|>", "<|IMAGE_UNUSE:432|>", "<|IMAGE_UNUSE:433|>", "<|IMAGE_UNUSE:434|>", "<|IMAGE_UNUSE:435|>", "<|IMAGE_UNUSE:436|>", "<|IMAGE_UNUSE:437|>", "<|IMAGE_UNUSE:438|>", "<|IMAGE_UNUSE:439|>", "<|IMAGE_UNUSE:440|>", "<|IMAGE_UNUSE:441|>", "<|IMAGE_UNUSE:442|>", "<|IMAGE_UNUSE:443|>", "<|IMAGE_UNUSE:444|>", "<|IMAGE_UNUSE:445|>", "<|IMAGE_UNUSE:446|>", "<|IMAGE_UNUSE:447|>", "<|IMAGE_UNUSE:448|>", "<|IMAGE_UNUSE:449|>", "<|IMAGE_UNUSE:450|>", "<|IMAGE_UNUSE:451|>", "<|IMAGE_UNUSE:452|>", "<|IMAGE_UNUSE:453|>", "<|IMAGE_UNUSE:454|>", "<|IMAGE_UNUSE:455|>", "<|IMAGE_UNUSE:456|>", "<|IMAGE_UNUSE:457|>", "<|IMAGE_UNUSE:458|>", "<|IMAGE_UNUSE:459|>", "<|IMAGE_UNUSE:460|>", "<|IMAGE_UNUSE:461|>", "<|IMAGE_UNUSE:462|>", "<|IMAGE_UNUSE:463|>", "<|IMAGE_UNUSE:464|>", "<|IMAGE_UNUSE:465|>", "<|IMAGE_UNUSE:466|>", "<|IMAGE_UNUSE:467|>", "<|IMAGE_UNUSE:468|>", "<|IMAGE_UNUSE:469|>", "<|IMAGE_UNUSE:470|>", "<|IMAGE_UNUSE:471|>", "<|IMAGE_UNUSE:472|>", "<|IMAGE_UNUSE:473|>", "<|IMAGE_UNUSE:474|>", "<|IMAGE_UNUSE:475|>", "<|IMAGE_UNUSE:476|>", "<|IMAGE_UNUSE:477|>", "<|IMAGE_UNUSE:478|>", "<|IMAGE_UNUSE:479|>", "<|IMAGE_UNUSE:480|>", "<|IMAGE_UNUSE:481|>", "<|IMAGE_UNUSE:482|>", "<|IMAGE_UNUSE:483|>", "<|IMAGE_UNUSE:484|>", "<|IMAGE_UNUSE:485|>", "<|IMAGE_UNUSE:486|>", "<|IMAGE_UNUSE:487|>", "<|IMAGE_UNUSE:488|>", "<|IMAGE_UNUSE:489|>", "<|IMAGE_UNUSE:490|>", "<|IMAGE_UNUSE:491|>", "<|IMAGE_UNUSE:492|>", "<|IMAGE_UNUSE:493|>", "<|IMAGE_UNUSE:494|>", "<|IMAGE_UNUSE:495|>", "<|IMAGE_UNUSE:496|>", "<|IMAGE_UNUSE:497|>", "<|IMAGE_UNUSE:498|>", "<|IMAGE_UNUSE:499|>", "<|IMAGE_UNUSE:500|>", "<|IMAGE_UNUSE:501|>", "<|IMAGE_UNUSE:502|>", "<|IMAGE_UNUSE:503|>", "<|IMAGE_UNUSE:504|>", "<|IMAGE_UNUSE:505|>", "<|IMAGE_UNUSE:506|>", "<|IMAGE_UNUSE:507|>", "<|IMAGE_UNUSE:508|>", "<|IMAGE_UNUSE:509|>", "<|IMAGE_UNUSE:510|>", "<|IMAGE_UNUSE:511|>", "<|IMAGE_UNUSE:512|>", "<|IMAGE_UNUSE:513|>", "<|IMAGE_UNUSE:514|>", "<|IMAGE_UNUSE:515|>", "<|IMAGE_UNUSE:516|>", "<|IMAGE_UNUSE:517|>", "<|IMAGE_UNUSE:518|>", "<|IMAGE_UNUSE:519|>", "<|IMAGE_UNUSE:520|>", "<|IMAGE_UNUSE:521|>", "<|IMAGE_UNUSE:522|>", "<|IMAGE_UNUSE:523|>", "<|IMAGE_UNUSE:524|>", "<|IMAGE_UNUSE:525|>", "<|IMAGE_UNUSE:526|>", "<|IMAGE_UNUSE:527|>", "<|IMAGE_UNUSE:528|>", "<|IMAGE_UNUSE:529|>", "<|IMAGE_UNUSE:530|>", "<|IMAGE_UNUSE:531|>", "<|IMAGE_UNUSE:532|>", "<|IMAGE_UNUSE:533|>", "<|IMAGE_UNUSE:534|>", "<|IMAGE_UNUSE:535|>", "<|IMAGE_UNUSE:536|>", "<|IMAGE_UNUSE:537|>", "<|IMAGE_UNUSE:538|>", "<|IMAGE_UNUSE:539|>", "<|IMAGE_UNUSE:540|>", "<|IMAGE_UNUSE:541|>", "<|IMAGE_UNUSE:542|>", "<|IMAGE_UNUSE:543|>", "<|IMAGE_UNUSE:544|>", "<|IMAGE_UNUSE:545|>", "<|IMAGE_UNUSE:546|>", "<|IMAGE_UNUSE:547|>", "<|IMAGE_UNUSE:548|>", "<|IMAGE_UNUSE:549|>", "<|IMAGE_UNUSE:550|>", "<|IMAGE_UNUSE:551|>", "<|IMAGE_UNUSE:552|>", "<|IMAGE_UNUSE:553|>", "<|IMAGE_UNUSE:554|>", "<|IMAGE_UNUSE:555|>", "<|IMAGE_UNUSE:556|>", "<|IMAGE_UNUSE:557|>", "<|IMAGE_UNUSE:558|>", "<|IMAGE_UNUSE:559|>", "<|IMAGE_UNUSE:560|>", "<|IMAGE_UNUSE:561|>", "<|IMAGE_UNUSE:562|>", "<|IMAGE_UNUSE:563|>", "<|IMAGE_UNUSE:564|>", "<|IMAGE_UNUSE:565|>", "<|IMAGE_UNUSE:566|>", "<|IMAGE_UNUSE:567|>", "<|IMAGE_UNUSE:568|>", "<|IMAGE_UNUSE:569|>", 
"<|IMAGE_UNUSE:570|>", "<|IMAGE_UNUSE:571|>", "<|IMAGE_UNUSE:572|>", "<|IMAGE_UNUSE:573|>", "<|IMAGE_UNUSE:574|>", "<|IMAGE_UNUSE:575|>", "<|IMAGE_UNUSE:576|>", "<|IMAGE_UNUSE:577|>", "<|IMAGE_UNUSE:578|>", "<|IMAGE_UNUSE:579|>", "<|IMAGE_UNUSE:580|>", "<|IMAGE_UNUSE:581|>", "<|IMAGE_UNUSE:582|>", "<|IMAGE_UNUSE:583|>", "<|IMAGE_UNUSE:584|>", "<|IMAGE_UNUSE:585|>", "<|IMAGE_UNUSE:586|>", "<|IMAGE_UNUSE:587|>", "<|IMAGE_UNUSE:588|>", "<|IMAGE_UNUSE:589|>", "<|IMAGE_UNUSE:590|>", "<|IMAGE_UNUSE:591|>", "<|IMAGE_UNUSE:592|>", "<|IMAGE_UNUSE:593|>", "<|IMAGE_UNUSE:594|>", "<|IMAGE_UNUSE:595|>", "<|IMAGE_UNUSE:596|>", "<|IMAGE_UNUSE:597|>", "<|IMAGE_UNUSE:598|>", "<|IMAGE_UNUSE:599|>", "<|IMAGE_UNUSE:600|>", "<|IMAGE_UNUSE:601|>", "<|IMAGE_UNUSE:602|>", "<|IMAGE_UNUSE:603|>", "<|IMAGE_UNUSE:604|>", "<|IMAGE_UNUSE:605|>", "<|IMAGE_UNUSE:606|>", "<|IMAGE_UNUSE:607|>", "<|IMAGE_UNUSE:608|>", "<|IMAGE_UNUSE:609|>", "<|IMAGE_UNUSE:610|>", "<|IMAGE_UNUSE:611|>", "<|IMAGE_UNUSE:612|>", "<|IMAGE_UNUSE:613|>", "<|IMAGE_UNUSE:614|>", "<|IMAGE_UNUSE:615|>", "<|IMAGE_UNUSE:616|>", "<|IMAGE_UNUSE:617|>", "<|IMAGE_UNUSE:618|>", "<|IMAGE_UNUSE:619|>", "<|IMAGE_UNUSE:620|>", "<|IMAGE_UNUSE:621|>", "<|IMAGE_UNUSE:622|>", "<|IMAGE_UNUSE:623|>", "<|IMAGE_UNUSE:624|>", "<|IMAGE_UNUSE:625|>", "<|IMAGE_UNUSE:626|>", "<|IMAGE_UNUSE:627|>", "<|IMAGE_UNUSE:628|>", "<|IMAGE_UNUSE:629|>", "<|IMAGE_UNUSE:630|>", "<|IMAGE_UNUSE:631|>", "<|IMAGE_UNUSE:632|>", "<|IMAGE_UNUSE:633|>", "<|IMAGE_UNUSE:634|>", "<|IMAGE_UNUSE:635|>", "<|IMAGE_UNUSE:636|>", "<|IMAGE_UNUSE:637|>", "<|IMAGE_UNUSE:638|>", "<|IMAGE_UNUSE:639|>", "<|IMAGE_UNUSE:640|>", "<|IMAGE_UNUSE:641|>", "<|IMAGE_UNUSE:642|>", "<|IMAGE_UNUSE:643|>", "<|IMAGE_UNUSE:644|>", "<|IMAGE_UNUSE:645|>", "<|IMAGE_UNUSE:646|>", "<|IMAGE_UNUSE:647|>", "<|IMAGE_UNUSE:648|>", "<|IMAGE_UNUSE:649|>", "<|IMAGE_UNUSE:650|>", "<|IMAGE_UNUSE:651|>", "<|IMAGE_UNUSE:652|>", "<|IMAGE_UNUSE:653|>", "<|IMAGE_UNUSE:654|>", "<|IMAGE_UNUSE:655|>", "<|IMAGE_UNUSE:656|>", "<|IMAGE_UNUSE:657|>", "<|IMAGE_UNUSE:658|>", "<|IMAGE_UNUSE:659|>", "<|IMAGE_UNUSE:660|>", "<|IMAGE_UNUSE:661|>", "<|IMAGE_UNUSE:662|>", "<|IMAGE_UNUSE:663|>", "<|IMAGE_UNUSE:664|>", "<|IMAGE_UNUSE:665|>", "<|IMAGE_UNUSE:666|>", "<|IMAGE_UNUSE:667|>", "<|IMAGE_UNUSE:668|>", "<|IMAGE_UNUSE:669|>", "<|IMAGE_UNUSE:670|>", "<|IMAGE_UNUSE:671|>", "<|IMAGE_UNUSE:672|>", "<|IMAGE_UNUSE:673|>", "<|IMAGE_UNUSE:674|>", "<|IMAGE_UNUSE:675|>", "<|IMAGE_UNUSE:676|>", "<|IMAGE_UNUSE:677|>", "<|IMAGE_UNUSE:678|>", "<|IMAGE_UNUSE:679|>", "<|IMAGE_UNUSE:680|>", "<|IMAGE_UNUSE:681|>", "<|IMAGE_UNUSE:682|>", "<|IMAGE_UNUSE:683|>", "<|IMAGE_UNUSE:684|>", "<|IMAGE_UNUSE:685|>", "<|IMAGE_UNUSE:686|>", "<|IMAGE_UNUSE:687|>", "<|IMAGE_UNUSE:688|>", "<|IMAGE_UNUSE:689|>", "<|IMAGE_UNUSE:690|>", "<|IMAGE_UNUSE:691|>", "<|IMAGE_UNUSE:692|>", "<|IMAGE_UNUSE:693|>", "<|IMAGE_UNUSE:694|>", "<|IMAGE_UNUSE:695|>", "<|IMAGE_UNUSE:696|>", "<|IMAGE_UNUSE:697|>", "<|IMAGE_UNUSE:698|>", "<|IMAGE_UNUSE:699|>", "<|IMAGE_UNUSE:700|>", "<|IMAGE_UNUSE:701|>", "<|IMAGE_UNUSE:702|>", "<|IMAGE_UNUSE:703|>", "<|IMAGE_UNUSE:704|>", "<|IMAGE_UNUSE:705|>", "<|IMAGE_UNUSE:706|>", "<|IMAGE_UNUSE:707|>", "<|IMAGE_UNUSE:708|>", "<|IMAGE_UNUSE:709|>", "<|IMAGE_UNUSE:710|>", "<|IMAGE_UNUSE:711|>", "<|IMAGE_UNUSE:712|>", "<|IMAGE_UNUSE:713|>", "<|IMAGE_UNUSE:714|>", "<|IMAGE_UNUSE:715|>", "<|IMAGE_UNUSE:716|>", "<|IMAGE_UNUSE:717|>", "<|IMAGE_UNUSE:718|>", "<|IMAGE_UNUSE:719|>", "<|IMAGE_UNUSE:720|>", "<|IMAGE_UNUSE:721|>", "<|IMAGE_UNUSE:722|>", "<|IMAGE_UNUSE:723|>", 
"<|IMAGE_UNUSE:724|>", "<|IMAGE_UNUSE:725|>", "<|IMAGE_UNUSE:726|>", "<|IMAGE_UNUSE:727|>", "<|IMAGE_UNUSE:728|>", "<|IMAGE_UNUSE:729|>", "<|IMAGE_UNUSE:730|>", "<|IMAGE_UNUSE:731|>", "<|IMAGE_UNUSE:732|>", "<|IMAGE_UNUSE:733|>", "<|IMAGE_UNUSE:734|>", "<|IMAGE_UNUSE:735|>", "<|IMAGE_UNUSE:736|>", "<|IMAGE_UNUSE:737|>", "<|IMAGE_UNUSE:738|>", "<|IMAGE_UNUSE:739|>", "<|IMAGE_UNUSE:740|>", "<|IMAGE_UNUSE:741|>", "<|IMAGE_UNUSE:742|>", "<|IMAGE_UNUSE:743|>", "<|IMAGE_UNUSE:744|>", "<|IMAGE_UNUSE:745|>", "<|IMAGE_UNUSE:746|>", "<|IMAGE_UNUSE:747|>", "<|IMAGE_UNUSE:748|>", "<|IMAGE_UNUSE:749|>", "<|IMAGE_UNUSE:750|>", "<|IMAGE_UNUSE:751|>", "<|IMAGE_UNUSE:752|>", "<|IMAGE_UNUSE:753|>", "<|IMAGE_UNUSE:754|>", "<|IMAGE_UNUSE:755|>", "<|IMAGE_UNUSE:756|>", "<|IMAGE_UNUSE:757|>", "<|IMAGE_UNUSE:758|>", "<|IMAGE_UNUSE:759|>", "<|IMAGE_UNUSE:760|>", "<|IMAGE_UNUSE:761|>", "<|IMAGE_UNUSE:762|>", "<|IMAGE_UNUSE:763|>", "<|IMAGE_UNUSE:764|>", "<|IMAGE_UNUSE:765|>", "<|IMAGE_UNUSE:766|>", "<|IMAGE_UNUSE:767|>", "<|IMAGE_UNUSE:768|>", "<|IMAGE_UNUSE:769|>", "<|IMAGE_UNUSE:770|>", "<|IMAGE_UNUSE:771|>", "<|IMAGE_UNUSE:772|>", "<|IMAGE_UNUSE:773|>", "<|IMAGE_UNUSE:774|>", "<|IMAGE_UNUSE:775|>", "<|IMAGE_UNUSE:776|>", "<|IMAGE_UNUSE:777|>", "<|IMAGE_UNUSE:778|>", "<|IMAGE_UNUSE:779|>", "<|IMAGE_UNUSE:780|>", "<|IMAGE_UNUSE:781|>", "<|IMAGE_UNUSE:782|>", "<|IMAGE_UNUSE:783|>", "<|IMAGE_UNUSE:784|>", "<|IMAGE_UNUSE:785|>", "<|IMAGE_UNUSE:786|>", "<|IMAGE_UNUSE:787|>", "<|IMAGE_UNUSE:788|>", "<|IMAGE_UNUSE:789|>", "<|IMAGE_UNUSE:790|>", "<|IMAGE_UNUSE:791|>", "<|IMAGE_UNUSE:792|>", "<|IMAGE_UNUSE:793|>", "<|IMAGE_UNUSE:794|>", "<|IMAGE_UNUSE:795|>", "<|IMAGE_UNUSE:796|>", "<|IMAGE_UNUSE:797|>", "<|IMAGE_UNUSE:798|>", "<|IMAGE_UNUSE:799|>", "<|IMAGE_UNUSE:800|>", "<|IMAGE_UNUSE:801|>", "<|IMAGE_UNUSE:802|>", "<|IMAGE_UNUSE:803|>", "<|IMAGE_UNUSE:804|>", "<|IMAGE_UNUSE:805|>", "<|IMAGE_UNUSE:806|>", "<|IMAGE_UNUSE:807|>", "<|IMAGE_UNUSE:808|>", "<|IMAGE_UNUSE:809|>", "<|IMAGE_UNUSE:810|>", "<|IMAGE_UNUSE:811|>", "<|IMAGE_UNUSE:812|>", "<|IMAGE_UNUSE:813|>", "<|IMAGE_UNUSE:814|>", "<|IMAGE_UNUSE:815|>", "<|IMAGE_UNUSE:816|>", "<|IMAGE_UNUSE:817|>", "<|IMAGE_UNUSE:818|>", "<|IMAGE_UNUSE:819|>", "<|IMAGE_UNUSE:820|>", "<|IMAGE_UNUSE:821|>", "<|IMAGE_UNUSE:822|>", "<|IMAGE_UNUSE:823|>", "<|IMAGE_UNUSE:824|>", "<|IMAGE_UNUSE:825|>", "<|IMAGE_UNUSE:826|>", "<|IMAGE_UNUSE:827|>", "<|IMAGE_UNUSE:828|>", "<|IMAGE_UNUSE:829|>", "<|IMAGE_UNUSE:830|>", "<|IMAGE_UNUSE:831|>", "<|IMAGE_UNUSE:832|>", "<|IMAGE_UNUSE:833|>", "<|IMAGE_UNUSE:834|>", "<|IMAGE_UNUSE:835|>", "<|IMAGE_UNUSE:836|>", "<|IMAGE_UNUSE:837|>", "<|IMAGE_UNUSE:838|>", "<|IMAGE_UNUSE:839|>", "<|IMAGE_UNUSE:840|>", "<|IMAGE_UNUSE:841|>", "<|IMAGE_UNUSE:842|>", "<|IMAGE_UNUSE:843|>", "<|IMAGE_UNUSE:844|>", "<|IMAGE_UNUSE:845|>", "<|IMAGE_UNUSE:846|>", "<|IMAGE_UNUSE:847|>", "<|IMAGE_UNUSE:848|>", "<|IMAGE_UNUSE:849|>", "<|IMAGE_UNUSE:850|>", "<|IMAGE_UNUSE:851|>", "<|IMAGE_UNUSE:852|>", "<|IMAGE_UNUSE:853|>", "<|IMAGE_UNUSE:854|>", "<|IMAGE_UNUSE:855|>", "<|IMAGE_UNUSE:856|>", "<|IMAGE_UNUSE:857|>", "<|IMAGE_UNUSE:858|>", "<|IMAGE_UNUSE:859|>", "<|IMAGE_UNUSE:860|>", "<|IMAGE_UNUSE:861|>", "<|IMAGE_UNUSE:862|>", "<|IMAGE_UNUSE:863|>", "<|IMAGE_UNUSE:864|>", "<|IMAGE_UNUSE:865|>", "<|IMAGE_UNUSE:866|>", "<|IMAGE_UNUSE:867|>", "<|IMAGE_UNUSE:868|>", "<|IMAGE_UNUSE:869|>", "<|IMAGE_UNUSE:870|>", "<|IMAGE_UNUSE:871|>", "<|IMAGE_UNUSE:872|>", "<|IMAGE_UNUSE:873|>", "<|IMAGE_UNUSE:874|>", "<|IMAGE_UNUSE:875|>", "<|IMAGE_UNUSE:876|>", "<|IMAGE_UNUSE:877|>", 
"<|IMAGE_UNUSE:878|>", "<|IMAGE_UNUSE:879|>", "<|IMAGE_UNUSE:880|>", "<|IMAGE_UNUSE:881|>", "<|IMAGE_UNUSE:882|>", "<|IMAGE_UNUSE:883|>", "<|IMAGE_UNUSE:884|>", "<|IMAGE_UNUSE:885|>", "<|IMAGE_UNUSE:886|>", "<|IMAGE_UNUSE:887|>", "<|IMAGE_UNUSE:888|>", "<|IMAGE_UNUSE:889|>", "<|IMAGE_UNUSE:890|>", "<|IMAGE_UNUSE:891|>", "<|IMAGE_UNUSE:892|>", "<|IMAGE_UNUSE:893|>", "<|IMAGE_UNUSE:894|>", "<|IMAGE_UNUSE:895|>", "<|IMAGE_UNUSE:896|>", "<|IMAGE_UNUSE:897|>", "<|IMAGE_UNUSE:898|>", "<|IMAGE_UNUSE:899|>", "<|IMAGE_UNUSE:900|>", "<|IMAGE_UNUSE:901|>", "<|IMAGE_UNUSE:902|>", "<|IMAGE_UNUSE:903|>", "<|IMAGE_UNUSE:904|>", "<|IMAGE_UNUSE:905|>", "<|IMAGE_UNUSE:906|>", "<|IMAGE_UNUSE:907|>", "<|IMAGE_UNUSE:908|>", "<|IMAGE_UNUSE:909|>", "<|IMAGE_UNUSE:910|>", "<|IMAGE_UNUSE:911|>", "<|IMAGE_UNUSE:912|>", "<|IMAGE_UNUSE:913|>", "<|IMAGE_UNUSE:914|>", "<|IMAGE_UNUSE:915|>", "<|IMAGE_UNUSE:916|>", "<|IMAGE_UNUSE:917|>", "<|IMAGE_UNUSE:918|>", "<|IMAGE_UNUSE:919|>", "<|IMAGE_UNUSE:920|>", "<|IMAGE_UNUSE:921|>", "<|IMAGE_UNUSE:922|>", "<|IMAGE_UNUSE:923|>", "<|IMAGE_UNUSE:924|>", "<|IMAGE_UNUSE:925|>", "<|IMAGE_UNUSE:926|>", "<|IMAGE_UNUSE:927|>", "<|IMAGE_UNUSE:928|>", "<|IMAGE_UNUSE:929|>", "<|IMAGE_UNUSE:930|>", "<|IMAGE_UNUSE:931|>", "<|IMAGE_UNUSE:932|>", "<|IMAGE_UNUSE:933|>", "<|IMAGE_UNUSE:934|>", "<|IMAGE_UNUSE:935|>", "<|IMAGE_UNUSE:936|>", "<|IMAGE_UNUSE:937|>", "<|IMAGE_UNUSE:938|>", "<|IMAGE_UNUSE:939|>", "<|IMAGE_UNUSE:940|>", "<|IMAGE_UNUSE:941|>", "<|IMAGE_UNUSE:942|>", "<|IMAGE_UNUSE:943|>", "<|IMAGE_UNUSE:944|>", "<|IMAGE_UNUSE:945|>", "<|IMAGE_UNUSE:946|>", "<|IMAGE_UNUSE:947|>", "<|IMAGE_UNUSE:948|>", "<|IMAGE_UNUSE:949|>", "<|IMAGE_UNUSE:950|>", "<|IMAGE_UNUSE:951|>", "<|IMAGE_UNUSE:952|>", "<|IMAGE_UNUSE:953|>", "<|IMAGE_UNUSE:954|>", "<|IMAGE_UNUSE:955|>", "<|IMAGE_UNUSE:956|>", "<|IMAGE_UNUSE:957|>", "<|IMAGE_UNUSE:958|>", "<|IMAGE_UNUSE:959|>", "<|IMAGE_UNUSE:960|>", "<|IMAGE_UNUSE:961|>", "<|IMAGE_UNUSE:962|>", "<|IMAGE_UNUSE:963|>", "<|IMAGE_UNUSE:964|>", "<|IMAGE_UNUSE:965|>", "<|IMAGE_UNUSE:966|>", "<|IMAGE_UNUSE:967|>", "<|IMAGE_UNUSE:968|>", "<|IMAGE_UNUSE:969|>", "<|IMAGE_UNUSE:970|>", "<|IMAGE_UNUSE:971|>", "<|IMAGE_UNUSE:972|>", "<|IMAGE_UNUSE:973|>", "<|IMAGE_UNUSE:974|>", "<|IMAGE_UNUSE:975|>", "<|IMAGE_UNUSE:976|>", "<|IMAGE_UNUSE:977|>", "<|IMAGE_UNUSE:978|>", "<|IMAGE_UNUSE:979|>", "<|IMAGE_UNUSE:980|>", "<|IMAGE_UNUSE:981|>", "<|IMAGE_UNUSE:982|>", "<|IMAGE_UNUSE:983|>", "<|IMAGE_UNUSE:984|>", "<|IMAGE_UNUSE:985|>", "<|IMAGE_UNUSE:986|>", "<|IMAGE_UNUSE:987|>", "<|IMAGE_UNUSE:988|>", "<|IMAGE_UNUSE:989|>", "<|IMAGE_UNUSE:990|>", "<|IMAGE_UNUSE:991|>", "<|IMAGE_UNUSE:992|>", "<|IMAGE_UNUSE:993|>", "<|IMAGE_UNUSE:994|>", "<|IMAGE_UNUSE:995|>", "<|IMAGE_UNUSE:996|>", "<|IMAGE_UNUSE:997|>", "<|IMAGE_UNUSE:998|>", "<|IMAGE_UNUSE:999|>", "<|IMAGE_UNUSE:1000|>", "<|IMAGE_UNUSE:1001|>", "<|IMAGE_UNUSE:1002|>", "<|IMAGE_UNUSE:1003|>", "<|IMAGE_UNUSE:1004|>", "<|IMAGE_UNUSE:1005|>", "<|IMAGE_UNUSE:1006|>", "<|IMAGE_UNUSE:1007|>", "<|IMAGE_UNUSE:1008|>", "<|IMAGE_UNUSE:1009|>", "<|IMAGE_UNUSE:1010|>", "<|IMAGE_UNUSE:1011|>", "<|IMAGE_UNUSE:1012|>", "<|IMAGE_UNUSE:1013|>", "<|IMAGE_UNUSE:1014|>", "<|IMAGE_UNUSE:1015|>", "<|IMAGE_UNUSE:1016|>", "<|IMAGE_UNUSE:1017|>", "<|IMAGE_UNUSE:1018|>", "<|IMAGE_UNUSE:1019|>", "<|IMAGE_UNUSE:1020|>", "<|IMAGE_UNUSE:1021|>", "<|IMAGE_UNUSE:1022|>", "<|IMAGE_UNUSE:1023|>", "<|IMAGE_UNUSE:1024|>", "<|IMAGE_UNUSE:1025|>", "<|IMAGE_UNUSE:1026|>", "<|IMAGE_UNUSE:1027|>", "<|IMAGE_UNUSE:1028|>", "<|IMAGE_UNUSE:1029|>", "<|IMAGE_UNUSE:1030|>", 
"<|IMAGE_UNUSE:1031|>", "<|IMAGE_UNUSE:1032|>", "<|IMAGE_UNUSE:1033|>", "<|IMAGE_UNUSE:1034|>", "<|IMAGE_UNUSE:1035|>", "<|IMAGE_UNUSE:1036|>", "<|IMAGE_UNUSE:1037|>", "<|IMAGE_UNUSE:1038|>", "<|STREAMING_BEGIN|>", "<|STREAMING_END|>", "<|STREAMING_TEXT_END|>", "<|AUDIO_UNUSE:0|>", "<|AUDIO_UNUSE:1|>", "<|AUDIO_UNUSE:2|>", "<|AUDIO_UNUSE:3|>", "<|AUDIO_UNUSE:4|>", "<|AUDIO_UNUSE:5|>", "<|AUDIO_UNUSE:6|>", "<|AUDIO_UNUSE:7|>", "<|AUDIO_UNUSE:8|>", "<|AUDIO_UNUSE:9|>", "<|AUDIO_UNUSE:10|>", "<|AUDIO_UNUSE:11|>", "<|AUDIO_UNUSE:12|>", "<|AUDIO_UNUSE:13|>", "<|AUDIO_UNUSE:14|>", "<|AUDIO_UNUSE:15|>", "<|AUDIO_UNUSE:16|>", "<|AUDIO_UNUSE:17|>", "<|AUDIO_UNUSE:18|>", "<|AUDIO_UNUSE:19|>", "<|AUDIO_UNUSE:20|>", "<|AUDIO_UNUSE:21|>", "<|AUDIO_UNUSE:22|>", "<|AUDIO_UNUSE:23|>", "<|AUDIO_UNUSE:24|>", "<|AUDIO_UNUSE:25|>", "<|AUDIO_UNUSE:26|>", "<|AUDIO_UNUSE:27|>", "<|AUDIO_UNUSE:28|>", "<|AUDIO_UNUSE:29|>", "<|AUDIO_UNUSE:30|>", "<|AUDIO_UNUSE:31|>", "<|AUDIO_UNUSE:32|>", "<|AUDIO_UNUSE:33|>", "<|AUDIO_UNUSE:34|>", "<|AUDIO_UNUSE:35|>", "<|AUDIO_UNUSE:36|>", "<|AUDIO_UNUSE:37|>", "<|AUDIO_UNUSE:38|>", "<|AUDIO_UNUSE:39|>", "<|AUDIO_UNUSE:40|>", "<|AUDIO_UNUSE:41|>", "<|AUDIO_UNUSE:42|>", "<|AUDIO_UNUSE:43|>", "<|AUDIO_UNUSE:44|>", "<|AUDIO_UNUSE:45|>", "<|AUDIO_UNUSE:46|>", "<|AUDIO_UNUSE:47|>", "<|AUDIO_UNUSE:48|>", "<|AUDIO_UNUSE:49|>", "<|AUDIO_UNUSE:50|>", "<|AUDIO_UNUSE:51|>", "<|AUDIO_UNUSE:52|>", "<|AUDIO_UNUSE:53|>", "<|AUDIO_UNUSE:54|>", "<|AUDIO_UNUSE:55|>", "<|AUDIO_UNUSE:56|>", "<|AUDIO_UNUSE:57|>", "<|AUDIO_UNUSE:58|>", "<|AUDIO_UNUSE:59|>", "<|AUDIO_UNUSE:60|>", "<|AUDIO_UNUSE:61|>", "<|AUDIO_UNUSE:62|>", "<|AUDIO_UNUSE:63|>", "<|AUDIO_UNUSE:64|>", "<|AUDIO_UNUSE:65|>", "<|AUDIO_UNUSE:66|>", "<|AUDIO_UNUSE:67|>", "<|AUDIO_UNUSE:68|>", "<|AUDIO_UNUSE:69|>", "<|AUDIO_UNUSE:70|>", "<|AUDIO_UNUSE:71|>", "<|AUDIO_UNUSE:72|>", "<|AUDIO_UNUSE:73|>", "<|AUDIO_UNUSE:74|>", "<|AUDIO_UNUSE:75|>", "<|AUDIO_UNUSE:76|>", "<|AUDIO_UNUSE:77|>", "<|AUDIO_UNUSE:78|>", "<|AUDIO_UNUSE:79|>", "<|AUDIO_UNUSE:80|>", "<|AUDIO_UNUSE:81|>", "<|AUDIO_UNUSE:82|>", "<|AUDIO_UNUSE:83|>", "<|AUDIO_UNUSE:84|>", "<|AUDIO_UNUSE:85|>", "<|AUDIO_UNUSE:86|>", "<|AUDIO_UNUSE:87|>", "<|AUDIO_UNUSE:88|>", "<|AUDIO_UNUSE:89|>", "<|AUDIO_UNUSE:90|>", "<|AUDIO_UNUSE:91|>", "<|AUDIO_UNUSE:92|>", "<|AUDIO_UNUSE:93|>", "<|AUDIO_UNUSE:94|>", "<|AUDIO_UNUSE:95|>", "<|AUDIO_UNUSE:96|>", "<|AUDIO_UNUSE:97|>", "<|AUDIO_UNUSE:98|>", "<|AUDIO_UNUSE:99|>", "<|AUDIO_UNUSE:100|>", "<|AUDIO_UNUSE:101|>", "<|AUDIO_UNUSE:102|>", "<|AUDIO_UNUSE:103|>", "<|AUDIO_UNUSE:104|>", "<|AUDIO_UNUSE:105|>", "<|AUDIO_UNUSE:106|>", "<|AUDIO_UNUSE:107|>", "<|AUDIO_UNUSE:108|>", "<|AUDIO_UNUSE:109|>", "<|AUDIO_UNUSE:110|>", "<|AUDIO_UNUSE:111|>", "<|AUDIO_UNUSE:112|>", "<|AUDIO_UNUSE:113|>", "<|AUDIO_UNUSE:114|>", "<|AUDIO_UNUSE:115|>", "<|AUDIO_UNUSE:116|>", "<|AUDIO_UNUSE:117|>", "<|AUDIO_UNUSE:118|>", "<|AUDIO_UNUSE:119|>", "<|AUDIO_UNUSE:120|>", "<|AUDIO_UNUSE:121|>", "<|AUDIO_UNUSE:122|>", "<|AUDIO_UNUSE:123|>", "<|AUDIO_UNUSE:124|>", "<|AUDIO_UNUSE:125|>", "<|AUDIO_UNUSE:126|>", "<|AUDIO_UNUSE:127|>", "<|AUDIO_UNUSE:128|>", "<|AUDIO_UNUSE:129|>", "<|AUDIO_UNUSE:130|>", "<|AUDIO_UNUSE:131|>", "<|AUDIO_UNUSE:132|>", "<|AUDIO_UNUSE:133|>", "<|AUDIO_UNUSE:134|>", "<|AUDIO_UNUSE:135|>", "<|AUDIO_UNUSE:136|>", "<|AUDIO_UNUSE:137|>", "<|AUDIO_UNUSE:138|>", "<|AUDIO_UNUSE:139|>", "<|AUDIO_UNUSE:140|>", "<|AUDIO_UNUSE:141|>", "<|AUDIO_UNUSE:142|>", "<|AUDIO_UNUSE:143|>", "<|AUDIO_UNUSE:144|>", "<|AUDIO_UNUSE:145|>", "<|AUDIO_UNUSE:146|>", 
"<|AUDIO_UNUSE:147|>", "<|AUDIO_UNUSE:148|>", "<|AUDIO_UNUSE:149|>", "<|AUDIO_UNUSE:150|>", "<|AUDIO_UNUSE:151|>", "<|AUDIO_UNUSE:152|>", "<|AUDIO_UNUSE:153|>", "<|AUDIO_UNUSE:154|>", "<|AUDIO_UNUSE:155|>", "<|AUDIO_UNUSE:156|>", "<|AUDIO_UNUSE:157|>", "<|AUDIO_UNUSE:158|>", "<|AUDIO_UNUSE:159|>", "<|AUDIO_UNUSE:160|>", "<|AUDIO_UNUSE:161|>", "<|AUDIO_UNUSE:162|>", "<|AUDIO_UNUSE:163|>", "<|AUDIO_UNUSE:164|>", "<|AUDIO_UNUSE:165|>", "<|AUDIO_UNUSE:166|>", "<|AUDIO_UNUSE:167|>", "<|AUDIO_UNUSE:168|>", "<|AUDIO_UNUSE:169|>", "<|AUDIO_UNUSE:170|>", "<|AUDIO_UNUSE:171|>", "<|AUDIO_UNUSE:172|>", "<|AUDIO_UNUSE:173|>", "<|AUDIO_UNUSE:174|>", "<|AUDIO_UNUSE:175|>", "<|AUDIO_UNUSE:176|>", "<|AUDIO_UNUSE:177|>", "<|AUDIO_UNUSE:178|>", "<|AUDIO_UNUSE:179|>", "<|AUDIO_UNUSE:180|>", "<|AUDIO_UNUSE:181|>", "<|AUDIO_UNUSE:182|>", "<|AUDIO_UNUSE:183|>", "<|AUDIO_UNUSE:184|>", "<|AUDIO_UNUSE:185|>", "<|AUDIO_UNUSE:186|>", "<|AUDIO_UNUSE:187|>", "<|AUDIO_UNUSE:188|>", "<|AUDIO_UNUSE:189|>", "<|AUDIO_UNUSE:190|>", "<|AUDIO_UNUSE:191|>", "<|AUDIO_UNUSE:192|>", "<|AUDIO_UNUSE:193|>", "<|AUDIO_UNUSE:194|>", "<|AUDIO_UNUSE:195|>", "<|AUDIO_UNUSE:196|>", "<|AUDIO_UNUSE:197|>", "<|AUDIO_UNUSE:198|>", "<|AUDIO_UNUSE:199|>", "<|AUDIO_UNUSE:200|>", "<|AUDIO_UNUSE:201|>", "<|AUDIO_UNUSE:202|>", "<|AUDIO_UNUSE:203|>", "<|AUDIO_UNUSE:204|>", "<|AUDIO_UNUSE:205|>", "<|AUDIO_UNUSE:206|>", "<|AUDIO_UNUSE:207|>", "<|AUDIO_UNUSE:208|>", "<|AUDIO_UNUSE:209|>", "<|AUDIO_UNUSE:210|>", "<|AUDIO_UNUSE:211|>", "<|AUDIO_UNUSE:212|>", "<|AUDIO_UNUSE:213|>", "<|AUDIO_UNUSE:214|>", "<|AUDIO_UNUSE:215|>", "<|AUDIO_UNUSE:216|>", "<|AUDIO_UNUSE:217|>", "<|AUDIO_UNUSE:218|>", "<|AUDIO_UNUSE:219|>", "<|AUDIO_UNUSE:220|>", "<|AUDIO_UNUSE:221|>", "<|AUDIO_UNUSE:222|>", "<|AUDIO_UNUSE:223|>", "<|AUDIO_UNUSE:224|>", "<|AUDIO_UNUSE:225|>", "<|AUDIO_UNUSE:226|>", "<|AUDIO_UNUSE:227|>", "<|AUDIO_UNUSE:228|>", "<|AUDIO_UNUSE:229|>", "<|AUDIO_UNUSE:230|>", "<|AUDIO_UNUSE:231|>", "<|AUDIO_UNUSE:232|>", "<|AUDIO_UNUSE:233|>", "<|AUDIO_UNUSE:234|>", "<|AUDIO_UNUSE:235|>", "<|AUDIO_UNUSE:236|>", "<|AUDIO_UNUSE:237|>", "<|AUDIO_UNUSE:238|>", "<|AUDIO_UNUSE:239|>", "<|AUDIO_UNUSE:240|>", "<|AUDIO_UNUSE:241|>", "<|AUDIO_UNUSE:242|>", "<|AUDIO_UNUSE:243|>", "<|AUDIO_UNUSE:244|>", "<|AUDIO_UNUSE:245|>", "<|AUDIO_UNUSE:246|>", "<|AUDIO_UNUSE:247|>", "<|AUDIO_UNUSE:248|>", "<|AUDIO_UNUSE:249|>", "<|AUDIO_UNUSE:250|>", "<|AUDIO_UNUSE:251|>", "<|AUDIO_UNUSE:252|>", "<|AUDIO_UNUSE:253|>", "<|AUDIO_UNUSE:254|>", "<|AUDIO_UNUSE:255|>", "<|AUDIO_UNUSE:256|>", "<|AUDIO_UNUSE:257|>", "<|AUDIO_UNUSE:258|>", "<|AUDIO_UNUSE:259|>", "<|AUDIO_UNUSE:260|>", "<|AUDIO_UNUSE:261|>", "<|AUDIO_UNUSE:262|>", "<|AUDIO_UNUSE:263|>", "<|AUDIO_UNUSE:264|>", "<|AUDIO_UNUSE:265|>", "<|AUDIO_UNUSE:266|>", "<|AUDIO_UNUSE:267|>", "<|AUDIO_UNUSE:268|>", "<|AUDIO_UNUSE:269|>", "<|AUDIO_UNUSE:270|>", "<|AUDIO_UNUSE:271|>", "<|AUDIO_UNUSE:272|>", "<|AUDIO_UNUSE:273|>", "<|AUDIO_UNUSE:274|>", "<|AUDIO_UNUSE:275|>", "<|AUDIO_UNUSE:276|>", "<|AUDIO_UNUSE:277|>", "<|AUDIO_UNUSE:278|>", "<|AUDIO_UNUSE:279|>", "<|AUDIO_UNUSE:280|>", "<|AUDIO_UNUSE:281|>", "<|AUDIO_UNUSE:282|>", "<|AUDIO_UNUSE:283|>", "<|AUDIO_UNUSE:284|>", "<|AUDIO_UNUSE:285|>", "<|AUDIO_UNUSE:286|>", "<|AUDIO_UNUSE:287|>", "<|AUDIO_UNUSE:288|>", "<|AUDIO_UNUSE:289|>", "<|AUDIO_UNUSE:290|>", "<|AUDIO_UNUSE:291|>", "<|AUDIO_UNUSE:292|>", "<|AUDIO_UNUSE:293|>", "<|AUDIO_UNUSE:294|>", "<|AUDIO_UNUSE:295|>", "<|AUDIO_UNUSE:296|>", "<|AUDIO_UNUSE:297|>", "<|AUDIO_UNUSE:298|>", "<|AUDIO_UNUSE:299|>", "<|AUDIO_UNUSE:300|>", 
"<|AUDIO_UNUSE:301|>", "<|AUDIO_UNUSE:302|>", "<|AUDIO_UNUSE:303|>", "<|AUDIO_UNUSE:304|>", "<|AUDIO_UNUSE:305|>", "<|AUDIO_UNUSE:306|>", "<|AUDIO_UNUSE:307|>", "<|AUDIO_UNUSE:308|>", "<|AUDIO_UNUSE:309|>", "<|AUDIO_UNUSE:310|>", "<|AUDIO_UNUSE:311|>", "<|AUDIO_UNUSE:312|>", "<|AUDIO_UNUSE:313|>", "<|AUDIO_UNUSE:314|>", "<|AUDIO_UNUSE:315|>", "<|AUDIO_UNUSE:316|>", "<|AUDIO_UNUSE:317|>", "<|AUDIO_UNUSE:318|>", "<|AUDIO_UNUSE:319|>", "<|AUDIO_UNUSE:320|>", "<|AUDIO_UNUSE:321|>", "<|AUDIO_UNUSE:322|>", "<|AUDIO_UNUSE:323|>", "<|AUDIO_UNUSE:324|>", "<|AUDIO_UNUSE:325|>", "<|AUDIO_UNUSE:326|>", "<|AUDIO_UNUSE:327|>", "<|AUDIO_UNUSE:328|>", "<|AUDIO_UNUSE:329|>", "<|AUDIO_UNUSE:330|>", "<|AUDIO_UNUSE:331|>", "<|AUDIO_UNUSE:332|>", "<|AUDIO_UNUSE:333|>", "<|AUDIO_UNUSE:334|>", "<|AUDIO_UNUSE:335|>", "<|AUDIO_UNUSE:336|>", "<|AUDIO_UNUSE:337|>", "<|AUDIO_UNUSE:338|>", "<|AUDIO_UNUSE:339|>", "<|AUDIO_UNUSE:340|>", "<|AUDIO_UNUSE:341|>", "<|AUDIO_UNUSE:342|>", "<|AUDIO_UNUSE:343|>", "<|AUDIO_UNUSE:344|>", "<|AUDIO_UNUSE:345|>", "<|AUDIO_UNUSE:346|>", "<|AUDIO_UNUSE:347|>", "<|AUDIO_UNUSE:348|>", "<|AUDIO_UNUSE:349|>", "<|AUDIO_UNUSE:350|>", "<|AUDIO_UNUSE:351|>", "<|AUDIO_UNUSE:352|>", "<|AUDIO_UNUSE:353|>", "<|AUDIO_UNUSE:354|>", "<|AUDIO_UNUSE:355|>", "<|AUDIO_UNUSE:356|>", "<|AUDIO_UNUSE:357|>", "<|AUDIO_UNUSE:358|>", "<|AUDIO_UNUSE:359|>", "<|AUDIO_UNUSE:360|>", "<|AUDIO_UNUSE:361|>", "<|AUDIO_UNUSE:362|>", "<|AUDIO_UNUSE:363|>", "<|AUDIO_UNUSE:364|>", "<|AUDIO_UNUSE:365|>", "<|AUDIO_UNUSE:366|>", "<|AUDIO_UNUSE:367|>", "<|AUDIO_UNUSE:368|>", "<|AUDIO_UNUSE:369|>", "<|AUDIO_UNUSE:370|>", "<|AUDIO_UNUSE:371|>", "<|AUDIO_UNUSE:372|>", "<|AUDIO_UNUSE:373|>", "<|AUDIO_UNUSE:374|>", "<|AUDIO_UNUSE:375|>", "<|AUDIO_UNUSE:376|>", "<|AUDIO_UNUSE:377|>", "<|AUDIO_UNUSE:378|>", "<|AUDIO_UNUSE:379|>", "<|AUDIO_UNUSE:380|>", "<|AUDIO_UNUSE:381|>", "<|AUDIO_UNUSE:382|>", "<|AUDIO_UNUSE:383|>", "<|AUDIO_UNUSE:384|>", "<|AUDIO_UNUSE:385|>", "<|AUDIO_UNUSE:386|>", "<|AUDIO_UNUSE:387|>", "<|AUDIO_UNUSE:388|>", "<|AUDIO_UNUSE:389|>", "<|AUDIO_UNUSE:390|>", "<|AUDIO_UNUSE:391|>", "<|AUDIO_UNUSE:392|>", "<|AUDIO_UNUSE:393|>", "<|AUDIO_UNUSE:394|>", "<|AUDIO_UNUSE:395|>", "<|AUDIO_UNUSE:396|>", "<|AUDIO_UNUSE:397|>", "<|AUDIO_UNUSE:398|>", "<|AUDIO_UNUSE:399|>", "<|AUDIO_UNUSE:400|>", "<|AUDIO_UNUSE:401|>", "<|AUDIO_UNUSE:402|>", "<|AUDIO_UNUSE:403|>", "<|AUDIO_UNUSE:404|>", "<|AUDIO_UNUSE:405|>", "<|AUDIO_UNUSE:406|>", "<|AUDIO_UNUSE:407|>", "<|AUDIO_UNUSE:408|>", "<|AUDIO_UNUSE:409|>", "<|AUDIO_UNUSE:410|>", "<|AUDIO_UNUSE:411|>", "<|AUDIO_UNUSE:412|>", "<|AUDIO_UNUSE:413|>", "<|AUDIO_UNUSE:414|>", "<|AUDIO_UNUSE:415|>", "<|AUDIO_UNUSE:416|>", "<|AUDIO_UNUSE:417|>", "<|AUDIO_UNUSE:418|>", "<|AUDIO_UNUSE:419|>", "<|AUDIO_UNUSE:420|>", "<|AUDIO_UNUSE:421|>", "<|AUDIO_UNUSE:422|>", "<|AUDIO_UNUSE:423|>", "<|AUDIO_UNUSE:424|>", "<|AUDIO_UNUSE:425|>", "<|AUDIO_UNUSE:426|>", "<|AUDIO_UNUSE:427|>", "<|AUDIO_UNUSE:428|>", "<|AUDIO_UNUSE:429|>", "<|AUDIO_UNUSE:430|>", "<|AUDIO_UNUSE:431|>", "<|AUDIO_UNUSE:432|>", "<|AUDIO_UNUSE:433|>", "<|AUDIO_UNUSE:434|>", "<|AUDIO_UNUSE:435|>", "<|AUDIO_UNUSE:436|>", "<|AUDIO_UNUSE:437|>", "<|AUDIO_UNUSE:438|>", "<|AUDIO_UNUSE:439|>", "<|AUDIO_UNUSE:440|>", "<|AUDIO_UNUSE:441|>", "<|AUDIO_UNUSE:442|>", "<|AUDIO_UNUSE:443|>", "<|AUDIO_UNUSE:444|>", "<|AUDIO_UNUSE:445|>", "<|AUDIO_UNUSE:446|>", "<|AUDIO_UNUSE:447|>", "<|AUDIO_UNUSE:448|>", "<|AUDIO_UNUSE:449|>", "<|AUDIO_UNUSE:450|>", "<|AUDIO_UNUSE:451|>", "<|AUDIO_UNUSE:452|>", "<|AUDIO_UNUSE:453|>", "<|AUDIO_UNUSE:454|>", 
"<|AUDIO_UNUSE:455|>", "<|AUDIO_UNUSE:456|>", "<|AUDIO_UNUSE:457|>", "<|AUDIO_UNUSE:458|>", "<|AUDIO_UNUSE:459|>", "<|AUDIO_UNUSE:460|>", "<|AUDIO_UNUSE:461|>", "<|AUDIO_UNUSE:462|>", "<|AUDIO_UNUSE:463|>", "<|AUDIO_UNUSE:464|>", "<|AUDIO_UNUSE:465|>", "<|AUDIO_UNUSE:466|>", "<|AUDIO_UNUSE:467|>", "<|AUDIO_UNUSE:468|>", "<|AUDIO_UNUSE:469|>", "<|AUDIO_UNUSE:470|>", "<|AUDIO_UNUSE:471|>", "<|AUDIO_UNUSE:472|>", "<|AUDIO_UNUSE:473|>", "<|AUDIO_UNUSE:474|>", "<|AUDIO_UNUSE:475|>", "<|AUDIO_UNUSE:476|>", "<|AUDIO_UNUSE:477|>", "<|AUDIO_UNUSE:478|>", "<|AUDIO_UNUSE:479|>", "<|AUDIO_UNUSE:480|>", "<|AUDIO_UNUSE:481|>", "<|AUDIO_UNUSE:482|>", "<|AUDIO_UNUSE:483|>", "<|AUDIO_UNUSE:484|>", "<|AUDIO_UNUSE:485|>", "<|AUDIO_UNUSE:486|>", "<|AUDIO_UNUSE:487|>", "<|AUDIO_UNUSE:488|>", "<|AUDIO_UNUSE:489|>", "<|AUDIO_UNUSE:490|>", "<|AUDIO_UNUSE:491|>", "<|AUDIO_UNUSE:492|>", "<|AUDIO_UNUSE:493|>", "<|AUDIO_UNUSE:494|>", "<|AUDIO_UNUSE:495|>", "<|AUDIO_UNUSE:496|>", "<|AUDIO_UNUSE:497|>", "<|AUDIO_UNUSE:498|>", "<|AUDIO_UNUSE:499|>", "<|AUDIO_UNUSE:500|>", "<|AUDIO_UNUSE:501|>", "<|AUDIO_UNUSE:502|>", "<|AUDIO_UNUSE:503|>", "<|AUDIO_UNUSE:504|>", "<|AUDIO_UNUSE:505|>", "<|AUDIO_UNUSE:506|>", "<|AUDIO_UNUSE:507|>", "<|AUDIO_UNUSE:508|>", "<|AUDIO_UNUSE:509|>", "<|AUDIO_UNUSE:510|>", "<|AUDIO_UNUSE:511|>", "<|AUDIO_UNUSE:512|>", "<|AUDIO_UNUSE:513|>", "<|AUDIO_UNUSE:514|>", "<|AUDIO_UNUSE:515|>", "<|AUDIO_UNUSE:516|>", "<|AUDIO_UNUSE:517|>", "<|AUDIO_UNUSE:518|>", "<|AUDIO_UNUSE:519|>", "<|AUDIO_UNUSE:520|>", "<|AUDIO_UNUSE:521|>", "<|AUDIO_UNUSE:522|>", "<|AUDIO_UNUSE:523|>", "<|AUDIO_UNUSE:524|>", "<|AUDIO_UNUSE:525|>", "<|AUDIO_UNUSE:526|>", "<|AUDIO_UNUSE:527|>", "<|AUDIO_UNUSE:528|>", "<|AUDIO_UNUSE:529|>", "<|AUDIO_UNUSE:530|>", "<|AUDIO_UNUSE:531|>", "<|AUDIO_UNUSE:532|>", "<|AUDIO_UNUSE:533|>", "<|AUDIO_UNUSE:534|>", "<|AUDIO_UNUSE:535|>", "<|AUDIO_UNUSE:536|>", "<|AUDIO_UNUSE:537|>", "<|AUDIO_UNUSE:538|>", "<|AUDIO_UNUSE:539|>", "<|AUDIO_UNUSE:540|>", "<|AUDIO_UNUSE:541|>", "<|AUDIO_UNUSE:542|>", "<|AUDIO_UNUSE:543|>", "<|AUDIO_UNUSE:544|>", "<|AUDIO_UNUSE:545|>", "<|AUDIO_UNUSE:546|>", "<|AUDIO_UNUSE:547|>", "<|AUDIO_UNUSE:548|>", "<|AUDIO_UNUSE:549|>", "<|AUDIO_UNUSE:550|>", "<|AUDIO_UNUSE:551|>", "<|AUDIO_UNUSE:552|>", "<|AUDIO_UNUSE:553|>", "<|AUDIO_UNUSE:554|>", "<|AUDIO_UNUSE:555|>", "<|AUDIO_UNUSE:556|>", "<|AUDIO_UNUSE:557|>", "<|AUDIO_UNUSE:558|>", "<|AUDIO_UNUSE:559|>", "<|AUDIO_UNUSE:560|>", "<|AUDIO_UNUSE:561|>", "<|AUDIO_UNUSE:562|>", "<|AUDIO_UNUSE:563|>", "<|AUDIO_UNUSE:564|>", "<|AUDIO_UNUSE:565|>", "<|AUDIO_UNUSE:566|>", "<|AUDIO_UNUSE:567|>", "<|AUDIO_UNUSE:568|>", "<|AUDIO_UNUSE:569|>", "<|AUDIO_UNUSE:570|>", "<|AUDIO_UNUSE:571|>", "<|AUDIO_UNUSE:572|>", "<|AUDIO_UNUSE:573|>", "<|AUDIO_UNUSE:574|>", "<|AUDIO_UNUSE:575|>", "<|AUDIO_UNUSE:576|>", "<|AUDIO_UNUSE:577|>", "<|AUDIO_UNUSE:578|>", "<|AUDIO_UNUSE:579|>", "<|AUDIO_UNUSE:580|>", "<|AUDIO_UNUSE:581|>", "<|AUDIO_UNUSE:582|>", "<|AUDIO_UNUSE:583|>", "<|AUDIO_UNUSE:584|>", "<|AUDIO_UNUSE:585|>", "<|AUDIO_UNUSE:586|>", "<|AUDIO_UNUSE:587|>", "<|AUDIO_UNUSE:588|>", "<|AUDIO_UNUSE:589|>", "<|AUDIO_UNUSE:590|>", "<|AUDIO_UNUSE:591|>", "<|AUDIO_UNUSE:592|>", "<|AUDIO_UNUSE:593|>", "<|AUDIO_UNUSE:594|>", "<|AUDIO_UNUSE:595|>", "<|AUDIO_UNUSE:596|>", "<|AUDIO_UNUSE:597|>", "<|AUDIO_UNUSE:598|>", "<|AUDIO_UNUSE:599|>", "<|AUDIO_UNUSE:600|>", "<|AUDIO_UNUSE:601|>", "<|AUDIO_UNUSE:602|>", "<|AUDIO_UNUSE:603|>", "<|AUDIO_UNUSE:604|>", "<|AUDIO_UNUSE:605|>", "<|AUDIO_UNUSE:606|>", "<|AUDIO_UNUSE:607|>", "<|AUDIO_UNUSE:608|>", 
"<|AUDIO_UNUSE:609|>", "<|AUDIO_UNUSE:610|>", "<|AUDIO_UNUSE:611|>", "<|AUDIO_UNUSE:612|>", "<|AUDIO_UNUSE:613|>", "<|AUDIO_UNUSE:614|>", "<|AUDIO_UNUSE:615|>", "<|AUDIO_UNUSE:616|>", "<|AUDIO_UNUSE:617|>", "<|AUDIO_UNUSE:618|>", "<|AUDIO_UNUSE:619|>", "<|AUDIO_UNUSE:620|>", "<|AUDIO_UNUSE:621|>", "<|AUDIO_UNUSE:622|>", "<|AUDIO_UNUSE:623|>", "<|AUDIO_UNUSE:624|>", "<|AUDIO_UNUSE:625|>", "<|AUDIO_UNUSE:626|>", "<|AUDIO_UNUSE:627|>", "<|AUDIO_UNUSE:628|>", "<|AUDIO_UNUSE:629|>", "<|AUDIO_UNUSE:630|>", "<|AUDIO_UNUSE:631|>", "<|AUDIO_UNUSE:632|>", "<|AUDIO_UNUSE:633|>", "<|AUDIO_UNUSE:634|>", "<|AUDIO_UNUSE:635|>", "<|AUDIO_UNUSE:636|>", "<|AUDIO_UNUSE:637|>", "<|AUDIO_UNUSE:638|>", "<|AUDIO_UNUSE:639|>", "<|AUDIO_UNUSE:640|>", "<|AUDIO_UNUSE:641|>", "<|AUDIO_UNUSE:642|>", "<|AUDIO_UNUSE:643|>", "<|AUDIO_UNUSE:644|>", "<|AUDIO_UNUSE:645|>", "<|AUDIO_UNUSE:646|>", "<|AUDIO_UNUSE:647|>", "<|AUDIO_UNUSE:648|>", "<|AUDIO_UNUSE:649|>", "<|AUDIO_UNUSE:650|>", "<|AUDIO_UNUSE:651|>", "<|AUDIO_UNUSE:652|>", "<|AUDIO_UNUSE:653|>", "<|AUDIO_UNUSE:654|>", "<|AUDIO_UNUSE:655|>", "<|AUDIO_UNUSE:656|>", "<|AUDIO_UNUSE:657|>", "<|AUDIO_UNUSE:658|>", "<|AUDIO_UNUSE:659|>", "<|AUDIO_UNUSE:660|>", "<|AUDIO_UNUSE:661|>", "<|AUDIO_UNUSE:662|>", "<|AUDIO_UNUSE:663|>", "<|AUDIO_UNUSE:664|>", "<|AUDIO_UNUSE:665|>", "<|AUDIO_UNUSE:666|>", "<|AUDIO_UNUSE:667|>", "<|AUDIO_UNUSE:668|>", "<|AUDIO_UNUSE:669|>", "<|AUDIO_UNUSE:670|>", "<|AUDIO_UNUSE:671|>", "<|AUDIO_UNUSE:672|>", "<|AUDIO_UNUSE:673|>", "<|AUDIO_UNUSE:674|>", "<|AUDIO_UNUSE:675|>", "<|AUDIO_UNUSE:676|>", "<|AUDIO_UNUSE:677|>", "<|AUDIO_UNUSE:678|>", "<|AUDIO_UNUSE:679|>", "<|AUDIO_UNUSE:680|>", "<|AUDIO_UNUSE:681|>", "<|AUDIO_UNUSE:682|>", "<|AUDIO_UNUSE:683|>", "<|AUDIO_UNUSE:684|>", "<|AUDIO_UNUSE:685|>", "<|AUDIO_UNUSE:686|>", "<|AUDIO_UNUSE:687|>", "<|AUDIO_UNUSE:688|>", "<|AUDIO_UNUSE:689|>", "<|AUDIO_UNUSE:690|>", "<|AUDIO_UNUSE:691|>", "<|AUDIO_UNUSE:692|>", "<|AUDIO_UNUSE:693|>", "<|AUDIO_UNUSE:694|>", "<|AUDIO_UNUSE:695|>", "<|AUDIO_UNUSE:696|>", "<|AUDIO_UNUSE:697|>", "<|AUDIO_UNUSE:698|>", "<|AUDIO_UNUSE:699|>", "<|AUDIO_UNUSE:700|>", "<|AUDIO_UNUSE:701|>", "<|AUDIO_UNUSE:702|>", "<|AUDIO_UNUSE:703|>", "<|AUDIO_UNUSE:704|>", "<|AUDIO_UNUSE:705|>", "<|AUDIO_UNUSE:706|>", "<|AUDIO_UNUSE:707|>", "<|AUDIO_UNUSE:708|>", "<|AUDIO_UNUSE:709|>", "<|AUDIO_UNUSE:710|>", "<|AUDIO_UNUSE:711|>", "<|AUDIO_UNUSE:712|>", "<|AUDIO_UNUSE:713|>", "<|AUDIO_UNUSE:714|>", "<|AUDIO_UNUSE:715|>", "<|AUDIO_UNUSE:716|>", "<|AUDIO_UNUSE:717|>", "<|AUDIO_UNUSE:718|>", "<|AUDIO_UNUSE:719|>", "<|AUDIO_UNUSE:720|>", "<|AUDIO_UNUSE:721|>", "<|AUDIO_UNUSE:722|>", "<|AUDIO_UNUSE:723|>", "<|AUDIO_UNUSE:724|>", "<|AUDIO_UNUSE:725|>", "<|AUDIO_UNUSE:726|>", "<|AUDIO_UNUSE:727|>", "<|AUDIO_UNUSE:728|>", "<|AUDIO_UNUSE:729|>", "<|AUDIO_UNUSE:730|>", "<|AUDIO_UNUSE:731|>", "<|AUDIO_UNUSE:732|>", "<|AUDIO_UNUSE:733|>", "<|AUDIO_UNUSE:734|>", "<|AUDIO_UNUSE:735|>", "<|AUDIO_UNUSE:736|>", "<|AUDIO_UNUSE:737|>", "<|AUDIO_UNUSE:738|>", "<|AUDIO_UNUSE:739|>", "<|AUDIO_UNUSE:740|>", "<|AUDIO_UNUSE:741|>", "<|AUDIO_UNUSE:742|>", "<|AUDIO_UNUSE:743|>", "<|AUDIO_UNUSE:744|>", "<|AUDIO_UNUSE:745|>", "<|AUDIO_UNUSE:746|>", "<|AUDIO_UNUSE:747|>", "<|AUDIO_UNUSE:748|>", "<|AUDIO_UNUSE:749|>", "<|AUDIO_UNUSE:750|>", "<|AUDIO_UNUSE:751|>", "<|AUDIO_UNUSE:752|>", "<|AUDIO_UNUSE:753|>", "<|AUDIO_UNUSE:754|>", "<|AUDIO_UNUSE:755|>", "<|AUDIO_UNUSE:756|>", "<|AUDIO_UNUSE:757|>", "<|AUDIO_UNUSE:758|>", "<|AUDIO_UNUSE:759|>", "<|AUDIO_UNUSE:760|>", "<|AUDIO_UNUSE:761|>", "<|AUDIO_UNUSE:762|>", 
"<|AUDIO_UNUSE:763|>", "<|AUDIO_UNUSE:764|>", "<|AUDIO_UNUSE:765|>", "<|AUDIO_UNUSE:766|>", "<|AUDIO_UNUSE:767|>", "<|AUDIO_UNUSE:768|>", "<|AUDIO_UNUSE:769|>", "<|AUDIO_UNUSE:770|>", "<|AUDIO_UNUSE:771|>", "<|AUDIO_UNUSE:772|>", "<|AUDIO_UNUSE:773|>", "<|AUDIO_UNUSE:774|>", "<|AUDIO_UNUSE:775|>", "<|AUDIO_UNUSE:776|>", "<|AUDIO_UNUSE:777|>", "<|AUDIO_UNUSE:778|>", "<|AUDIO_UNUSE:779|>", "<|AUDIO_UNUSE:780|>", "<|AUDIO_UNUSE:781|>", "<|AUDIO_UNUSE:782|>", "<|AUDIO_UNUSE:783|>", "<|AUDIO_UNUSE:784|>", "<|AUDIO_UNUSE:785|>", "<|AUDIO_UNUSE:786|>", "<|AUDIO_UNUSE:787|>", "<|AUDIO_UNUSE:788|>", "<|AUDIO_UNUSE:789|>", "<|AUDIO_UNUSE:790|>", "<|AUDIO_UNUSE:791|>", "<|AUDIO_UNUSE:792|>", "<|AUDIO_UNUSE:793|>", "<|AUDIO_UNUSE:794|>", "<|AUDIO_UNUSE:795|>", "<|AUDIO_UNUSE:796|>", "<|AUDIO_UNUSE:797|>", "<|AUDIO_UNUSE:798|>", "<|AUDIO_UNUSE:799|>", "<|AUDIO_UNUSE:800|>", "<|AUDIO_UNUSE:801|>", "<|AUDIO_UNUSE:802|>", "<|AUDIO_UNUSE:803|>", "<|AUDIO_UNUSE:804|>", "<|AUDIO_UNUSE:805|>", "<|AUDIO_UNUSE:806|>", "<|AUDIO_UNUSE:807|>", "<|AUDIO_UNUSE:808|>", "<|AUDIO_UNUSE:809|>", "<|AUDIO_UNUSE:810|>", "<|AUDIO_UNUSE:811|>", "<|AUDIO_UNUSE:812|>", "<|AUDIO_UNUSE:813|>", "<|AUDIO_UNUSE:814|>", "<|AUDIO_UNUSE:815|>", "<|AUDIO_UNUSE:816|>", "<|AUDIO_UNUSE:817|>", "<|AUDIO_UNUSE:818|>", "<|AUDIO_UNUSE:819|>", "<|AUDIO_UNUSE:820|>", "<|AUDIO_UNUSE:821|>", "<|AUDIO_UNUSE:822|>", "<|AUDIO_UNUSE:823|>", "<|AUDIO_UNUSE:824|>", "<|AUDIO_UNUSE:825|>", "<|AUDIO_UNUSE:826|>", "<|AUDIO_UNUSE:827|>", "<|AUDIO_UNUSE:828|>", "<|AUDIO_UNUSE:829|>", "<|AUDIO_UNUSE:830|>", "<|AUDIO_UNUSE:831|>", "<|AUDIO_UNUSE:832|>", "<|AUDIO_UNUSE:833|>", "<|AUDIO_UNUSE:834|>", "<|AUDIO_UNUSE:835|>", "<|AUDIO_UNUSE:836|>", "<|AUDIO_UNUSE:837|>", "<|AUDIO_UNUSE:838|>", "<|AUDIO_UNUSE:839|>", "<|AUDIO_UNUSE:840|>", "<|AUDIO_UNUSE:841|>", "<|AUDIO_UNUSE:842|>", "<|AUDIO_UNUSE:843|>", "<|AUDIO_UNUSE:844|>", "<|AUDIO_UNUSE:845|>", "<|AUDIO_UNUSE:846|>", "<|AUDIO_UNUSE:847|>", "<|AUDIO_UNUSE:848|>", "<|AUDIO_UNUSE:849|>", "<|AUDIO_UNUSE:850|>", "<|AUDIO_UNUSE:851|>", "<|AUDIO_UNUSE:852|>", "<|AUDIO_UNUSE:853|>", "<|AUDIO_UNUSE:854|>", "<|AUDIO_UNUSE:855|>", "<|AUDIO_UNUSE:856|>", "<|AUDIO_UNUSE:857|>", "<|AUDIO_UNUSE:858|>", "<|AUDIO_UNUSE:859|>", "<|AUDIO_UNUSE:860|>", "<|AUDIO_UNUSE:861|>", "<|AUDIO_UNUSE:862|>", "<|AUDIO_UNUSE:863|>", "<|AUDIO_UNUSE:864|>", "<|AUDIO_UNUSE:865|>", "<|AUDIO_UNUSE:866|>", "<|AUDIO_UNUSE:867|>", "<|AUDIO_UNUSE:868|>", "<|AUDIO_UNUSE:869|>", "<|AUDIO_UNUSE:870|>", "<|AUDIO_UNUSE:871|>", "<|AUDIO_UNUSE:872|>", "<|AUDIO_UNUSE:873|>", "<|AUDIO_UNUSE:874|>", "<|AUDIO_UNUSE:875|>", "<|AUDIO_UNUSE:876|>", "<|AUDIO_UNUSE:877|>", "<|AUDIO_UNUSE:878|>", "<|AUDIO_UNUSE:879|>", "<|AUDIO_UNUSE:880|>", "<|AUDIO_UNUSE:881|>", "<|AUDIO_UNUSE:882|>", "<|AUDIO_UNUSE:883|>", "<|AUDIO_UNUSE:884|>", "<|AUDIO_UNUSE:885|>", "<|AUDIO_UNUSE:886|>", "<|AUDIO_UNUSE:887|>", "<|AUDIO_UNUSE:888|>", "<|AUDIO_UNUSE:889|>", "<|AUDIO_UNUSE:890|>", "<|AUDIO_UNUSE:891|>", "<|AUDIO_UNUSE:892|>", "<|AUDIO_UNUSE:893|>", "<|AUDIO_UNUSE:894|>", "<|AUDIO_UNUSE:895|>", "<|AUDIO_UNUSE:896|>", "<|AUDIO_UNUSE:897|>", "<|AUDIO_UNUSE:898|>", "<|AUDIO_UNUSE:899|>", "<|AUDIO_UNUSE:900|>", "<|AUDIO_UNUSE:901|>", "<|AUDIO_UNUSE:902|>", "<|AUDIO_UNUSE:903|>", "<|AUDIO_UNUSE:904|>", "<|AUDIO_UNUSE:905|>", "<|AUDIO_UNUSE:906|>", "<|AUDIO_UNUSE:907|>", "<|AUDIO_UNUSE:908|>", "<|AUDIO_UNUSE:909|>", "<|AUDIO_UNUSE:910|>", "<|AUDIO_UNUSE:911|>", "<|AUDIO_UNUSE:912|>", "<|AUDIO_UNUSE:913|>", "<|AUDIO_UNUSE:914|>", "<|AUDIO_UNUSE:915|>", "<|AUDIO_UNUSE:916|>", 
"<|AUDIO_UNUSE:917|>", "<|AUDIO_UNUSE:918|>", "<|AUDIO_UNUSE:919|>", "<|AUDIO_UNUSE:920|>", "<|AUDIO_UNUSE:921|>", "<|AUDIO_UNUSE:922|>", "<|AUDIO_UNUSE:923|>", "<|AUDIO_UNUSE:924|>", "<|AUDIO_UNUSE:925|>", "<|AUDIO_UNUSE:926|>", "<|AUDIO_UNUSE:927|>", "<|AUDIO_UNUSE:928|>", "<|AUDIO_UNUSE:929|>", "<|AUDIO_UNUSE:930|>", "<|AUDIO_UNUSE:931|>", "<|AUDIO_UNUSE:932|>", "<|AUDIO_UNUSE:933|>", "<|AUDIO_UNUSE:934|>", "<|AUDIO_UNUSE:935|>", "<|AUDIO_UNUSE:936|>", "<|AUDIO_UNUSE:937|>", "<|AUDIO_UNUSE:938|>", "<|AUDIO_UNUSE:939|>", "<|AUDIO_UNUSE:940|>", "<|AUDIO_UNUSE:941|>", "<|AUDIO_UNUSE:942|>", "<|AUDIO_UNUSE:943|>", "<|AUDIO_UNUSE:944|>", "<|AUDIO_UNUSE:945|>", "<|AUDIO_UNUSE:946|>", "<|AUDIO_UNUSE:947|>", "<|AUDIO_UNUSE:948|>", "<|AUDIO_UNUSE:949|>", "<|AUDIO_UNUSE:950|>", "<|AUDIO_UNUSE:951|>", "<|AUDIO_UNUSE:952|>", "<|AUDIO_UNUSE:953|>", "<|AUDIO_UNUSE:954|>", "<|AUDIO_UNUSE:955|>", "<|AUDIO_UNUSE:956|>", "<|AUDIO_UNUSE:957|>", "<|AUDIO_UNUSE:958|>", "<|AUDIO_UNUSE:959|>", "<|AUDIO_UNUSE:960|>", "<|AUDIO_UNUSE:961|>", "<|AUDIO_UNUSE:962|>", "<|AUDIO_UNUSE:963|>", "<|AUDIO_UNUSE:964|>", "<|AUDIO_UNUSE:965|>", "<|AUDIO_UNUSE:966|>", "<|AUDIO_UNUSE:967|>", "<|AUDIO_UNUSE:968|>", "<|AUDIO_UNUSE:969|>", "<|AUDIO_UNUSE:970|>", "<|AUDIO_UNUSE:971|>", "<|AUDIO_UNUSE:972|>", "<|AUDIO_UNUSE:973|>", "<|AUDIO_UNUSE:974|>", "<|AUDIO_UNUSE:975|>", "<|AUDIO_UNUSE:976|>", "<|AUDIO_UNUSE:977|>", "<|AUDIO_UNUSE:978|>", "<|AUDIO_UNUSE:979|>", "<|AUDIO_UNUSE:980|>", "<|AUDIO_UNUSE:981|>", "<|AUDIO_UNUSE:982|>", "<|AUDIO_UNUSE:983|>", "<|AUDIO_UNUSE:984|>", "<|AUDIO_UNUSE:985|>", "<|AUDIO_UNUSE:986|>", "<|AUDIO_UNUSE:987|>", "<|AUDIO_UNUSE:988|>", "<|AUDIO_UNUSE:989|>", "<|AUDIO_UNUSE:990|>", "<|AUDIO_UNUSE:991|>", "<|AUDIO_UNUSE:992|>", "<|AUDIO_UNUSE:993|>", "<|AUDIO_UNUSE:994|>", "<|AUDIO_UNUSE:995|>", "<|AUDIO_UNUSE:996|>", "<|AUDIO_UNUSE:997|>", "<|AUDIO_UNUSE:998|>", "<|AUDIO_UNUSE:999|>", "<|AUDIO_UNUSE:1000|>", "<|AUDIO_UNUSE:1001|>", "<|AUDIO_UNUSE:1002|>", "<|AUDIO_UNUSE:1003|>", "<|AUDIO_UNUSE:1004|>", "<|AUDIO_UNUSE:1005|>", "<|AUDIO_UNUSE:1006|>", "<|AUDIO_UNUSE:1007|>", "<|AUDIO_UNUSE:1008|>", "<|AUDIO_UNUSE:1009|>", "<|AUDIO_UNUSE:1010|>", "<|AUDIO_UNUSE:1011|>", "<|AUDIO_UNUSE:1012|>", "<|AUDIO_UNUSE:1013|>", "<|AUDIO_UNUSE:1014|>", "<|AUDIO_UNUSE:1015|>", "<|AUDIO_UNUSE:1016|>", "<|AUDIO_UNUSE:1017|>", "<|AUDIO_UNUSE:1018|>", "<|AUDIO_UNUSE:1019|>", "<|AUDIO_UNUSE:1020|>", "<think>", "</think>"]}
 
 
tokenizer.model → tokenizer.json RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2ed2203974453df691287a0432c06737f1b17f20f5ab325fb33e31844d90ddb0
- size 1614362
+ oid sha256:94718bd0ed3c1f3591a1cf2c517d6364283457f2fd7e95ae4d201138e922079d
+ size 11590466
tokenizer_config.json CHANGED
@@ -1,22 +1,49 @@
  {
- "bos_token": "<s>",
- "eos_token": "</s>",
- "pad_token": "<unk>",
- "unk_token": "<unk>",
- "cls_token": "<|begin_of_sentence|>",
- "sep_token": "<|end_of_sentence|>",
- "mask_token": "<mask:1>",
- "sys_start_token": "<mask:4>",
- "sys_end_token": "<mask:5>",
- "header_start_token": "<mask:6>",
  "header_end_token": "<mask:7>",
- "additional_special_tokens": null,
- "tokenizer_class": "Ernie4_5_VLTokenizer",
- "auto_map": {
- "AutoTokenizer": [
- "processing_ernie4_5_vl.Ernie4_5_VLTokenizer",
- null
- ]
- },
- "chat_template": "\n{%- set image_count = namespace(value=0) -%}\n{%- set video_count = namespace(value=0) -%}\n{{- '<|begin_of_sentence|>' }}\n{%- for message in messages -%}\n {%- if message.role in ['system', 'user'] -%}\n {%- if message.role == 'user' -%}\n {{- 'User: ' -}}\n {%- endif -%}\n {%- if message.content is string -%}\n {{- message.content -}}\n {%- else -%}\n {%- for content_item in message.content -%}\n {%- if content_item.type == 'text' -%}\n {{- content_item.text -}}\n {%- elif content_item.type in ['image_url', 'image'] -%}\n {%- set image_count.value = image_count.value + 1 -%}\n Picture {{ image_count.value }}:<|IMAGE_START|><|image@placeholder|><|IMAGE_END|>\n {%- elif content_item.type in ['video_url', 'video'] -%}\n {%- set video_count.value = video_count.value + 1 -%}\n Video {{ video_count.value }}:<|VIDEO_START|><|video@placeholder|><|VIDEO_END|>\n {%- endif -%}\n {%- endfor -%}\n {%- endif -%}\n {%- if message.role == 'system' -%}\n {{- '\n' -}}\n {%- endif -%}\n {%- elif message.role == 'assistant' -%}\n {%- macro extract_text_content(content_field) -%}\n {%- if content_field is string -%}\n {{- content_field -}}\n {%- elif content_field is iterable and content_field is not string -%}\n {%- set ns = namespace(text_parts=[]) -%}\n {%- set text_parts = [] -%}\n {%- for item in content_field -%}\n {%- if item.type == 'text' -%}\n {%- set ns.text_parts = ns.text_parts + [item.text] -%}\n {%- endif -%}\n {%- endfor -%}\n {{- ns.text_parts | join('') -}}\n {%- else -%}\n {{- '' -}}\n {%- endif -%}\n {%- endmacro -%}\n {%- set reasoning_content = extract_text_content(message.reasoning_content) -%}\n {%- set content = extract_text_content(message.content) -%}\n {%- if '</think>' in content %}\n {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}\n {%- set content = content.split('</think>')[-1].lstrip('\n') %}\n {%- endif %}\n {%- if reasoning_content %}\n {{- '\n' + 'Assistant: ' + '<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}\n {%- else %}\n {{- '\n' + 'Assistant: ' + content }}\n {%- endif %}\n {{- '<|end_of_sentence|>' }}\n {%- endif -%}\n{%- endfor -%}\n{%- if add_generation_prompt is not defined or add_generation_prompt is true %}\n {{- '\nAssistant: ' -}}\n {%- if enable_thinking is defined and enable_thinking is false %}\n {{- '<think>\n\n</think>\n\n' }}\n {%- endif %}\n {%- if enable_thinking is not defined or enable_thinking is true %}\n {{- '<think>' }}\n {%- endif %}\n{%- endif %}\n"
- }
  {
+ "add_prefix_space": false,
+ "backend": "tokenizers",
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "cls_token": "<|begin_of_sentence|>",
+ "eos_token": "</s>",
+ "extra_special_tokens": [
+ "<|IMAGE_PLACEHOLDER|>",
+ "<|IMAGE_START|>",
+ "<|IMAGE_END|>",
+ "<|VIDEO_PLACEHOLDER|>",
+ "<|VIDEO_START|>",
+ "<|VIDEO_END|>",
+ "<think>",
+ "</think>"
+ ],
+ "header_end_token": "<mask:7>",
+ "header_start_token": "<mask:6>",
+ "image_end_token": "<|IMAGE_END|>",
+ "image_start_token": "<|IMAGE_START|>",
+ "image_token": "<|IMAGE_PLACEHOLDER|>",
+ "is_local": true,
+ "mask_token": "<mask:1>",
+ "model_max_length": 131072,
+ "model_specific_special_tokens": {
  "header_end_token": "<mask:7>",
+ "header_start_token": "<mask:6>",
+ "image_end_token": "<|IMAGE_END|>",
+ "image_start_token": "<|IMAGE_START|>",
+ "image_token": "<|IMAGE_PLACEHOLDER|>",
+ "sys_end_token": "<mask:5>",
+ "sys_start_token": "<mask:4>",
+ "video_end_token": "<|VIDEO_END|>",
+ "video_start_token": "<|VIDEO_START|>",
+ "video_token": "<|VIDEO_PLACEHOLDER|>"
+ },
+ "pad_token": "<unk>",
+ "processor_class": "Ernie4_5_VL_MoeProcessor",
+ "sep_token": "<|end_of_sentence|>",
+ "sys_end_token": "<mask:5>",
+ "sys_start_token": "<mask:4>",
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false,
+ "video_end_token": "<|VIDEO_END|>",
+ "video_start_token": "<|VIDEO_START|>",
+ "video_token": "<|VIDEO_PLACEHOLDER|>"
+ }
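
Usage sketch (not part of the commit files above): a minimal example of how the converted fast tokenizer and the chat_template carried over into this repo could be exercised. The repo/path argument is a placeholder, and the enable_thinking flag is simply forwarded to the template as an extra kwarg of apply_chat_template; this is an assumption-laden illustration, not an official snippet from the commit.

# Minimal sketch, assuming the files from this commit are available locally or
# on the Hub under the placeholder id below; not an official example.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/or/repo-id-of-this-checkpoint")  # placeholder

messages = [{"role": "user", "content": "Describe the weather in one sentence."}]

# The template ends the generation prompt with "<think>" by default; passing
# enable_thinking=False instead emits an empty think block before the answer.
prompt = tok.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
    enable_thinking=True,
)
print(prompt)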