Vedant Vyas committed on
Commit
aefb8b3
1 Parent(s): b748013
data/create_data.py ADDED
@@ -0,0 +1,46 @@
+ import random
+
+
+ def main():
+     print('Creating data...')
+     # Write JSON-lines training and validation data.
+     # Note: the "let"/"rev" keys below match the records in data.json and validData.json.
+
+     WordList = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q']
+     ValidWordList = ['l', 'm', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
+     file = open('data.json', 'w')
+     file2 = open('validData.json', 'w')
+     for k in range(1000):
+         file.write('{"translation":{"let":')
+         if k <= 1000:
+             file2.write('{"translation":{"let":')
+         #randomNum = random.randint(1, 10)
+         randomNum = 10
+         word = []
+         word2 = []
+         for i in random.sample(range(1, 100), randomNum):
+             word.append(WordList[i % 9])
+             if k <= 1000:
+                 word2.append(ValidWordList[i % 11])
+         wordStr = ' '.join(word)
+         wordRev = wordStr[::-1]
+         if k <= 1000:
+             wordStr2 = ' '.join(word2)
+             wordRev2 = wordStr2[::-1]
+             file2.write(f'"{wordStr2}.", "rev" :"{wordRev2}."')
+             file2.write('}}\n')
+         file.write(f'"{wordStr}.", "rev" :"{wordRev}."')
+         file.write('}}\n')
+
+     file.close()
+     file2.close()
+
+
+ if __name__ == '__main__':
+     main()
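Since the script writes one self-contained JSON object per line, the generated files are jsonlines and can be sanity-checked with the `datasets` library pinned in requirements.txt below. A minimal sketch, not part of the commit:

from datasets import load_dataset

# Each line of data.json / validData.json is a standalone JSON object,
# so the generic "json" loader reads the files as jsonlines datasets.
raw = load_dataset('json', data_files={'train': 'data.json', 'validation': 'validData.json'})
print(raw['train'][0]['translation'])  # expected shape: {'let': '...', 'rev': '...'}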
data/data.json ADDED
The diff for this file is too large to render. See raw diff
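(data.json is produced by the same loop in create_data.py above, in the same one-JSON-object-per-line format as validData.json below.)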
 
data/validData.json ADDED
@@ -0,0 +1,1001 @@
+ {"translation":{"let":"vtxtxszvryyvvurusstsswxuwzuwx", "rev" :"xwuzwuxwsstssuruvvyyrvzsxtxtv"}}
+ {"translation":{"let":"xvxyrsxwttzzwytzsu", "rev" :"usztywzzttwxsryxvx"}}
+ {"translation":{"let":"zrzxtwzruu", "rev" :"uurzwtxzrz"}}
+ {"translation":{"let":"y", "rev" :"y"}}
+ {"translation":{"let":"ytrszzuxzyuyrxswuszsxzvrutzuwxwrswuyrstvyvwvv", "rev" :"vvwvyvtsryuwsrwxwuzturvzxszsuwsxryuyzxuzzsrty"}}
+ {"translation":{"let":"zzxxwyvzzury", "rev" :"yruzzvywxxzz"}}
+ {"translation":{"let":"rzxuxzurvvyvwrwxsyuzxxttzyrux", "rev" :"xuryzttxxzuysxwrwvyvvruzxuxzr"}}
+ {"translation":{"let":"twywyrrrtztvxuyzvrszuxvsutzvwxzvrswwwyuuuxvsyztzwz", "rev" :"zwztzysvxuuuywwwsrvzxwvztusvxuzsrvzyuxvtztrrrywywt"}}
+ {"translation":{"let":"rxzvwuzvtuzvusyysuwtvywttytvrswyruxvssytyxyszv", "rev" :"vzsyxytyssvxurywsrvtyttwyvtwusyysuvzutvzuwvzxr"}}
+ {"translation":{"let":"ry", "rev" :"yr"}}
+ {"translation":{"let":"vwryuwvxyxsrxvsrwtsuvxzuzvyyxzrsuwwzsrrwtszvt", "rev" :"tvzstwrrszwwusrzxyyvzuzxvustwrsvxrsxyxvwuyrwv"}}
+ {"translation":{"let":"xrwwtwysrsytzrxxsutwwztuwuzvsxzvvwsyzyvtwrr", "rev" :"rrwtvyzyswvvzxsvzuwutzwwtusxxrztysrsywtwwrx"}}
+ {"translation":{"let":"sztxvurtruxutysywyuwyvsvrtwyvxuvrtzswxztyswsxzvvy", "rev" :"yvvzxswsytzxwsztrvuxvywtrvsvywuywysytuxurtruvxtzs"}}
+ {"translation":{"let":"rwzvvwvtzywvssxytrstsusuw", "rev" :"wusustsrtyxssvwyztvwvvzwr"}}
+ {"translation":{"let":"vtxyvyuuuxturzwtvzuyxtxrzusvrsuurswxrtwrwy", "rev" :"ywrwtrxwsruusrvsuzrxtxyuzvtwzrutxuuuyvyxtv"}}
+ {"translation":{"let":"ztuxuvyvrztzryrrsvuxyxr", "rev" :"rxyxuvsrryrztzrvyvuxutz"}}
+ {"translation":{"let":"v", "rev" :"v"}}
+ {"translation":{"let":"twxruzvsyusvwzyrrtuzxrtrwtrxs", "rev" :"sxrtwrtrxzutrryzwvsuysvzurxwt"}}
+ {"translation":{"let":"wwyrvxtzu", "rev" :"uztxvryww"}}
+ {"translation":{"let":"svytwytsuxrvuvyxvyt", "rev" :"tyvxyvuvrxustywtyvs"}}
+ {"translation":{"let":"zut", "rev" :"tuz"}}
+ {"translation":{"let":"wuutwz", "rev" :"zwtuuw"}}
+ {"translation":{"let":"sywxsstzwsutryrstrsyxyuzsrzturwwvuxwxut", "rev" :"tuxwxuvwwrutzrszuyxysrtsryrtuswztssxwys"}}
+ {"translation":{"let":"xtzzztzuwwxuyrtsxwsrtuwx", "rev" :"xwutrswxstryuxwwuztzzztx"}}
+ {"translation":{"let":"rrytxssxuuswwxvwzytvsvvuttszuu", "rev" :"uuzsttuvvsvtyzwvxwwsuuxssxtyrr"}}
+ {"translation":{"let":"ztvvuvrzutw", "rev" :"wtuzrvuvvtz"}}
+ {"translation":{"let":"ry", "rev" :"yr"}}
+ {"translation":{"let":"xxwxsxw", "rev" :"wxsxwxx"}}
+ {"translation":{"let":"zzsusyxwtwwryvwtyrsrrrvzvwwvxx", "rev" :"xxvwwvzvrrrsrytwvyrwwtwxysuszz"}}
+ {"translation":{"let":"rsvyrzvsuuwwvurxryttrtzrswzv", "rev" :"vzwsrztrttyrxruvwwuusvzryvsr"}}
+ {"translation":{"let":"wwyuuztvwvwsxrrtyttrrxwyysvzsswsxtrvzrzsxyruuurxw", "rev" :"wxruuuryxszrzvrtxswsszvsyywxrrttytrrxswvwvtzuuyww"}}
+ {"translation":{"let":"yzxyyyxvuxuztsrxwru", "rev" :"urwxrstzuxuvxyyyxzy"}}
+ {"translation":{"let":"xxsu", "rev" :"usxx"}}
+ {"translation":{"let":"zyywruvvwtxxtzuyru", "rev" :"uryuztxxtwvvurwyyz"}}
+ {"translation":{"let":"zwtzxrxyryvstyrszwuyyvutzrurwurzxrxsyyxxxy", "rev" :"yxxxyysxrxzruwrurztuvyyuwzsrytsvyryxrxztwz"}}
+ {"translation":{"let":"rxyytttxwxsvyrztuxvswursytuysuyvztysv", "rev" :"vsytzvyusyutysruwsvxutzryvsxwxtttyyxr"}}
+ {"translation":{"let":"zvsrvytvvvvxvrtwtxywtruyu", "rev" :"uyurtwyxtwtrvxvvvvtyvrsvz"}}
+ {"translation":{"let":"vzzxsxvtwuysyztvywuwwzsxvvyrrstvwttx", "rev" :"xttwvtsrryvvxszwwuwyvtzysyuwtvxsxzzv"}}
+ {"translation":{"let":"trtvuxryxyuyu", "rev" :"uyuyxyrxuvtrt"}}
+ {"translation":{"let":"ryyzvutxzrtuvsxzxzwyxxtuwwytrtwuvvwywrr", "rev" :"rrwywvvuwtrtywwutxxywzxzxsvutrzxtuvzyyr"}}
+ {"translation":{"let":"xsruwryvzwuwzvts", "rev" :"stvzwuwzvyrwursx"}}
+ {"translation":{"let":"tzxxtvyxsstutssuzrvxvzws", "rev" :"swzvxvrzusstutssxyvtxxzt"}}
+ {"translation":{"let":"xvtzwyyrtrswuwwvvrsxwsysrsxsrszywxvxsytutuuwxw", "rev" :"wxwuututysxvxwyzsrsxsrsyswxsrvvwwuwsrtryywztvx"}}
+ {"translation":{"let":"tsts", "rev" :"stst"}}
+ {"translation":{"let":"zsvyyzztvrruyxzywsxststzxw", "rev" :"wxztstsxswyzxyurrvtzzyyvsz"}}
+ {"translation":{"let":"uwuxsxsxvvwsxyrxy", "rev" :"yxryxswvvxsxsxuwu"}}
+ {"translation":{"let":"vrssrrwzxrrszvuyssuvuuwyvyxrwuzztyz", "rev" :"zytzzuwrxyvywuuvussyuvzsrrxzwrrssrv"}}
+ {"translation":{"let":"tzyuxzvtwzsvrtuuytrytwuztvsuxzrzxsxyruwrwztyv", "rev" :"vytzwrwuryxsxzrzxusvtzuwtyrtyuutrvszwtvzxuyzt"}}
+ {"translation":{"let":"zswv", "rev" :"vwsz"}}
+ {"translation":{"let":"rzzxysvrsxzyztsswutt", "rev" :"ttuwsstzyzxsrvsyxzzr"}}
+ {"translation":{"let":"ywutrtvxyvrzxurrwrzstrtsztz", "rev" :"ztzstrtszrwrruxzrvyxvtrtuwy"}}
+ {"translation":{"let":"urtzwyxxwttsxzsrsyyuvrrxszwzwxzxuvvxtxsruzzwvv", "rev" :"vvwzzursxtxvvuxzxwzwzsxrrvuyysrszxsttwxxywztru"}}
+ {"translation":{"let":"trxxuszztrvzuzrtytxtwswyyxtswrurwys", "rev" :"sywrurwstxyywswtxtytrzuzvrtzzsuxxrt"}}
+ {"translation":{"let":"vruzrwxytr", "rev" :"rtyxwrzurv"}}
+ {"translation":{"let":"txtvxzxsyruwtwt", "rev" :"twtwurysxzxvtxt"}}
+ {"translation":{"let":"x", "rev" :"x"}}
+ {"translation":{"let":"wwryxuyvwvuwvsstvrrwxzszzuzuyxzttxvzyrx", "rev" :"xryzvxttzxyuzuzzszxwrrvtssvwuvwvyuxyrww"}}
+ {"translation":{"let":"rrrwyvywvstxuzswuuszstzvwu", "rev" :"uwvztszsuuwszuxtsvwyvywrrr"}}
+ {"translation":{"let":"vwrzxvyvsusyrtxzsz", "rev" :"zszxtrysusvyvxzrwv"}}
+ {"translation":{"let":"zz", "rev" :"zz"}}
+ {"translation":{"let":"uvrvwvuszttszruyuytyzxyrtzrvvw", "rev" :"wvvrztryxzytyuyurzsttzsuvwvrvu"}}
+ {"translation":{"let":"xyswysuvsxwrutyuzuyytrvuuwtsrtssuusrryw", "rev" :"wyrrsuusstrstwuuvrtyyuzuyturwxsvusywsyx"}}
+ {"translation":{"let":"zyuvv", "rev" :"vvuyz"}}
+ {"translation":{"let":"rzsvxxrztvsvrwz", "rev" :"zwrvsvtzrxxvszr"}}
+ {"translation":{"let":"tzrusyyvvvuruzsxszwyrutszzruwvtystwvrzvuyywswuwwxv", "rev" :"vxwwuwswyyuvzrvwtsytvwurzzsturywzsxszuruvvvyysurzt"}}
+ {"translation":{"let":"sttuyzvytzzrsvx", "rev" :"xvsrzztyvzyutts"}}
+ {"translation":{"let":"swwwyztv", "rev" :"vtzywwws"}}
+ {"translation":{"let":"zszwvuyrzwvyzr", "rev" :"rzyvwzryuvwzsz"}}
+ {"translation":{"let":"xvwwtyty", "rev" :"ytytwwvx"}}
+ {"translation":{"let":"sttuzsvyvwwzvvyturuttwtvur", "rev" :"ruvtwtturutyvvzwwvyvszutts"}}
+ {"translation":{"let":"vzytszrsrwrxxzyzzsvyyzusurutzwxuszuxwwvrsxsrv", "rev" :"vrsxsrvwwxuzsuxwzturusuzyyvszzyzxxrwrsrzstyzv"}}
+ {"translation":{"let":"vszzvwwxxzztxxrswzvvsurxuyttwyyyrwwzvzwrryszu", "rev" :"uzsyrrwzvzwwryyywttyuxrusvvzwsrxxtzzxxwwvzzsv"}}
+ {"translation":{"let":"ytsx", "rev" :"xsty"}}
+ {"translation":{"let":"xvvwvwxvr", "rev" :"rvxwvwvvx"}}
+ {"translation":{"let":"utrsvwuyvxrwtusuusxtszszyxxxzyzswrtryyvytxw", "rev" :"wxtyvyyrtrwszyzxxxyzszstxsuusutwrxvyuwvsrtu"}}
+ {"translation":{"let":"uuzvwvxsrrwxuwr", "rev" :"rwuxwrrsxvwvzuu"}}
+ {"translation":{"let":"zsrszxyxvyytrt", "rev" :"trtyyvxyxzsrsz"}}
+ {"translation":{"let":"xyuvwxrswxtxvvxtrtyrzttssrxuy", "rev" :"yuxrssttzrytrtxvvxtxwsrxwvuyx"}}
+ {"translation":{"let":"uxsxtsvrwywuwzxrxvrrrzttzsszrrtzsztyv", "rev" :"vytzsztrrzsszttzrrrvxrxzwuwywrvstxsxu"}}
+ {"translation":{"let":"xrwrrssvtwyuyvtyvzszxwxzsuz", "rev" :"zuszxwxzszvytvyuywtvssrrwrx"}}
+ {"translation":{"let":"sywuwuxwxrsruxysrrruvuxstwxyzr", "rev" :"rzyxwtsxuvurrrsyxursrxwxuwuwys"}}
+ {"translation":{"let":"sxuwzwrsrvxtstuysuzxstzsvyxzzztuxruswyx", "rev" :"xywsurxutzzzxyvsztsxzusyutstxvrsrwzwuxs"}}
+ {"translation":{"let":"wttrrvwzxtusuyustwxxyrswywrvstzywzu", "rev" :"uzwyztsvrwywsryxxwtsuyusutxzwvrrttw"}}
+ {"translation":{"let":"vuzwttsrvwv", "rev" :"vwvrsttwzuv"}}
+ {"translation":{"let":"xxrtytszyzvrwzsyysryrvyuxsyvwuw", "rev" :"wuwvysxuyvryrsyyszwrvzyzstytrxx"}}
+ {"translation":{"let":"ttzyxsxxwsuzvtwxyxwswrxrtyvtrrxuuvurwtyv", "rev" :"vytwruvuuxrrtvytrxrwswxyxwtvzuswxxsxyztt"}}
+ {"translation":{"let":"uyrvyusvuwzvzxwwyuytxsrztxwsv", "rev" :"vswxtzrsxtyuywwxzvzwuvsuyvryu"}}
+ {"translation":{"let":"xuwuuvztustvxuyvxyszzwwvysywxvrzsvrwtryzxtxr", "rev" :"rxtxzyrtwrvszrvxwysyvwwzzsyxvyuxvtsutzvuuwux"}}
+ {"translation":{"let":"ytvywztuztrtryrttyuxurvrwuzurzyvsytzsxwssx", "rev" :"xsswxsztysvyzruzuwrvruxuyttryrtrtzutzwyvty"}}
+ {"translation":{"let":"tryuvxzxztruutwuruy", "rev" :"yuruwtuurtzxzxvuyrt"}}
+ {"translation":{"let":"vtzuuvszwyssyywysvutur", "rev" :"rutuvsywyyssywzsvuuztv"}}
+ {"translation":{"let":"trzwzww", "rev" :"wwzwzrt"}}
+ {"translation":{"let":"yzwuwtwztwxvyyzsuuwuzx", "rev" :"xzuwuuszyyvxwtzwtwuwzy"}}
+ {"translation":{"let":"y", "rev" :"y"}}
+ {"translation":{"let":"xvywrvyvywrxxzzrwuvvtuvztxyvswrtzztxtwssyrustts", "rev" :"sttsurysswtxtzztrwsvyxtzvutvvuwrzzxxrwyvyvrwyvx"}}
+ {"translation":{"let":"zyyvrxvwwtutrztrvzw", "rev" :"wzvrtzrtutwwvxrvyyz"}}
+ {"translation":{"let":"zxtvzyrzxyvwwrtyzxttvy", "rev" :"yvttxzytrwwvyxzryzvtxz"}}
+ {"translation":{"let":"trxurrurvsxuuyuzvvrvxstvxrxtwxttrszywyvzzyvttvsy", "rev" :"ysvttvyzzvywyzsrttxwtxrxvtsxvrvvzuyuuxsvrurruxrt"}}
+ {"translation":{"let":"zvsxzvvyysusxzrrwwttwsxzuvrwvuryrusuzytz", "rev" :"ztyzusuryruvwrvuzxswttwwrrzxsusyyvvzxsvz"}}
+ {"translation":{"let":"wrxryyswvywvwrwrxuzzutuuywxssxvsy", "rev" :"ysvxssxwyuutuzzuxrwrwvwyvwsyyrxrw"}}
+ {"translation":{"let":"vuyw", "rev" :"wyuv"}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
+ {"translation":{"let":"", "rev" :""}}
requirements.txt ADDED
@@ -0,0 +1,9 @@
+ accelerate >= 0.12.0
+ datasets >= 1.8.0
+ sentencepiece != 0.1.92
+ protobuf
+ sacrebleu >= 1.4.12
+ py7zr
+ torch >= 1.3
+ evaluate
+ git+https://github.com/huggingface/transformers
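These pins mirror the Hugging Face translation example's requirements; the `require_version("datasets>=1.8.0", ...)` check in run_translation.py below assumes they are installed, e.g. via `pip install -r requirements.txt`.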
run_translation.py ADDED
@@ -0,0 +1,660 @@
1
+ #!/usr/bin/env python
2
+ # coding=utf-8
3
+ # Copyright The HuggingFace Team and The HuggingFace Inc. team. All rights reserved.
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+ """
17
+ Fine-tuning the library models for sequence to sequence.
18
+ """
19
+ # You can also adapt this script on your own sequence to sequence task. Pointers for this are left as comments.
20
+
21
+ import logging
22
+ import os
23
+ import sys
24
+ from dataclasses import dataclass, field
25
+ from typing import Optional
26
+
27
+ import datasets
28
+ import numpy as np
29
+ from datasets import load_dataset
30
+
31
+ import evaluate
32
+ import transformers
33
+ from transformers import (
34
+ AutoConfig,
35
+ AutoModelForSeq2SeqLM,
36
+ AutoTokenizer,
37
+ DataCollatorForSeq2Seq,
38
+ HfArgumentParser,
39
+ M2M100Tokenizer,
40
+ MBart50Tokenizer,
41
+ MBart50TokenizerFast,
42
+ MBartTokenizer,
43
+ MBartTokenizerFast,
44
+ Seq2SeqTrainer,
45
+ Seq2SeqTrainingArguments,
46
+ default_data_collator,
47
+ set_seed,
48
+ )
49
+ from transformers.trainer_utils import get_last_checkpoint
50
+ from transformers.utils import check_min_version, send_example_telemetry
51
+ from transformers.utils.versions import require_version
52
+
53
+
54
+ # Will error if the minimal version of Transformers is not installed. Remove at your own risks.
55
+ check_min_version("4.26.0.dev0")
56
+
57
+ require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/translation/requirements.txt")
58
+
59
+ logger = logging.getLogger(__name__)
60
+
61
+ # A list of all multilingual tokenizer which require src_lang and tgt_lang attributes.
62
+ MULTILINGUAL_TOKENIZERS = [MBartTokenizer, MBartTokenizerFast, MBart50Tokenizer, MBart50TokenizerFast, M2M100Tokenizer]
+
+
+ @dataclass
+ class ModelArguments:
+     """
+     Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
+     """
+
+     model_name_or_path: str = field(
+         metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
+     )
+     config_name: Optional[str] = field(
+         default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
+     )
+     tokenizer_name: Optional[str] = field(
+         default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
+     )
+     cache_dir: Optional[str] = field(
+         default=None,
+         metadata={"help": "Where to store the pretrained models downloaded from huggingface.co"},
+     )
+     use_fast_tokenizer: bool = field(
+         default=True,
+         metadata={"help": "Whether to use one of the fast tokenizers (backed by the tokenizers library) or not."},
+     )
+     model_revision: str = field(
+         default="main",
+         metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
+     )
+     use_auth_token: bool = field(
+         default=False,
+         metadata={
+             "help": (
+                 "Will use the token generated when running `huggingface-cli login` (necessary to use this script "
+                 "with private models)."
+             )
+         },
+     )
+
+
+ @dataclass
+ class DataTrainingArguments:
+     """
+     Arguments pertaining to what data we are going to input our model for training and eval.
+     """
+
+     source_lang: str = field(default=None, metadata={"help": "Source language id for translation."})
+     target_lang: str = field(default=None, metadata={"help": "Target language id for translation."})
+
+     dataset_name: Optional[str] = field(
+         default=None, metadata={"help": "The name of the dataset to use (via the datasets library)."}
+     )
+     dataset_config_name: Optional[str] = field(
+         default=None, metadata={"help": "The configuration name of the dataset to use (via the datasets library)."}
+     )
+     train_file: Optional[str] = field(default=None, metadata={"help": "The input training data file (a jsonlines file)."})
+     validation_file: Optional[str] = field(
+         default=None,
+         metadata={
+             "help": "An optional input evaluation data file to evaluate the metrics (sacrebleu) on (a jsonlines file)."
+         },
+     )
+     test_file: Optional[str] = field(
+         default=None,
+         metadata={"help": "An optional input test data file to evaluate the metrics (sacrebleu) on (a jsonlines file)."},
+     )
+     overwrite_cache: bool = field(
+         default=False, metadata={"help": "Overwrite the cached training and evaluation sets"}
+     )
+     preprocessing_num_workers: Optional[int] = field(
+         default=None,
+         metadata={"help": "The number of processes to use for the preprocessing."},
+     )
+     max_source_length: Optional[int] = field(
+         default=1024,
+         metadata={
+             "help": (
+                 "The maximum total input sequence length after tokenization. Sequences longer "
+                 "than this will be truncated, sequences shorter will be padded."
+             )
+         },
+     )
+     max_target_length: Optional[int] = field(
+         default=128,
+         metadata={
+             "help": (
+                 "The maximum total sequence length for target text after tokenization. Sequences longer "
+                 "than this will be truncated, sequences shorter will be padded."
+             )
+         },
+     )
+     val_max_target_length: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "The maximum total sequence length for validation target text after tokenization. Sequences longer "
+                 "than this will be truncated, sequences shorter will be padded. Will default to `max_target_length`. "
+                 "This argument is also used to override the ``max_length`` param of ``model.generate``, which is used "
+                 "during ``evaluate`` and ``predict``."
+             )
+         },
+     )
+     pad_to_max_length: bool = field(
+         default=False,
+         metadata={
+             "help": (
+                 "Whether to pad all samples to the model's maximum sentence length. "
+                 "If False, will pad the samples dynamically when batching to the maximum length in the batch. More "
+                 "efficient on GPU but very bad for TPU."
+             )
+         },
+     )
+     max_train_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "For debugging purposes or quicker training, truncate the number of training examples to this "
+                 "value if set."
+             )
+         },
+     )
+     max_eval_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "For debugging purposes or quicker training, truncate the number of evaluation examples to this "
+                 "value if set."
+             )
+         },
+     )
+     max_predict_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "For debugging purposes or quicker training, truncate the number of prediction examples to this "
+                 "value if set."
+             )
+         },
+     )
+     num_beams: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "Number of beams to use for evaluation. This argument will be passed to ``model.generate``, "
+                 "which is used during ``evaluate`` and ``predict``."
+             )
+         },
+     )
+     ignore_pad_token_for_loss: bool = field(
+         default=True,
+         metadata={
+             "help": "Whether to ignore the tokens corresponding to padded labels in the loss computation or not."
+         },
+     )
+     source_prefix: Optional[str] = field(
+         default=None, metadata={"help": "A prefix to add before every source text (useful for T5 models)."}
+     )
+     forced_bos_token: Optional[str] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "The token to force as the first generated token after the :obj:`decoder_start_token_id`. "
+                 "Useful for multilingual models like :doc:`mBART <../model_doc/mbart>` where the first generated "
+                 "token needs to be the target language token."
+             )
+         },
+     )
+
+     def __post_init__(self):
+         if self.dataset_name is None and self.train_file is None and self.validation_file is None:
+             raise ValueError("Need either a dataset name or a training/validation file.")
+         elif self.source_lang is None or self.target_lang is None:
+             raise ValueError("Need to specify the source language and the target language.")
+
+         # accepting both json and jsonl file extensions, as
+         # many jsonlines files actually have a .json extension
+         valid_extensions = ["json", "jsonl"]
+
+         if self.train_file is not None:
+             extension = self.train_file.split(".")[-1]
+             assert extension in valid_extensions, "`train_file` should be a jsonlines file."
+         if self.validation_file is not None:
+             extension = self.validation_file.split(".")[-1]
+             assert extension in valid_extensions, "`validation_file` should be a jsonlines file."
+         if self.val_max_target_length is None:
+             self.val_max_target_length = self.max_target_length
+
+
+ def main():
+     # See all possible arguments in src/transformers/training_args.py
+     # or by passing the --help flag to this script.
+     # We now keep distinct sets of args, for a cleaner separation of concerns.
+
+     parser = HfArgumentParser((ModelArguments, DataTrainingArguments, Seq2SeqTrainingArguments))
+     if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
+         # If we pass only one argument to the script and it's the path to a json file,
+         # let's parse it to get our arguments.
+         model_args, data_args, training_args = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
+     else:
+         model_args, data_args, training_args = parser.parse_args_into_dataclasses()
+
+     # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The
+     # information sent is the one passed as arguments along with your Python/PyTorch versions.
+     send_example_telemetry("run_translation", model_args, data_args)
+
+     # Setup logging
+     logging.basicConfig(
+         format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
+         datefmt="%m/%d/%Y %H:%M:%S",
+         handlers=[logging.StreamHandler(sys.stdout)],
+     )
+
+     log_level = training_args.get_process_log_level()
+     logger.setLevel(log_level)
+     datasets.utils.logging.set_verbosity(log_level)
+     transformers.utils.logging.set_verbosity(log_level)
+     transformers.utils.logging.enable_default_handler()
+     transformers.utils.logging.enable_explicit_format()
+
+     # Log on each process the small summary:
+     logger.warning(
+         f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}, "
+         + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}"
+     )
+     logger.info(f"Training/evaluation parameters {training_args}")
+
+     if data_args.source_prefix is None and model_args.model_name_or_path in [
+         "t5-small",
+         "t5-base",
+         "t5-large",
+         "t5-3b",
+         "t5-11b",
+     ]:
+         logger.warning(
+             "You're running a t5 model but didn't provide a source prefix, which is expected, e.g. with "
+             "`--source_prefix 'translate English to German: '`"
+         )
+
+     # Detecting last checkpoint.
+     last_checkpoint = None
+     if os.path.isdir(training_args.output_dir) and training_args.do_train and not training_args.overwrite_output_dir:
+         last_checkpoint = get_last_checkpoint(training_args.output_dir)
+         if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0:
+             raise ValueError(
+                 f"Output directory ({training_args.output_dir}) already exists and is not empty. "
+                 "Use --overwrite_output_dir to overcome."
+             )
+         elif last_checkpoint is not None and training_args.resume_from_checkpoint is None:
+             logger.info(
+                 f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change "
+                 "the `--output_dir` or add `--overwrite_output_dir` to train from scratch."
+             )
+
+     # Set seed before initializing model.
+     set_seed(training_args.seed)
+
+     # Get the datasets: you can either provide your own JSON training and evaluation files (see below)
+     # or just provide the name of one of the public datasets available on the hub at https://huggingface.co/datasets/
+     # (the dataset will be downloaded automatically from the datasets Hub).
+     #
+     # For translation, only JSON files are supported, with one field named "translation" containing two keys for the
+     # source and target languages (unless you adapt what follows).
+     #
+     # In distributed training, the load_dataset function guarantees that only one local process can concurrently
+     # download the dataset.
+     if data_args.dataset_name is not None:
+         # Downloading and loading a dataset from the hub.
+         raw_datasets = load_dataset(
+             data_args.dataset_name,
+             data_args.dataset_config_name,
+             cache_dir=model_args.cache_dir,
+             use_auth_token=True if model_args.use_auth_token else None,
+         )
+     else:
+         data_files = {}
+         if data_args.train_file is not None:
+             data_files["train"] = data_args.train_file
+             extension = data_args.train_file.split(".")[-1]
+         if data_args.validation_file is not None:
+             data_files["validation"] = data_args.validation_file
+             extension = data_args.validation_file.split(".")[-1]
+         if data_args.test_file is not None:
+             data_files["test"] = data_args.test_file
+             extension = data_args.test_file.split(".")[-1]
+         # The `datasets` library exposes this loader under the name "json" (which also
+         # reads .jsonl files), so remap the extension before loading.
+         if extension == "jsonl":
+             extension = "json"
+         raw_datasets = load_dataset(
+             extension,
+             data_files=data_files,
+             cache_dir=model_args.cache_dir,
+             use_auth_token=True if model_args.use_auth_token else None,
+         )
+     # See more about loading any type of standard or custom dataset (from files, python dict, pandas DataFrame, etc) at
+     # https://huggingface.co/docs/datasets/loading_datasets.html.
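+     # For illustration (a hypothetical record, not shipped with this commit), one
+     # jsonlines row would look like:
+     #     {"translation": {"en": "Hello.", "ro": "Buna ziua."}}
+     # where the two inner keys match --source_lang and --target_lang.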
355
+
356
+ # Load pretrained model and tokenizer
357
+ #
358
+ # Distributed training:
359
+ # The .from_pretrained methods guarantee that only one local process can concurrently
360
+ # download model & vocab.
361
+ config = AutoConfig.from_pretrained(
362
+ model_args.config_name if model_args.config_name else model_args.model_name_or_path,
363
+ cache_dir=model_args.cache_dir,
364
+ revision=model_args.model_revision,
365
+ use_auth_token=True if model_args.use_auth_token else None,
366
+ )
367
+ tokenizer = AutoTokenizer.from_pretrained(
368
+ model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
369
+ cache_dir=model_args.cache_dir,
370
+ use_fast=model_args.use_fast_tokenizer,
371
+ revision=model_args.model_revision,
372
+ use_auth_token=True if model_args.use_auth_token else None,
373
+ )
374
+ model = AutoModelForSeq2SeqLM.from_pretrained(
375
+ model_args.model_name_or_path,
376
+ from_tf=bool(".ckpt" in model_args.model_name_or_path),
377
+ config=config,
378
+ cache_dir=model_args.cache_dir,
379
+ revision=model_args.model_revision,
380
+ use_auth_token=True if model_args.use_auth_token else None,
381
+ )
382
+
383
+ # We resize the embeddings only when necessary to avoid index errors. If you are creating a model from scratch
384
+ # on a small vocab and want a smaller embedding size, remove this test.
385
+ embedding_size = model.get_input_embeddings().weight.shape[0]
386
+ if len(tokenizer) > embedding_size:
387
+ model.resize_token_embeddings(len(tokenizer))
388
+
389
+ # Set decoder_start_token_id
390
+ if model.config.decoder_start_token_id is None and isinstance(tokenizer, (MBartTokenizer, MBartTokenizerFast)):
391
+ if isinstance(tokenizer, MBartTokenizer):
392
+ model.config.decoder_start_token_id = tokenizer.lang_code_to_id[data_args.target_lang]
393
+ else:
394
+ model.config.decoder_start_token_id = tokenizer.convert_tokens_to_ids(data_args.target_lang)
395
+
396
+ if model.config.decoder_start_token_id is None:
397
+ raise ValueError("Make sure that `config.decoder_start_token_id` is correctly defined")
398
+
399
+ prefix = data_args.source_prefix if data_args.source_prefix is not None else ""
400
+
401
+ # Preprocessing the datasets.
402
+ # We need to tokenize inputs and targets.
403
+ if training_args.do_train:
404
+ column_names = raw_datasets["train"].column_names
405
+ elif training_args.do_eval:
406
+ column_names = raw_datasets["validation"].column_names
407
+ elif training_args.do_predict:
408
+ column_names = raw_datasets["test"].column_names
409
+ else:
410
+ logger.info("There is nothing to do. Please pass `do_train`, `do_eval` and/or `do_predict`.")
411
+ return
412
+
413
+ # For translation we set the codes of our source and target languages (only useful for mBART, the others will
414
+ # ignore those attributes).
415
+ if isinstance(tokenizer, tuple(MULTILINGUAL_TOKENIZERS)):
416
+ assert data_args.target_lang is not None and data_args.source_lang is not None, (
417
+ f"{tokenizer.__class__.__name__} is a multilingual tokenizer which requires --source_lang and "
418
+ "--target_lang arguments."
419
+ )
420
+
421
+ tokenizer.src_lang = data_args.source_lang
422
+ tokenizer.tgt_lang = data_args.target_lang
423
+
424
+ # For multilingual translation models like mBART-50 and M2M100 we need to force the target language token
425
+ # as the first generated token. We ask the user to explicitly provide this as --forced_bos_token argument.
426
+ forced_bos_token_id = (
427
+ tokenizer.lang_code_to_id[data_args.forced_bos_token] if data_args.forced_bos_token is not None else None
428
+ )
429
+ model.config.forced_bos_token_id = forced_bos_token_id
430
+
431
+ # Get the language codes for input/target.
432
+ source_lang = data_args.source_lang.split("_")[0]
433
+ target_lang = data_args.target_lang.split("_")[0]
434
+
435
+ # Temporarily set max_target_length for training.
436
+ max_target_length = data_args.max_target_length
437
+ padding = "max_length" if data_args.pad_to_max_length else False
438
+
439
+ if training_args.label_smoothing_factor > 0 and not hasattr(model, "prepare_decoder_input_ids_from_labels"):
440
+ logger.warning(
441
+ "label_smoothing is enabled but the `prepare_decoder_input_ids_from_labels` method is not defined for"
442
+ f"`{model.__class__.__name__}`. This will lead to loss being calculated twice and will take up more memory"
443
+ )
444
+
445
+ def preprocess_function(examples):
446
+ inputs = [ex[source_lang] for ex in examples["translation"]]
447
+ targets = [ex[target_lang] for ex in examples["translation"]]
448
+ inputs = [prefix + inp for inp in inputs]
449
+ model_inputs = tokenizer(inputs, max_length=data_args.max_source_length, padding=padding, truncation=True)
450
+
451
+ # Tokenize targets with the `text_target` keyword argument
452
+ labels = tokenizer(text_target=targets, max_length=max_target_length, padding=padding, truncation=True)
453
+
454
+ # If we are padding here, replace all tokenizer.pad_token_id in the labels by -100 when we want to ignore
455
+ # padding in the loss.
456
+ if padding == "max_length" and data_args.ignore_pad_token_for_loss:
457
+ labels["input_ids"] = [
458
+ [(l if l != tokenizer.pad_token_id else -100) for l in label] for label in labels["input_ids"]
459
+ ]
460
+
461
+ model_inputs["labels"] = labels["input_ids"]
462
+ return model_inputs
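+
+     # A concrete sketch (hypothetical values, added for illustration): for a record
+     # {"translation": {"en": "Hi.", "ro": "Salut."}} this returns input_ids and
+     # attention_mask for `prefix + "Hi."` plus "labels" token ids for "Salut.",
+     # with label pad positions already replaced by -100 when padding to max_length.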
+
+     if training_args.do_train:
+         if "train" not in raw_datasets:
+             raise ValueError("--do_train requires a train dataset")
+         train_dataset = raw_datasets["train"]
+         if data_args.max_train_samples is not None:
+             max_train_samples = min(len(train_dataset), data_args.max_train_samples)
+             train_dataset = train_dataset.select(range(max_train_samples))
+         with training_args.main_process_first(desc="train dataset map pre-processing"):
+             train_dataset = train_dataset.map(
+                 preprocess_function,
+                 batched=True,
+                 num_proc=data_args.preprocessing_num_workers,
+                 remove_columns=column_names,
+                 load_from_cache_file=not data_args.overwrite_cache,
+                 desc="Running tokenizer on train dataset",
+             )
+
+     if training_args.do_eval:
+         max_target_length = data_args.val_max_target_length
+         if "validation" not in raw_datasets:
+             raise ValueError("--do_eval requires a validation dataset")
+         eval_dataset = raw_datasets["validation"]
+         if data_args.max_eval_samples is not None:
+             max_eval_samples = min(len(eval_dataset), data_args.max_eval_samples)
+             eval_dataset = eval_dataset.select(range(max_eval_samples))
+         with training_args.main_process_first(desc="validation dataset map pre-processing"):
+             eval_dataset = eval_dataset.map(
+                 preprocess_function,
+                 batched=True,
+                 num_proc=data_args.preprocessing_num_workers,
+                 remove_columns=column_names,
+                 load_from_cache_file=not data_args.overwrite_cache,
+                 desc="Running tokenizer on validation dataset",
+             )
+
+     if training_args.do_predict:
+         max_target_length = data_args.val_max_target_length
+         if "test" not in raw_datasets:
+             raise ValueError("--do_predict requires a test dataset")
+         predict_dataset = raw_datasets["test"]
+         if data_args.max_predict_samples is not None:
+             max_predict_samples = min(len(predict_dataset), data_args.max_predict_samples)
+             predict_dataset = predict_dataset.select(range(max_predict_samples))
+         with training_args.main_process_first(desc="prediction dataset map pre-processing"):
+             predict_dataset = predict_dataset.map(
+                 preprocess_function,
+                 batched=True,
+                 num_proc=data_args.preprocessing_num_workers,
+                 remove_columns=column_names,
+                 load_from_cache_file=not data_args.overwrite_cache,
+                 desc="Running tokenizer on prediction dataset",
+             )
+
+     # Data collator
+     label_pad_token_id = -100 if data_args.ignore_pad_token_for_loss else tokenizer.pad_token_id
+     if data_args.pad_to_max_length:
+         data_collator = default_data_collator
+     else:
+         data_collator = DataCollatorForSeq2Seq(
+             tokenizer,
+             model=model,
+             label_pad_token_id=label_pad_token_id,
+             pad_to_multiple_of=8 if training_args.fp16 else None,
+         )
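+
+     # Note (explanatory addition, not from the original): pad_to_multiple_of=8 aligns
+     # dynamically padded batches with fp16 tensor-core tile sizes, which typically
+     # speeds up training on recent NVIDIA GPUs.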
+
+     # Metric
+     metric = evaluate.load("sacrebleu")
+
+     def postprocess_text(preds, labels):
+         preds = [pred.strip() for pred in preds]
+         labels = [[label.strip()] for label in labels]
+
+         return preds, labels
+
+     def compute_metrics(eval_preds):
+         preds, labels = eval_preds
+         if isinstance(preds, tuple):
+             preds = preds[0]
+         decoded_preds = tokenizer.batch_decode(preds, skip_special_tokens=True)
+         if data_args.ignore_pad_token_for_loss:
+             # Replace -100 in the labels as we can't decode them.
+             labels = np.where(labels != -100, labels, tokenizer.pad_token_id)
+         decoded_labels = tokenizer.batch_decode(labels, skip_special_tokens=True)
+
+         # Some simple post-processing
+         decoded_preds, decoded_labels = postprocess_text(decoded_preds, decoded_labels)
+
+         result = metric.compute(predictions=decoded_preds, references=decoded_labels)
+         result = {"bleu": result["score"]}
+
+         prediction_lens = [np.count_nonzero(pred != tokenizer.pad_token_id) for pred in preds]
+         result["gen_len"] = np.mean(prediction_lens)
+         result = {k: round(v, 4) for k, v in result.items()}
+         return result
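+
+     # Shape note (explanatory addition): sacrebleu accepts multiple references per
+     # prediction, which is why postprocess_text wraps each reference as [label]
+     # while the predictions stay a flat list of strings.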
+
+     # Initialize our Trainer
+     trainer = Seq2SeqTrainer(
+         model=model,
+         args=training_args,
+         train_dataset=train_dataset if training_args.do_train else None,
+         eval_dataset=eval_dataset if training_args.do_eval else None,
+         tokenizer=tokenizer,
+         data_collator=data_collator,
+         compute_metrics=compute_metrics if training_args.predict_with_generate else None,
+     )
+
+     # Training
+     if training_args.do_train:
+         checkpoint = None
+         if training_args.resume_from_checkpoint is not None:
+             checkpoint = training_args.resume_from_checkpoint
+         elif last_checkpoint is not None:
+             checkpoint = last_checkpoint
+         train_result = trainer.train(resume_from_checkpoint=checkpoint)
+         trainer.save_model()  # Saves the tokenizer too for easy upload
+
+         metrics = train_result.metrics
+         max_train_samples = (
+             data_args.max_train_samples if data_args.max_train_samples is not None else len(train_dataset)
+         )
+         metrics["train_samples"] = min(max_train_samples, len(train_dataset))
+
+         trainer.log_metrics("train", metrics)
+         trainer.save_metrics("train", metrics)
+         trainer.save_state()
+
+     # Evaluation
+     results = {}
+     max_length = (
+         training_args.generation_max_length
+         if training_args.generation_max_length is not None
+         else data_args.val_max_target_length
+     )
+     num_beams = data_args.num_beams if data_args.num_beams is not None else training_args.generation_num_beams
+     if training_args.do_eval:
+         logger.info("*** Evaluate ***")
+
+         metrics = trainer.evaluate(max_length=max_length, num_beams=num_beams, metric_key_prefix="eval")
+         max_eval_samples = data_args.max_eval_samples if data_args.max_eval_samples is not None else len(eval_dataset)
+         metrics["eval_samples"] = min(max_eval_samples, len(eval_dataset))
+
+         trainer.log_metrics("eval", metrics)
+         trainer.save_metrics("eval", metrics)
+
+     if training_args.do_predict:
+         logger.info("*** Predict ***")
+
+         predict_results = trainer.predict(
+             predict_dataset, metric_key_prefix="predict", max_length=max_length, num_beams=num_beams
+         )
+         metrics = predict_results.metrics
+         max_predict_samples = (
+             data_args.max_predict_samples if data_args.max_predict_samples is not None else len(predict_dataset)
+         )
+         metrics["predict_samples"] = min(max_predict_samples, len(predict_dataset))
+
+         trainer.log_metrics("predict", metrics)
+         trainer.save_metrics("predict", metrics)
+
+         if trainer.is_world_process_zero():
+             if training_args.predict_with_generate:
+                 predictions = tokenizer.batch_decode(
+                     predict_results.predictions, skip_special_tokens=True, clean_up_tokenization_spaces=True
+                 )
+                 predictions = [pred.strip() for pred in predictions]
+                 output_prediction_file = os.path.join(training_args.output_dir, "generated_predictions.txt")
+                 with open(output_prediction_file, "w", encoding="utf-8") as writer:
+                     writer.write("\n".join(predictions))
+
+     kwargs = {"finetuned_from": model_args.model_name_or_path, "tasks": "translation"}
+     if data_args.dataset_name is not None:
+         kwargs["dataset_tags"] = data_args.dataset_name
+         if data_args.dataset_config_name is not None:
+             kwargs["dataset_args"] = data_args.dataset_config_name
+             kwargs["dataset"] = f"{data_args.dataset_name} {data_args.dataset_config_name}"
+         else:
+             kwargs["dataset"] = data_args.dataset_name
+
+     languages = [l for l in [data_args.source_lang, data_args.target_lang] if l is not None]
+     if len(languages) > 0:
+         kwargs["language"] = languages
+
+     if training_args.push_to_hub:
+         trainer.push_to_hub(**kwargs)
+     else:
+         trainer.create_model_card(**kwargs)
+
+     return results
+
+
+ def _mp_fn(index):
+     # For xla_spawn (TPUs)
+     main()
+
+
+ if __name__ == "__main__":
+     main()
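+
+
+ # Example invocation (a sketch; the script filename and data paths below are
+ # assumptions, inferred from the send_example_telemetry("run_translation", ...)
+ # call above, and are not part of this commit). Because the parser accepts a
+ # single .json argument file, one could run:
+ #
+ #     python run_translation.py args.json
+ #
+ # with args.json along the lines of:
+ #     {"model_name_or_path": "t5-small", "source_lang": "en", "target_lang": "ro",
+ #      "train_file": "train.json", "validation_file": "valid.json",
+ #      "output_dir": "out", "do_train": true, "do_eval": true,
+ #      "predict_with_generate": true,
+ #      "source_prefix": "translate English to Romanian: "}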