Dataset schema (column, dtype, observed range or cardinality):

| Column          | Dtype           | Range / values |
|:----------------|:----------------|:---------------|
| sha             | null            | null           |
| last_modified   | null            | null           |
| library_name    | stringclasses   | 154 values     |
| text            | stringlengths   | 1 to 900k      |
| metadata        | stringlengths   | 2 to 348k      |
| pipeline_tag    | stringclasses   | 45 values      |
| id              | stringlengths   | 5 to 122       |
| tags            | sequencelengths | 1 to 1.84k     |
| created_at      | stringlengths   | 25 to 25       |
| arxiv           | sequencelengths | 0 to 201       |
| languages       | sequencelengths | 0 to 1.83k     |
| tags_str        | stringlengths   | 17 to 9.34k    |
| text_str        | stringlengths   | 0 to 389k      |
| text_lists      | sequencelengths | 0 to 722       |
| processed_texts | sequencelengths | 1 to 723       |
| tokens_length   | sequencelengths | 1 to 723       |
| input_texts     | sequencelengths | 1 to 61        |
| embeddings      | sequencelengths | 768 to 768     |
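The flattened header above is the dataset's column schema. As a sanity check, here is a minimal sketch of loading and inspecting such a dataset with the `datasets` library; the dataset ID is a hypothetical placeholder, since the dump does not name the repository:

```python
from datasets import load_dataset

# Hypothetical dataset ID -- the dump above does not name the repository.
ds = load_dataset("your-org/model-card-embeddings", split="train")

print(ds.features)                      # column names and dtypes, matching the schema table
row = ds[0]
print(row["id"], row["pipeline_tag"])   # e.g. "aboli-marathe/flan_t5_3185BLEU", "translation"
assert len(row["embeddings"]) == 768    # sequencelengths 768 to 768: one fixed-size vector per row
```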
Row 1:
sha: null
last_modified: null
library_name: transformers
text:
# Model Card for Model ID

Made by finetuning [google/flan-t5-small](https://huggingface.co/google/flan-t5-small).
{"license": "unknown", "metrics": ["bleu"], "pipeline_tag": "translation"}
translation
aboli-marathe/flan_t5_3185BLEU
[ "transformers", "safetensors", "t5", "text2text-generation", "translation", "license:unknown", "autotrain_compatible", "endpoints_compatible", "text-generation-inference", "region:us" ]
2023-11-12T17:02:03+00:00
[]
[]
TAGS #transformers #safetensors #t5 #text2text-generation #translation #license-unknown #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
text_str: # Model Card for Model ID Made by finetuning google/flan-t5-small.
text_lists: [ "# Model Card for Model ID\n\n\n\nMade by finetuning google/flan-t5-small." ]
processed_texts: [ "TAGS\n#transformers #safetensors #t5 #text2text-generation #translation #license-unknown #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n", "# Model Card for Model ID\n\n\n\nMade by finetuning google/flan-t5-small." ]
tokens_length: [ 59, 20 ]
input_texts: [ "passage: TAGS\n#transformers #safetensors #t5 #text2text-generation #translation #license-unknown #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Model Card for Model ID\n\n\n\nMade by finetuning google/flan-t5-small." ]
embeddings: [ -0.011941848322749138, -0.022965282201766968, -0.0022553654853254557, ... ] (768-dim float vector, truncated)
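The `input_texts` column prepends "passage: " to the concatenated tag string and card text, and the stored vectors are exactly 768-dimensional, which is consistent with an E5-style sentence encoder. The dump does not name the embedding model, so the checkpoint below is an assumption; this is a minimal sketch of producing such a vector with mean pooling:

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

# Assumed checkpoint: the "passage: " prefix and 768-dim output match E5-style
# encoders such as intfloat/e5-base-v2; the dump does not say which model was used.
name = "intfloat/e5-base-v2"
tok = AutoTokenizer.from_pretrained(name)
model = AutoModel.from_pretrained(name).eval()

text = "passage: TAGS\n#transformers #safetensors #t5 ...\n# Model Card for Model ID"
batch = tok(text, truncation=True, return_tensors="pt")

with torch.no_grad():
    hidden = model(**batch).last_hidden_state        # (1, seq_len, 768)

# Mean-pool over non-padding tokens, then L2-normalize (the usual E5 recipe;
# whether the stored vectors were normalized is not visible in the dump).
mask = batch["attention_mask"].unsqueeze(-1)
emb = F.normalize((hidden * mask).sum(1) / mask.sum(1), dim=-1)
print(emb.shape)                                     # torch.Size([1, 768])
```

The per-row `tokens_length` values (e.g. [ 59, 20 ] above) are plausibly the token counts of the `processed_texts` entries under this tokenizer, though the exact counts depend on which tokenizer was actually used.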

Row 2:
sha: null
last_modified: null
library_name: transformers
text:
# swin-tiny-patch4-window7-224-finetuned-eurosat

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set:
- Loss: 0.0136
- Accuracy: 0.9938

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.0694        | 1.0   | 56   | 0.0158          | 0.995    |
| 0.0495        | 1.99  | 112  | 0.0207          | 0.9925   |
| 0.0402        | 2.99  | 168  | 0.0136          | 0.9938   |

### Framework versions

- Transformers 4.35.0
- Pytorch 2.1.0+cu118
- Datasets 2.14.6
- Tokenizers 0.14.1
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "metrics": ["accuracy"], "base_model": "microsoft/swin-tiny-patch4-window7-224", "model-index": [{"name": "swin-tiny-patch4-window7-224-finetuned-eurosat", "results": []}]}
image-classification
Artemiy27/swin-tiny-patch4-window7-224-finetuned-eurosat
[ "transformers", "tensorboard", "safetensors", "swin", "image-classification", "generated_from_trainer", "base_model:microsoft/swin-tiny-patch4-window7-224", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T17:02:41+00:00
[]
[]
TAGS #transformers #tensorboard #safetensors #swin #image-classification #generated_from_trainer #base_model-microsoft/swin-tiny-patch4-window7-224 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
swin-tiny-patch4-window7-224-finetuned-eurosat ============================================== This model is a fine-tuned version of microsoft/swin-tiny-patch4-window7-224 on an unknown dataset. It achieves the following results on the evaluation set: * Loss: 0.0136 * Accuracy: 0.9938 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 5e-05 * train\_batch\_size: 32 * eval\_batch\_size: 32 * seed: 42 * gradient\_accumulation\_steps: 4 * total\_train\_batch\_size: 128 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * lr\_scheduler\_warmup\_ratio: 0.1 * num\_epochs: 3 ### Training results ### Framework versions * Transformers 4.35.0 * Pytorch 2.1.0+cu118 * Datasets 2.14.6 * Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 32\n* seed: 42\n* gradient\\_accumulation\\_steps: 4\n* total\\_train\\_batch\\_size: 128\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 3", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #tensorboard #safetensors #swin #image-classification #generated_from_trainer #base_model-microsoft/swin-tiny-patch4-window7-224 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 32\n* seed: 42\n* gradient\\_accumulation\\_steps: 4\n* total\\_train\\_batch\\_size: 128\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 3", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 77, 144, 4, 33 ]
[ "passage: TAGS\n#transformers #tensorboard #safetensors #swin #image-classification #generated_from_trainer #base_model-microsoft/swin-tiny-patch4-window7-224 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 32\n* seed: 42\n* gradient\\_accumulation\\_steps: 4\n* total\\_train\\_batch\\_size: 128\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 3### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
embeddings: [ -0.142108753323555, 0.13684308528900146, -0.0017721746116876602, ... ] (768-dim float vector, truncated)
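Row 2's card fully specifies the fine-tuning run, including the batch arithmetic: 32 samples per device times 4 gradient-accumulation steps gives the stated effective batch size of 128. The original training script is not part of the dump, so the sketch below reconstructs the configuration through the standard `Trainer` API rather than the author's actual code:

```python
from transformers import TrainingArguments

# A sketch of the hyperparameters listed in the card above. The card's
# "Adam with betas=(0.9,0.999) and epsilon=1e-08" matches the Trainer
# defaults, so no explicit optimizer argument is needed.
args = TrainingArguments(
    output_dir="swin-tiny-patch4-window7-224-finetuned-eurosat",
    learning_rate=5e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    seed=42,
    gradient_accumulation_steps=4,   # 32 x 4 = 128 effective train batch size
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=3,
)
```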

Row 3:
sha: null
last_modified: null
library_name: transformers
text: All Rights Reserved
metadata: {}
pipeline_tag: text-generation
id: f0rGoTTen000/AgroGPT_125M
tags: [ "transformers", "pytorch", "gpt_neo", "text-generation", "autotrain_compatible", "endpoints_compatible", "region:us" ]
created_at: 2023-11-12T17:11:20+00:00
arxiv: []
languages: []
tags_str: TAGS #transformers #pytorch #gpt_neo #text-generation #autotrain_compatible #endpoints_compatible #region-us
text_str: All Rights Reserved
text_lists: []
processed_texts: [ "TAGS\n#transformers #pytorch #gpt_neo #text-generation #autotrain_compatible #endpoints_compatible #region-us \n" ]
tokens_length: [ 39 ]
input_texts: [ "passage: TAGS\n#transformers #pytorch #gpt_neo #text-generation #autotrain_compatible #endpoints_compatible #region-us \n" ]
embeddings: [ -0.031129082664847374, 0.010089954361319542, -0.005786326713860035, ... ] (768-dim float vector, truncated)
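Row 3 is a bare repository (the entire card text is "All Rights Reserved"), but its tags mark it as a PyTorch GPT-Neo text-generation model, so the generic pipeline API should be able to load it. A hedged sketch; the prompt is an arbitrary example and output quality is unknown:

```python
from transformers import pipeline

# The tags (#pytorch #gpt_neo #text-generation) suggest a standard causal LM;
# nothing else about the model is documented in the card.
generator = pipeline("text-generation", model="f0rGoTTen000/AgroGPT_125M")
print(generator("Crop rotation improves", max_new_tokens=30)[0]["generated_text"])
```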
null
null
diffusers
### My-Pet-rabbit Dreambooth model trained by MANASA-1919 following the "Build your own Gen AI model" session by NxtWave.

Project Submission Code: PIETW-212

Sample pictures of this concept:

![0](https://huggingface.co/MANASA-1919/my-pet-rabbit/resolve/main/sample_images/enq-1998-pxu2yxCTbTU-unsplash_%281%29.jpg)
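A minimal usage sketch for this concept (assumptions: the standard `diffusers` `StableDiffusionPipeline` API, a CUDA device, and a hypothetical prompt — the actual instance token the model was trained with may differ):

```python
# Sketch: load the DreamBooth weights and sample one image.
import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "MANASA-1919/my-pet-rabbit",
    torch_dtype=torch.float16,  # halves memory; use float32 on CPU
).to("cuda")

# Hypothetical prompt; substitute the concept token used during training.
image = pipe("a photo of my-pet-rabbit in a garden", num_inference_steps=30).images[0]
image.save("my-pet-rabbit.png")
```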
{"license": "creativeml-openrail-m", "tags": ["NxtWave-GenAI-Webinar", "text-to-image", "stable-diffusion"]}
text-to-image
MANASA-1919/my-pet-rabbit
[ "diffusers", "safetensors", "NxtWave-GenAI-Webinar", "text-to-image", "stable-diffusion", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
2023-11-12T17:23:49+00:00
[]
[]
TAGS #diffusers #safetensors #NxtWave-GenAI-Webinar #text-to-image #stable-diffusion #license-creativeml-openrail-m #endpoints_compatible #has_space #diffusers-StableDiffusionPipeline #region-us
### My-Pet-rabbit Dreambooth model trained by MANASA-1919 following the "Build your own Gen AI model" session by NxtWave. Project Submission Code: PIETW-212 Sample pictures of this concept: !0.jpg)
[ "### My-Pet-rabbit Dreambooth model trained by MANASA-1919 following the \"Build your own Gen AI model\" session by NxtWave.\n\nProject Submission Code: PIETW-212\n\nSample pictures of this concept:\n\n !0.jpg)" ]
[ "TAGS\n#diffusers #safetensors #NxtWave-GenAI-Webinar #text-to-image #stable-diffusion #license-creativeml-openrail-m #endpoints_compatible #has_space #diffusers-StableDiffusionPipeline #region-us \n", "### My-Pet-rabbit Dreambooth model trained by MANASA-1919 following the \"Build your own Gen AI model\" session by NxtWave.\n\nProject Submission Code: PIETW-212\n\nSample pictures of this concept:\n\n !0.jpg)" ]
[ 77, 59 ]
[ "passage: TAGS\n#diffusers #safetensors #NxtWave-GenAI-Webinar #text-to-image #stable-diffusion #license-creativeml-openrail-m #endpoints_compatible #has_space #diffusers-StableDiffusionPipeline #region-us \n### My-Pet-rabbit Dreambooth model trained by MANASA-1919 following the \"Build your own Gen AI model\" session by NxtWave.\n\nProject Submission Code: PIETW-212\n\nSample pictures of this concept:\n\n !0.jpg)" ]
[ -0.09466423839330673, 0.13253657519817352, -0.0013108402490615845, 0.004656786564737558, 0.06036553904414177, -0.014421354047954082, 0.14546090364456177, 0.009823665954172611, -0.05602928251028061, 0.03862942010164261, 0.1504296213388443, 0.036816202104091644, 0.021976718679070473, 0.1762906014919281, 0.0007476287428289652, -0.12488988041877747, 0.042667586356401443, 0.06638659536838531, -0.011314974166452885, 0.08000347018241882, 0.059822626411914825, -0.06741256266832352, 0.1253436654806137, -0.008451446890830994, -0.1592765599489212, -0.007213372737169266, -0.03999638557434082, -0.03663548082113266, 0.08423853665590286, 0.025349488481879234, 0.08407993614673615, 0.0909164622426033, 0.030442969873547554, -0.034637048840522766, 0.042459189891815186, 0.04342355206608772, -0.04949452728033066, 0.06961862742900848, 0.030251234769821167, 0.08014936000108719, 0.11355660855770111, 0.029052549973130226, -0.058441393077373505, 0.036899540573358536, -0.08200672268867493, -0.04198680445551872, 0.011014393530786037, 0.09862709045410156, 0.11059897392988205, 0.07575060427188873, 0.0007354666013270617, 0.1401897519826889, 0.03614867106080055, 0.10792702436447144, 0.15792931616306305, -0.2700788676738739, -0.10106765478849411, 0.15293407440185547, 0.07914109528064728, 0.06083850935101509, -0.04328315705060959, 0.09277506172657013, 0.09524785727262497, 0.007372935302555561, 0.03572868928313255, -0.05793999135494232, 0.07419177144765854, -0.07756076753139496, -0.13288173079490662, 0.006657904479652643, 0.23124760389328003, 0.05253100395202637, -0.03058849833905697, -0.04967706277966499, -0.11473691463470459, -0.010793319903314114, -0.054877132177352905, -0.01711900532245636, -0.041143886744976044, 0.024497469887137413, -0.03189430013298988, -0.06415905803442001, -0.13262848556041718, -0.045859117060899734, -0.037069689482450485, 0.14369569718837738, 0.005144281778484583, 0.07209695130586624, -0.11773291975259781, 0.0758337453007698, 0.02230880595743656, -0.1081271544098854, 0.03944646939635277, -0.09984259307384491, 0.039727985858917236, 0.059473179280757904, 0.00994765106588602, -0.0927024632692337, 0.09050320088863373, -0.01312047615647316, -0.019485320895910263, -0.02443641424179077, 0.044334255158901215, 0.08914721012115479, 0.024322452023625374, -0.04208986088633537, -0.12736834585666656, -0.09574075043201447, 0.002253802726045251, -0.05146939307451248, 0.010356838814914227, -0.01744917407631874, -0.07966860383749008, 0.01347348652780056, -0.006093785632401705, 0.01214023120701313, 0.05521615222096443, 0.06543438881635666, -0.013743565417826176, -0.025972507894039154, 0.2015163153409958, 0.047484345734119415, -0.02854713425040245, -0.019798435270786285, 0.042666833847761154, 0.02397584170103073, 0.047954220324754715, -0.00897952914237976, 0.008710670284926891, -0.01807098649442196, -0.10301975160837173, -0.06910346448421478, -0.031371984630823135, -0.056363970041275024, -0.006813984364271164, -0.13090309500694275, 0.030514445155858994, -0.2021600306034088, -0.05335279181599617, 0.04455162584781647, 0.05213823914527893, -0.022630497813224792, -0.045267730951309204, -0.014154806733131409, -0.08562158048152924, 0.012719906866550446, -0.0037763910368084908, 0.005176975857466459, -0.019608421251177788, 0.04769851639866829, -0.02587207965552807, 0.10112100094556808, -0.21169684827327728, 0.00448946189135313, -0.06503236293792725, 0.037967387586832047, -0.00921196024864912, -0.01404312252998352, -0.017421847209334373, 0.0921991840004921, -0.03706102818250656, -0.017510835081338882, -0.041570812463760376, 
0.0007792208925820887, 0.030482284724712372, 0.1521623134613037, -0.09828316420316696, 0.01673927903175354, 0.14854171872138977, -0.1131247952580452, -0.16220425069332123, 0.11136176437139511, 0.05029044300317764, 0.09643924236297607, 0.04286513477563858, 0.1562124788761139, 0.10162009298801422, -0.1828565150499344, -0.00434239162132144, 0.042035847902297974, -0.12155276536941528, -0.17617174983024597, 0.0067776101641356945, 0.1291261911392212, -0.10382173955440521, -0.0010333637474104762, -0.06837310642004013, 0.09948878735303879, -0.08107726275920868, -0.032071251422166824, -0.02639918588101864, -0.12554487586021423, -0.003434147220104933, 0.009604623541235924, 0.043020956218242645, -0.025589650496840477, 0.00199054810218513, -0.13286054134368896, 0.03444785252213478, -0.02524172142148018, 0.001650791964493692, -0.08434164524078369, 0.07966894656419754, -0.05114830285310745, 0.00452386774122715, -0.039397940039634705, -0.06849762797355652, 0.023452047258615494, 0.114894799888134, -0.00022395593987312168, 0.17324788868427277, 0.054522767663002014, 0.07159362733364105, -0.007933790795505047, -0.078816257417202, 0.09784159809350967, 0.038022324442863464, -0.054466407746076584, -0.13564033806324005, 0.07670588046312332, -0.06944353878498077, -0.049007873982191086, -0.11754407733678818, 0.03710046410560608, 0.02087974175810814, 0.12619836628437042, 0.0484146885573864, 0.0008821970550343394, 0.03833552449941635, 0.00001641245762584731, -0.05751604959368706, -0.0014465744607150555, 0.07350841164588928, 0.04135282337665558, -0.07503984123468399, 0.1716003566980362, -0.12898693978786469, 0.1823354810476303, 0.09414562582969666, -0.047470495104789734, -0.04099539294838905, 0.04304976016283035, -0.07086098939180374, 0.019320180639624596, 0.0036435676738619804, -0.05131003260612488, -0.0212169848382473, -0.043858423829078674, 0.11364981532096863, -0.06533645838499069, -0.013049288652837276, 0.061883918941020966, -0.05095428600907326, -0.05853341147303581, 0.07926690578460693, 0.03525687754154205, -0.185389444231987, 0.14226031303405762, 0.17186933755874634, 0.009540433064103127, 0.19327020645141602, 0.023273484781384468, 0.01633286662399769, -0.053615376353263855, 0.06474477797746658, 0.008657866157591343, 0.2073318511247635, -0.06916619837284088, 0.02736116386950016, 0.02339363470673561, -0.004388085566461086, 0.04498882219195366, -0.1255827248096466, -0.06865640729665756, -0.003925779834389687, -0.005434328690171242, 0.09790946543216705, 0.08147997409105301, -0.10687337815761566, 0.09822617471218109, -0.06747197359800339, -0.1145486831665039, 0.01891731470823288, 0.004131932742893696, -0.05946431681513786, 0.08768030256032944, -0.07078734040260315, -0.2445831596851349, -0.12867775559425354, -0.0666302815079689, -0.04269718378782272, -0.007186187896877527, 0.06642350554466248, -0.049340371042490005, -0.010606430470943451, -0.09426818042993546, -0.06455844640731812, -0.07038556784391403, 0.046438466757535934, 0.03659304603934288, 0.025088492780923843, 0.0062446570955216885, -0.05945444852113724, 0.022710438817739487, -0.04971110075712204, 0.0347912572324276, 0.11350367218255997, -0.005609604995697737, 0.1741696000099182, 0.07228491455316544, -0.002918575657531619, -0.022415313869714737, -0.009165361523628235, 0.29050227999687195, -0.039874449372291565, 0.11338668316602707, 0.1320650726556778, 0.06533165276050568, 0.08185601234436035, 0.17863230407238007, 0.043855540454387665, -0.07819190621376038, 0.052014611661434174, -0.08761799335479736, -0.11170028150081635, -0.08523022383451462, 
-0.08695632964372635, -0.07081034034490585, 0.12185359746217728, 0.015376496128737926, 0.06123873218894005, 0.07042054831981659, 0.13727091252803802, 0.008922233246266842, -0.01971113309264183, -0.05539610609412193, 0.08673042804002762, -0.005902519915252924, -0.0455150343477726, 0.05376371741294861, -0.07113675028085709, -0.061011143028736115, 0.09812001138925552, 0.061669670045375824, 0.1377507597208023, 0.036192137748003006, 0.007462394889444113, 0.08071450144052505, 0.14563649892807007, 0.13276556134223938, 0.10595181584358215, -0.04048207774758339, -0.06020835414528847, -0.02688334882259369, -0.06893619149923325, 0.07728838920593262, 0.05787159875035286, -0.029344871640205383, -0.06265594810247421, 0.07131098955869675, -0.012479478493332863, -0.015013785101473331, 0.13544093072414398, 0.12999920547008514, -0.23389282822608948, 0.0177083071321249, 0.0019807112403213978, 0.032289158552885056, -0.05604427307844162, 0.01386899221688509, 0.23052754998207092, 0.004395959433168173, 0.05828934907913208, -0.025309216231107712, 0.07913815230131149, 0.08259940147399902, 0.02123398706316948, -0.04252423718571663, 0.018434928730130196, -0.022565990686416626, 0.02410992793738842, -0.16662080585956573, 0.22211262583732605, -0.014592739753425121, 0.006875037215650082, 0.0020628185011446476, -0.04758387431502342, -0.03244011476635933, 0.18024738132953644, 0.14951170980930328, 0.03170514851808548, -0.05016627162694931, -0.061670441180467606, -0.09257159382104874, 0.022295484319329262, 0.033596232533454895, 0.013205762021243572, 0.027916450053453445, 0.05981026217341423, -0.023562990128993988, 0.006597467698156834, 0.09717091172933578, -0.16636808216571808, -0.10158810764551163, -0.02040799893438816, 0.223492830991745, 0.025392642244696617, -0.020844249054789543, 0.02141742780804634, -0.0661146268248558, 0.057159364223480225, -0.16412776708602905, -0.07174967229366302, -0.08666551858186722, -0.07829705625772476, -0.02426525577902794, -0.04323366656899452, 0.0056053572334349155, -0.08402635157108307, 0.0595550537109375, -0.055326249450445175, -0.12190017849206924, 0.04729832708835602, -0.168380469083786, -0.09718326479196548, -0.09658975899219513, 0.0461556613445282, 0.0324413925409317, -0.0038021423388272524, 0.009109657257795334, -0.029183445498347282, -0.0796467512845993, -0.09801869094371796, 0.015099418349564075, 0.019124656915664673, -0.0355694480240345, -0.047369830310344696, -0.08240322023630142, -0.1050209179520607, -0.03349815309047699, -0.03418667986989021, 0.1017589196562767, 0.23716357350349426, -0.09994825720787048, 0.06811671704053879, 0.22544778883457184, -0.024297624826431274, -0.26092472672462463, -0.1457538902759552, -0.046670883893966675, -0.029131030663847923, 0.035469312220811844, -0.09591636806726456, 0.12315227091312408, 0.0242945346981287, -0.04044459015130997, 0.12823157012462616, -0.2489706575870514, -0.056172262877225876, 0.04104908928275108, 0.13741153478622437, 0.31035810708999634, -0.14737769961357117, -0.00739972572773695, -0.004704544320702553, -0.07793576270341873, 0.16530586779117584, -0.000016306554243783467, 0.0723864808678627, -0.04085037112236023, 0.02548600733280182, -0.029078541323542595, -0.031515877693891525, 0.10508350282907486, -0.06349732726812363, 0.036967407912015915, -0.06335552036762238, 0.08297032117843628, 0.1712510585784912, -0.017412923276424408, 0.021994641050696373, -0.12879578769207, 0.039291832596063614, -0.09995710104703903, 0.013556160032749176, -0.05830860137939453, 0.005322371609508991, -0.03515585511922836, -0.10596350580453873, 
-0.09936463832855225, 0.02077600546181202, 0.013311715796589851, 0.016030242666602135, -0.018442997708916664, 0.00785304605960846, 0.04526362195611, 0.19683875143527985, 0.015806475654244423, -0.08598051220178604, -0.034135278314352036, -0.06485558301210403, -0.05982571467757225, 0.12432374805212021, -0.02600136026740074, -0.03204651549458504, 0.08892565965652466, -0.0028998018242418766, 0.02909138798713684, 0.037676528096199036, -0.05023027956485748, 0.035902559757232666, 0.12087401002645493, -0.15934693813323975, -0.17658311128616333, -0.02286042459309101, 0.18805423378944397, 0.049045905470848083, 0.10731566697359085, 0.14832310378551483, -0.09290722757577896, 0.02701965719461441, -0.04331360384821892, 0.017745032906532288, -0.045522432774305344, 0.05870332196354866, 0.015750180929899216, 0.052062176167964935, -0.05081517994403839, -0.0018688676645979285, -0.018136899918317795, -0.022735347971320152, -0.027656985446810722, 0.03810732439160347, -0.10500991344451904, -0.08848273754119873, 0.057979684323072433, 0.07840409129858017, -0.1141735091805458, -0.07625696063041687, -0.022762898355722427, -0.06459216773509979, 0.046273212879896164, 0.0829991027712822, 0.025252629071474075, 0.009902452118694782, 0.025045834481716156, -0.01932946778833866, -0.026241766288876534, 0.0204791147261858, -0.043975938111543655, 0.11092734336853027, -0.20891498029232025, -0.07184720784425735, -0.004805782809853554, 0.04317324236035347, -0.09221665561199188, -0.012181776575744152, -0.10851844400167465, 0.008496220223605633, 0.011336101219058037, 0.055048618465662, -0.11684299260377884, -0.06854163110256195, -0.04146783798933029, -0.029736412689089775, -0.07258512079715729, 0.01619349606335163, -0.035116396844387054, 0.04743679240345955, 0.027915844693779945, 0.00039460445987060666, -0.02752179466187954, -0.0073986477218568325, -0.004744817037135363, -0.03554914519190788, 0.06104322150349617, -0.07557187229394913, -0.11015400290489197, -0.03592560067772865, -0.1852012425661087, -0.008246151730418205, 0.06672428548336029, 0.012324300594627857, 0.005325173027813435, 0.05325547978281975, -0.012157680466771126, 0.027144858613610268, 0.025991540402173996, -0.006985170301049948, 0.07002312690019608, -0.07781801372766495, 0.012132162228226662, -0.03302657976746559, -0.009384343400597572, -0.06877953559160233, -0.05051485076546669, 0.10049216449260712, 0.03745751827955246, 0.14184266328811646, -0.06578464806079865, 0.024853060021996498, -0.05049922317266464, 0.019716810435056686, 0.07363343983888626, -0.0892493724822998, 0.056139081716537476, -0.04522941634058952, -0.013641051948070526, 0.011855696327984333, 0.12247391790151596, -0.01901737041771412, -0.21820193529129028, 0.0015933343674987555, -0.06281819194555283, -0.05439525842666626, -0.018946081399917603, 0.2645494043827057, 0.0020871686283499002, -0.0005611410015262663, -0.12056891620159149, 0.05804886296391487, 0.06573327630758286, 0.12620921432971954, 0.04197673499584198, 0.09539296478033066, -0.0006494645494967699, 0.06737952679395676, 0.03240978717803955, 0.030929794535040855, -0.09593658149242401, -0.021521346643567085, -0.13399125635623932, 0.12762713432312012, -0.04283556714653969, 0.06765427440404892, 0.17537330090999603, -0.021895119920372963, -0.011061988770961761, 0.06226116418838501, -0.01767692342400551, -0.04470840096473694, -0.1855662316083908, -0.04958465322852135, -0.1382414698600769, -0.01861272193491459, -0.050383955240249634, -0.037382058799266815, 0.003919163718819618, 0.051586344838142395, -0.034063465893268585, 0.09097504615783691, 
0.05286644026637077, -0.02577822655439377, 0.08240745216608047, 0.01209409162402153, -0.02870125137269497, 0.03422421216964722, 0.00475566741079092, 0.00016521300130989403, 0.02124432474374771, -0.009716486558318138, 0.037431199103593826, -0.02642245404422283, 0.04671301692724228, 0.0033141488675028086, -0.053132541477680206, -0.04262254759669304, -0.017219306901097298, -0.0038267842028290033, 0.09221480041742325, 0.02545781619846821, -0.020614856854081154, 0.015538794919848442, 0.1161508858203888, -0.01812819205224514, -0.03328193724155426, -0.09723710268735886, 0.09521274268627167, -0.12415800243616104, 0.04655934497714043, -0.04181516915559769, -0.02571134828031063, -0.06977801024913788, 0.2712433338165283, 0.20985835790634155, -0.10002698004245758, 0.01842183992266655, -0.07645899802446365, 0.013346088118851185, -0.04101535305380821, 0.12900777161121368, 0.061596501618623734, 0.23863615095615387, -0.04644656926393509, -0.028595590963959694, -0.12996570765972137, -0.030179094523191452, -0.11170795559883118, -0.08230456709861755, 0.045325059443712234, -0.05535753816366196, -0.10774178057909012, 0.09597840905189514, -0.1555635929107666, -0.050194911658763885, 0.06483660638332367, -0.04284978657960892, -0.011444087140262127, -0.011403613723814487, 0.13526883721351624, 0.006518397014588118, 0.05044032633304596, -0.09001653641462326, 0.04908835515379906, 0.021115649491548538, -0.03570970892906189, -0.07043508440256119, 0.03790769726037979, 0.013142253272235394, -0.20982466638088226, 0.1631499081850052, -0.026704618707299232, -0.02601747028529644, 0.08641301095485687, -0.0683688223361969, -0.13353711366653442, 0.11452265828847885, -0.01417736615985632, -0.05396411567926407, -0.025916608050465584, 0.09422852098941803, 0.005240780301392078, 0.015596387907862663, 0.005780130159109831, -0.12236052751541138, -0.039410609751939774, 0.08728672564029694, 0.06417273730039597, -0.07816486805677414, 0.07339297980070114, -0.03834906965494156, 0.10957983881235123, -0.01974210888147354, -0.04888119921088219, -0.01992262713611126, -0.01335177756845951, 0.011367559432983398, -0.01022561639547348, -0.05369631201028824, 0.03813885152339935, -0.16044756770133972, -0.05442880466580391, 0.005326454062014818, 0.05128881707787514, -0.17351624369621277, -0.010445843450725079, -0.16905175149440765, 0.020651493221521378, -0.04645497351884842, 0.0390377938747406, 0.19101037085056305, -0.016803137958049774, 0.008816376328468323, -0.05791327729821205, -0.03647162392735481, 0.04103194177150726, -0.0032460002694278955, -0.12352623045444489 ]
null
null
transformers
<div align="center">
<h1>
TransNormerLLM -- A Faster and Better LLM
</h1>
</div>

<p align="center">
💻 <a href="https://github.com/OpenNLPLab/TransnormerLLM" target="_blank">GitHub</a> • 💬 <a href="https://discord.gg/W4Vr7AKW" target="_blank">Discord</a> • 💬 <a href="./images/contact_me_qr.png" target="_blank">Wechat</a>
</p>

# Table of Contents

- [Introduction](#introduction)
- [Released Weights](#released-weights)
- [Benchmark Results](#benchmark-results)
  - [General Domain](#general-domain)
    - [Model Results](#model-results)
- [Inference and Deployment](#inference-and-deployment)
  - [Dependency Installation](#dependency-installation)
  - [Notice](#notice)
  - [Python Code Inference](#python-code-inference)
    - [Demonstration of Base Model Inference](#demonstration-of-base-model-inference)
- [Fine-tuning the Model](#fine-tuning-the-model)
  - [Dependency Installation](#dependency-installation-1)
  - [Training](#training)
- [Community and Ecosystem](#community-and-ecosystem)
- [Disclaimer, License and Citation](#disclaimer-license-and-citation)
  - [Disclaimer](#disclaimer)
  - [License](#license)
  - [Acknowledgments](#acknowledgments)
  - [Citation](#citation)

# Introduction

We are re-inventing the Large Language Model (LLM). This is the official implementation of TransNormerLLM, described in [this paper](https://arxiv.org/pdf/2307.14995.pdf). Our open weights of TransNormerLLM are now accessible to individuals, creators, researchers, and businesses of all sizes so that they can experiment, innovate, and scale their ideas responsibly.

Our release contains the TransNormerLLM model implementation, the open-source weights, and the starting code for Supervised Fine-tuning (SFT). We will show examples of how to load [TransNormerLLM](https://github.com/OpenNLPLab/Transnormer) models, run SFT, and perform inference with them.

- TransNormerLLM is the first linear attention-based LLM that outperforms conventional softmax attention-based models in terms of both accuracy and efficiency. It was trained on a high-quality corpus with up to **1.4 trillion** tokens.
- TransNormerLLM evolves from the earlier linear attention architecture TransNormer through advanced modifications that include LRPE positional embedding, Lightning Attention acceleration, and new gating and normalization mechanisms.
- TransNormerLLM achieves competitive performance for its size on multiple widely recognized Chinese, English, and multilingual general and domain-specific benchmarks.
- This release includes **Base** versions with **385M**, **1B**, and **7B** parameters.
- All versions are fully open for academic research. For free commercial use, developers need only apply via email to obtain official permission.
- For more information, please read our academic paper [TransNormerLLM](https://arxiv.org/pdf/2307.14995.pdf).

# Released Weights

The released versions and download links are shown below:

|         | Base Models |
|:-------:|:-----------:|
| 385M | 🤗 [TransNormerLLM-385M](https://huggingface.co/OpenNLPLab/TransNormerLLM-385M) |
| 1B | 🤗 [TransNormerLLM-1B](https://huggingface.co/OpenNLPLab/TransNormerLLM-1B) |
| 7B | 🤗 [TransNormerLLM-7B](https://huggingface.co/OpenNLPLab/TransNormerLLM-7B) |

# Benchmark Results

To validate TransNormerLLM, we tested our 385M, 1B, and 7B models on the Commonsense Reasoning task, MMLU, CMMLU, and C-Eval.
For comparison, we selected several open-source models as competitors, including Transformer-based models such as OPT, Pythia, BLOOM, GPT-Neo, GPT-J, MPT, Falcon, LLaMA1/2, OpenLLAMA v1/v2, Baichuan 1/2, ChatGLM 1/2, and the non-Transformer model RWKV. Compared to these models, TransNormerLLM remains highly competitive.

**Commonsense Reasoning** We report BoolQ, PIQA, SIQA, HellaSwag, WinoGrande, ARC easy and challenge, OpenBookQA, and their average. We report 0-shot results for all benchmarks using LM-Eval-Harness. All of our models achieve competitive performance compared to existing state-of-the-art LLMs, showcasing a remarkable ability to comprehend and apply commonsense reasoning.

**Aggregated Benchmarks** We report the overall results for MMLU, CMMLU, and C-Eval. Official scripts were used for evaluating MMLU, CMMLU, and C-Eval, with all evaluations conducted in a 5-shot setup. In comparison to top-tier open-source models in the industry, our models demonstrate performance on par with them on both English and Chinese benchmarks.

## General Domain

In the general domain, we conducted 5-shot tests on the following datasets:

- [C-Eval](https://cevalbenchmark.com/index.html#home) is a comprehensive Chinese foundation-model evaluation dataset covering 52 disciplines and four levels of difficulty. Our evaluation approach followed that of [LM-Evaluation-Harness](https://github.com/EleutherAI/lm-evaluation-harness).
- [MMLU](https://arxiv.org/abs/2009.03300) is an English evaluation dataset comprising 57 tasks, encompassing elementary math, American history, computer science, law, and more. The difficulty ranges from high-school level to expert level; it is a mainstream LLM evaluation dataset. We used its [official](https://github.com/hendrycks/test) evaluation approach.
- [CMMLU](https://github.com/haonan-li/CMMLU) is a comprehensive Chinese evaluation benchmark covering 67 topics, specifically designed to assess language models' knowledge and reasoning capabilities in a Chinese context. We adopted its [official](https://github.com/haonan-li/CMMLU) evaluation approach.

### Model Results

**Performance Comparison on Commonsense Reasoning and Aggregated Benchmarks.** For a fair comparison, we report competing methods' results reproduced by us using their released models. PS: parameter size (billion). T: tokens (trillion). HS: HellaSwag. WG: WinoGrande.
| Model | PS | T | BoolQ | PIQA | HS | WG | ARC-e | ARC-c | OBQA | MMLU | CMMLU | C-Eval |
|-------------|------|------|-------|-------|-------|-------|-------|-------|-------|-------|-------|--------|
| GPT-J | 6.9 | 0.3 | 65.44 | 75.41 | 66.25 | 64.09 | 66.92 | 36.60 | 38.20 | 25.40 | 26.47 | 23.39 |
| OPT | 6.7 | 0.3 | 66.18 | 76.22 | 67.21 | 65.19 | 65.66 | 34.64 | 37.20 | 24.57 | 25.36 | 25.32 |
| Pythia | 6.9 | 0.3 | 63.46 | 75.14 | 63.92 | 60.77 | 67.34 | 35.41 | 37.00 | 24.64 | 25.56 | 26.40 |
| BLOOM | 7.1 | 0.35 | 62.91 | 72.69 | 62.33 | 64.01 | 65.11 | 33.45 | 35.80 | 26.25 | 24.97 | 24.25 |
| RWKV | 7.4 | - | - | 76.06 | 65.51 | 61.01 | 67.80 | 37.46 | 40.20 | 24.96 | - | - |
| MPT | 6.9 | 1.0 | 73.88 | 79.43 | 76.25 | 68.27 | 74.79 | 41.72 | 42.20 | 30.80 | 25.99 | 24.06 |
| Falcon | 7.2 | 1.5 | 73.73 | 79.38 | 76.3 | 67.17 | 74.62 | 43.60 | 43.80 | 27.79 | 25.73 | 22.92 |
| Baichuan1 | 7.0 | 1.2 | 70.09 | 76.01 | 70.06 | 64.09 | 71.72 | 40.53 | 38.20 | 42.30 | 44.43 | 42.80 |
| Baichuan2 | 7.0 | 2.6 | 72.72 | 76.50 | 72.17 | 68.35 | 75.17 | 42.32 | 39.60 | 54.16 | 57.07 | 54.00 |
| ChatGLM1 | 6.7 | 1.0 | 74.74 | 68.88 | 45.57 | 52.25 | 48.78 | 31.66 | 36.80 | 40.63 | 37.48 | 40.23 |
| ChatGLM2 | 7.1 | 1.4 | 77.65 | 69.37 | 50.51 | 57.62 | 59.13 | 34.30 | 37.00 | 45.46 | 48.80 | 52.55 |
| OpenLLaMAv1 | 6.7 | 1.0 | 70.43 | 75.68 | 69.23 | 66.69 | 71.17 | 38.57 | 39.00 | 30.49 | 25.40 | 26.09 |
| OpenLLaMAv2 | 6.7 | 1.0 | 72.20 | 78.84 | 74.51 | 65.67 | 72.39 | 41.30 | 41.00 | 41.29 | 29.58 | 30.01 |
| LLaMA1 | 6.7 | 1.0 | 76.50 | 79.80 | 76.10 | 70.10 | 72.80 | 47.60 | 57.20 | 35.10 | 25.62 | 25.72 |
| LLaMA2 | 6.7 | 2.0 | 77.68 | 78.07 | 76.02 | 68.98 | 76.30 | 46.33 | 44.20 | 45.30 | 32.96 | 33.20 |
| **Ours** | 6.8 | 1.4 | 75.11 | 85.47 | 78.61 | 66.93 | 73.11 | 52.99 | 61.60 | 44.90 | 49.32 | 45.01 |

# Inference and Deployment

The model weights, source code, and configuration needed for inference have been released on Hugging Face. Download links can be found in the table at the beginning of this document. Below, we demonstrate various inference methods using TransNormerLLM-7B-Chat as an example. The program will automatically download the required resources from Hugging Face.

## Dependency Installation

**📝Note** Please configure the following environment before using the model:

```shell
pip install triton==2.0.0
pip install einops
```

## Notice

If you encounter errors related to Triton, please set the following environment variable:

```shell
export use_triton=False
```

## Python Code Inference

### Demonstration of Base Model Inference

**📝Note** Please use the model with `bfloat16` rather than `float16`.

```python
>>> import torch
>>> from transformers import AutoModelForCausalLM, AutoTokenizer
>>> tokenizer = AutoTokenizer.from_pretrained("OpenNLPLab/TransNormerLLM-7B", trust_remote_code=True)
>>> model = AutoModelForCausalLM.from_pretrained("OpenNLPLab/TransNormerLLM-7B", torch_dtype=torch.bfloat16, device_map="auto", trust_remote_code=True)
>>> inputs = tokenizer('今天是美好的一天', return_tensors='pt')  # "Today is a beautiful day"
>>> pred = model.generate(**inputs, max_new_tokens=4096, repetition_penalty=1.0)
>>> print(tokenizer.decode(pred.cpu()[0], skip_special_tokens=True))
```

> In the above code snippet, the model loading specifies `device_map="auto"`, which will use all available GPUs. If you need to restrict which devices are used, you can control this in a way similar to `export CUDA_VISIBLE_DEVICES=0,1` (to use GPUs 0 and 1).
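As a concrete sketch of pinning inference to specific devices (the device index below is an assumption; adjust it to your machine):

```python
# Sketch: restrict the model to one GPU by limiting visibility before
# torch initializes CUDA; device_map="auto" then sees only that card.
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0"  # hypothetical: use GPU 0 only

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "OpenNLPLab/TransNormerLLM-7B", trust_remote_code=True
)
model = AutoModelForCausalLM.from_pretrained(
    "OpenNLPLab/TransNormerLLM-7B",
    torch_dtype=torch.bfloat16,
    device_map="auto",  # resolves to the single visible GPU
    trust_remote_code=True,
)
```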
# Fine-tuning the Model

## Dependency Installation

```shell
git clone https://github.com/OpenNLPLab/TransNormerLLM.git
cd TransNormerLLM/fine-tune
pip install -r requirements.txt
```

- To use lightweight fine-tuning methods such as LoRA, you must additionally install [peft](https://github.com/huggingface/peft).

## Training

Below, we provide an example of fine-tuning TransNormerLLM-1B on a single machine with ZeRO-3.

Training Data: `alpaca_data.json`. This sample data was drawn from [alpaca_data.json](https://raw.githubusercontent.com/tatsu-lab/stanford_alpaca/main/alpaca_data.json), consists of a selection of 52,002 entries, and has been reformatted. Its main purpose is to demonstrate how to run SFT on our model; effectiveness is not guaranteed. (A sketch of the record format follows the command below.)

```shell
torchrun \
    --nproc_per_node=8 \
    train.py \
    --model_name_or_path OpenNLPLab/TransNormerLLM-1B \
    --data_path ./alpaca_data.json \
    --output_dir output \
    --num_train_epochs 1 \
    --per_device_train_batch_size 2 \
    --per_device_eval_batch_size 1 \
    --gradient_accumulation_steps 1 \
    --bf16 true \
    --adam_beta1 0.9 \
    --adam_beta2 0.95 \
    --evaluation_strategy "no" \
    --save_strategy "steps" \
    --save_steps 5000 \
    --save_total_limit 30 \
    --learning_rate 1e-4 \
    --weight_decay 0.1 \
    --warmup_ratio 0.1 \
    --lr_scheduler_type "cosine" \
    --deepspeed 'configs/zero3.json' \
    --logging_steps 1 \
    --dataloader_num_workers 24 \
    --ddp_find_unused_parameters false \
    --tf32 true
```
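For orientation, here is a minimal sketch of the instruction/input/output record format used by the upstream `alpaca_data.json` (values are illustrative; the reformatted file used in this example may differ in layout):

```python
# Sketch: write a tiny alpaca-style SFT dataset. Each record pairs an
# instruction (plus optional input context) with the desired output.
import json

records = [
    {
        "instruction": "Summarize the following sentence in five words.",
        "input": "TransNormerLLM is a linear attention-based large language model.",
        "output": "Linear-attention large language model introduced.",
    },
    {
        "instruction": "Name three primary colors.",
        "input": "",  # empty when no extra context is needed
        "output": "Red, blue, and yellow.",
    },
]

with open("alpaca_data.json", "w", encoding="utf-8") as f:
    json.dump(records, f, ensure_ascii=False, indent=2)
```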
# Community and Ecosystem

**📢📢📢 We will continuously update the support for TransNormerLLM from the community and ecosystem here 😀😀😀**

- [nanoTransnormer](https://github.com/Doraemonzzz/nanoTransNormer)

# Disclaimer, License and Citation

## Disclaimer

We hereby declare that our team has not developed any applications based on TransNormerLLM models, whether on iOS, Android, the web, or any other platform. We strongly call on all users not to use TransNormerLLM models for any activities that harm national or social security or violate the law. We also ask users not to use TransNormerLLM models for Internet services that have not undergone appropriate security reviews and filings. We hope that all users can abide by this principle and ensure that the development of technology proceeds in a regulated and legal environment.

We have done our best to ensure the compliance of the data used during model training. However, despite our considerable efforts, unforeseeable issues may still arise due to the complexity of the model and data. Therefore, if any problems arise from the use of the TransNormerLLM open-source models, including but not limited to data security issues, public opinion risks, or any risks and problems caused by the model being misled, abused, spread, or improperly exploited, we will not assume any responsibility.

## License

Community use of the TransNormerLLM model requires adherence to [Apache 2.0](https://github.com/OpenNLPLab/TransNormerLLM/blob/main/LICENSE) and the [Community License for TransNormerLLM Model](https://huggingface.co/OpenNLPLab/TransNormerLLM-1B/blob/main/TransNormerLLM模型社区许可协议.pdf). The TransNormerLLM model supports commercial use. If you plan to use the TransNormerLLM model or its derivatives for commercial purposes, please ensure that your entity meets the following conditions:

1. The Daily Active Users (DAU) of your or your affiliate's service or product is less than 1 million.
2. Neither you nor your affiliates are software service providers or cloud service providers.
3. Neither you nor your affiliates may sublicense or otherwise grant the commercial license given to you to any third party without TransNormerLLM's permission.

Upon meeting the above conditions, you need to submit the application materials required by the TransNormerLLM Model Community License Agreement via the contact email opennlplab@gmail.com. Once approved, TransNormerLLM will grant you a non-exclusive, global, non-transferable, non-sublicensable, revocable commercial copyright license.

## Acknowledgments

Our project is developed based on the following open-source projects:

- [Baichuan](https://github.com/baichuan-inc/Baichuan-7B) for the tokenizer.
- [metaseq](https://github.com/facebookresearch/metaseq) for training.
- [lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness) for evaluation.

## Citation

If you wish to cite our work, please use the following reference:

```
@article{qin2023scaling,
  title={Scaling transnormer to 175 billion parameters},
  author={Qin, Zhen and Li, Dong and Sun, Weigao and Sun, Weixuan and Shen, Xuyang and Han, Xiaodong and Wei, Yunshen and Lv, Baohong and Yuan, Fei and Luo, Xiao and others},
  journal={arXiv preprint arXiv:2307.14995},
  year={2023}
}
```
{"language": ["en", "zh"], "license": "other", "tags": [" TransNormerLLM"], "pipeline_tag": "text-generation"}
text-generation
OpenNLPLab/TransNormerLLM-7B
[ "transformers", "pytorch", "text-generation", " TransNormerLLM", "custom_code", "en", "zh", "arxiv:2307.14995", "arxiv:2009.03300", "license:other", "autotrain_compatible", "region:us" ]
2023-11-12T17:25:50+00:00
[ "2307.14995", "2009.03300" ]
[ "en", "zh" ]
TAGS #transformers #pytorch #text-generation # TransNormerLLM #custom_code #en #zh #arxiv-2307.14995 #arxiv-2009.03300 #license-other #autotrain_compatible #region-us
TransNormerLLM -- A Faster and Better LLM =========================================== [GitHub](URL target=) • [Discord](URL target=) • [Wechat](./images/contact_me_qr.png) Table of Contents ================= * Introduction * Released Weights * Benchmark Results + General Domain - Model Results * Inference and Deployment + Dependency Installation + Notice + Python Code Inference - Demonstration of Base Model Inference * Fine-tuning the Model + Dependency Installation + Training * Community and Ecosystem * Disclaimer, License and Citation + Disclaimer + License + Acknowledgments + Citation Introduction ============ We are re-inventing the Large Language Model (LLM). This is the official implementation of TransNormerLLM in link. Our opened weights of TransNormerLLM are now accessible to individuals, creators, researchers and businesses of all sizes so that they can experiment, innovate and scale their ideas responsibly. Our release contains the TransNormerLLM model implementation, the open-source weights and the starting code for Supervised Fine-tuning (SFT). We will show examples on how to load TransNormerLLM models, run SFT and inference on it. * TransNormerLLM is the first linear attention-based LLM that outperforms conventional softmax attention-based models in terms of both accuracy and efficiency. It was trained on a high-quality corpus with up to 1.4 trillion tokens. * TransNormerLLM evolves from the previous linear attention architecture TransNormer by making advanced modifications that include LRPE positional embedding, Lightning Attention acceleration, new gating and normalization mechanisms. * TransNormerLLM achieved competitive performance of its size on multiple well-approved Chinese, English, and multi-language general and domain-specific benchmarks. * This release includes Base versions with 385M, 1B, and 7B parameters. * All versions are fully open to academic research. Developers only need to apply via email and obtain official commercial permission to use it for free commercially. * For more information, welcome reading our academic paper TransNormerLLM. Released Weights ================ The specific released versions and download links are shown as below: Benchmark Results ================= To validate TransNormerLLM, we tested our 385M, 1B, and 7B models on Commonsense Reasoning Task, MMLU, CMMLU, and C-Eval. For comparison, we selected several open-source models as competitors, including Transformer-based models such as OPT, Pythia, BLOOM, GPT-Neo, GPT-J, MPT, Falcon, LLaMA1/2, OpenLLAMA v1/v2, Baichuan 1/2, ChatGLM 1/2, and non-Transformer model RWKV. It can be observed that, compared to these models, TransNormerLLM remains highly competitive. Commonsense Reasoning We report BoolQ, PIQA, SIQA, HellaSwag, WinoGrande, ARC easy and challenge, OpenBookQA and their average. We report 0-shot results for all benchmarks using LM-Eval-Harness. All of our models achieve competitive performance compared to existing state-of-the-art LLMs, showcasing a remarkable ability to comprehend and apply commonsense reasoning. Aggregated Benchmarks We report the overall results for MMLU, CMMLU, C-Eval. Official scripts were used for evaluating MMLU, CMMLU, and C-Eval, with all evaluation results being conducted with a 5-shot setup. In comparison to top-tier open-source models available in the industry, our models have demonstrated matched performance in both English and Chinese benchmarks. 
General Domain -------------- In the general domain, we conducted 5-shot tests on the following datasets: * C-Eval is a comprehensive Chinese basic model evaluation dataset, covering 52 disciplines and four levels of difficulty. Our evaluation approach followed that of LM-Evaluation-Harness. * MMLU is an English evaluation dataset comprising 57 tasks, encompassing elementary math, American history, computer science, law, etc. The difficulty ranges from high school level to expert level. It's a mainstream LLM evaluation dataset. We used its official evaluation approach. * CMMLU is a comprehensive Chinese evaluation benchmark covering 67 topics, specifically designed to assess language models' knowledge and reasoning capabilities in a Chinese context. We adopted its official evaluation approach. ### Model Results Performance Comparison on Commonsense Reasoning and Aggregated Benchmarks. For a fair comparison, we report competing methods' results reproduced by us using their released models. PS: parameter size (billion). T: tokens (trillion). HS: HellaSwag. WG: WinoGrande. Inference and Deployment ======================== The model weights, source code, and configuration needed for inference have been released on Hugging Face. Download links can be found in the table at the beginning of this document. Below, we demonstrate various inference methods using TransNormerLLM-7B-Chat as an example. The program will automatically download the required resources from Hugging Face. Dependency Installation ----------------------- Note Please configure the following environment before using the model: Notice ------ If you encounter errors related to Triton, please set the following environment variables: Python Code Inference --------------------- ### Demonstration of Base Model Inference Note Kindly utilize the model employing 'bfloat16' instead of 'float16'. > > In the above code snippets, the model loading specifies 'device\_map='auto'', which will use all available GPUs. If you need to specify the device(s) to use, you can control it in a way similar to 'export CUDA\_VISIBLE\_DEVICES=0,1' (using the 0 and 1 graphics cards). > > > Fine-tuning the Model ===================== Dependency Installation ----------------------- * To use lightweight fine-tuning methods like LoRA, you must additionally install peft. Training -------- Below, we provide an example of fine-tuning the TransNormerLLM-1B on a single machine with ZeRO-3. Training Data: 'alpaca\_data.json'. This sample data was drawn from alpaca\_data.json, consisting of a selection of 52,002 entries, and has been reformatted. The main purpose is to demonstrate how to SFT our model, and effectiveness is not guaranteed. Community and Ecosystem ======================= We will continuously update the support for TransNormerLLM from the community and ecosystem here * nanoTransnormer Disclaimer, License and Citation ================================ Disclaimer ---------- We hereby declare that our team has not developed any applications based on TransNormerLLM models, not on iOS, Android, the web, or any other platform. We strongly call on all users not to use TransNormerLLM models for any activities that harm national / social security or violate the law. Also, we ask users not to use TransNormerLLM models for Internet services that have not undergone appropriate security reviews and filings. We hope that all users can abide by this principle and ensure that the development of technology proceeds in a regulated and legal environment. 
We have done our best to ensure the compliance of the data used in the model training process. However, despite our considerable efforts, there may still be some unforeseeable issues due to the complexity of the model and data. Therefore, if any problems arise due to the use of TransNormerLLM open-source models, including but not limited to data security issues, public opinion risks, or any risks and problems brought about by the model being misled, abused, spread or improperly exploited, we will not assume any responsibility. License ------- The community usage of TransNormerLLM model requires adherence to Apache 2.0 and Community License for TransNormerLLM Model. The TransNormerLLM model supports commercial use. If you plan to use the TransNormerLLM model or its derivatives for commercial purposes, please ensure that your entity meets the following conditions: 1. The Daily Active Users (DAU) of your or your affiliate's service or product is less than 1 million. 2. Neither you nor your affiliates are software service providers or cloud service providers. 3. There is no possibility for you or your affiliates to grant the commercial license given to you, to reauthorize it to other third parties without TransNormerLLM's permission. Upon meeting the above conditions, you need to submit the application materials required by the TransNormerLLM Model Community License Agreement via the following contact email: opennlplab@URL. Once approved, TransNormerLLM will hereby grant you a non-exclusive, global, non-transferable, non-sublicensable, revocable commercial copyright license. Acknowledgments --------------- Our project is developed based on the following open source projects: * Baichuan for the tokenizer. * metaseq for training. * lm-evaluation-harness for evaluation. If you wish to cite our work, please use the following reference:
[ "### Model Results\n\n\nPerformance Comparison on Commonsense Reasoning and Aggregated Benchmarks. For a fair comparison, we report competing methods' results reproduced by us using their released models. PS: parameter size (billion). T: tokens (trillion). HS: HellaSwag. WG: WinoGrande.\n\n\n\nInference and Deployment\n========================\n\n\nThe model weights, source code, and configuration needed for inference have been released on Hugging Face. Download links can be found in the table at the beginning of this document. Below, we demonstrate various inference methods using TransNormerLLM-7B-Chat as an example. The program will automatically download the required resources from Hugging Face.\n\n\nDependency Installation\n-----------------------\n\n\nNote Please configure the following environment before using the model:\n\n\nNotice\n------\n\n\nIf you encounter errors related to Triton, please set the following environment variables:\n\n\nPython Code Inference\n---------------------", "### Demonstration of Base Model Inference\n\n\nNote Kindly utilize the model employing 'bfloat16' instead of 'float16'.\n\n\n\n> \n> In the above code snippets, the model loading specifies 'device\\_map='auto'', which will use all available GPUs. If you need to specify the device(s) to use, you can control it in a way similar to 'export CUDA\\_VISIBLE\\_DEVICES=0,1' (using the 0 and 1 graphics cards).\n> \n> \n> \n\n\nFine-tuning the Model\n=====================\n\n\nDependency Installation\n-----------------------\n\n\n* To use lightweight fine-tuning methods like LoRA, you must additionally install peft.\n\n\nTraining\n--------\n\n\nBelow, we provide an example of fine-tuning the TransNormerLLM-1B on a single machine with ZeRO-3.\n\n\nTraining Data: 'alpaca\\_data.json'. This sample data was drawn from alpaca\\_data.json, consisting of a selection of 52,002 entries, and has been reformatted. The main purpose is to demonstrate how to SFT our model, and effectiveness is not guaranteed.\n\n\nCommunity and Ecosystem\n=======================\n\n\nWe will continuously update the support for TransNormerLLM from the community and ecosystem here\n\n\n* nanoTransnormer\n\n\nDisclaimer, License and Citation\n================================\n\n\nDisclaimer\n----------\n\n\nWe hereby declare that our team has not developed any applications based on TransNormerLLM models, not on iOS, Android, the web, or any other platform. We strongly call on all users not to use TransNormerLLM models for any activities that harm national / social security or violate the law. Also, we ask users not to use TransNormerLLM models for Internet services that have not undergone appropriate security reviews and filings. We hope that all users can abide by this principle and ensure that the development of technology proceeds in a regulated and legal environment.\n\n\nWe have done our best to ensure the compliance of the data used in the model training process. However, despite our considerable efforts, there may still be some unforeseeable issues due to the complexity of the model and data. 
Therefore, if any problems arise due to the use of TransNormerLLM open-source models, including but not limited to data security issues, public opinion risks, or any risks and problems brought about by the model being misled, abused, spread or improperly exploited, we will not assume any responsibility.\n\n\nLicense\n-------\n\n\nThe community usage of TransNormerLLM model requires adherence to Apache 2.0 and Community License for TransNormerLLM Model. The TransNormerLLM model supports commercial use. If you plan to use the TransNormerLLM model or its derivatives for commercial purposes, please ensure that your entity meets the following conditions:\n\n\n1. The Daily Active Users (DAU) of your or your affiliate's service or product is less than 1 million.\n2. Neither you nor your affiliates are software service providers or cloud service providers.\n3. There is no possibility for you or your affiliates to grant the commercial license given to you, to reauthorize it to other third parties without TransNormerLLM's permission.\n\n\nUpon meeting the above conditions, you need to submit the application materials required by the TransNormerLLM Model Community License Agreement via the following contact email: opennlplab@URL. Once approved, TransNormerLLM will hereby grant you a non-exclusive, global, non-transferable, non-sublicensable, revocable commercial copyright license.\n\n\nAcknowledgments\n---------------\n\n\nOur project is developed based on the following open source projects:\n\n\n* Baichuan for the tokenizer.\n* metaseq for training.\n* lm-evaluation-harness for evaluation.\n\n\nIf you wish to cite our work, please use the following reference:" ]
[ "TAGS\n#transformers #pytorch #text-generation # TransNormerLLM #custom_code #en #zh #arxiv-2307.14995 #arxiv-2009.03300 #license-other #autotrain_compatible #region-us \n", "### Model Results\n\n\nPerformance Comparison on Commonsense Reasoning and Aggregated Benchmarks. For a fair comparison, we report competing methods' results reproduced by us using their released models. PS: parameter size (billion). T: tokens (trillion). HS: HellaSwag. WG: WinoGrande.\n\n\n\nInference and Deployment\n========================\n\n\nThe model weights, source code, and configuration needed for inference have been released on Hugging Face. Download links can be found in the table at the beginning of this document. Below, we demonstrate various inference methods using TransNormerLLM-7B-Chat as an example. The program will automatically download the required resources from Hugging Face.\n\n\nDependency Installation\n-----------------------\n\n\nNote Please configure the following environment before using the model:\n\n\nNotice\n------\n\n\nIf you encounter errors related to Triton, please set the following environment variables:\n\n\nPython Code Inference\n---------------------", "### Demonstration of Base Model Inference\n\n\nNote Kindly utilize the model employing 'bfloat16' instead of 'float16'.\n\n\n\n> \n> In the above code snippets, the model loading specifies 'device\\_map='auto'', which will use all available GPUs. If you need to specify the device(s) to use, you can control it in a way similar to 'export CUDA\\_VISIBLE\\_DEVICES=0,1' (using the 0 and 1 graphics cards).\n> \n> \n> \n\n\nFine-tuning the Model\n=====================\n\n\nDependency Installation\n-----------------------\n\n\n* To use lightweight fine-tuning methods like LoRA, you must additionally install peft.\n\n\nTraining\n--------\n\n\nBelow, we provide an example of fine-tuning the TransNormerLLM-1B on a single machine with ZeRO-3.\n\n\nTraining Data: 'alpaca\\_data.json'. This sample data was drawn from alpaca\\_data.json, consisting of a selection of 52,002 entries, and has been reformatted. The main purpose is to demonstrate how to SFT our model, and effectiveness is not guaranteed.\n\n\nCommunity and Ecosystem\n=======================\n\n\nWe will continuously update the support for TransNormerLLM from the community and ecosystem here\n\n\n* nanoTransnormer\n\n\nDisclaimer, License and Citation\n================================\n\n\nDisclaimer\n----------\n\n\nWe hereby declare that our team has not developed any applications based on TransNormerLLM models, not on iOS, Android, the web, or any other platform. We strongly call on all users not to use TransNormerLLM models for any activities that harm national / social security or violate the law. Also, we ask users not to use TransNormerLLM models for Internet services that have not undergone appropriate security reviews and filings. We hope that all users can abide by this principle and ensure that the development of technology proceeds in a regulated and legal environment.\n\n\nWe have done our best to ensure the compliance of the data used in the model training process. However, despite our considerable efforts, there may still be some unforeseeable issues due to the complexity of the model and data. 
Therefore, if any problems arise due to the use of TransNormerLLM open-source models, including but not limited to data security issues, public opinion risks, or any risks and problems brought about by the model being misled, abused, spread or improperly exploited, we will not assume any responsibility.\n\n\nLicense\n-------\n\n\nThe community usage of TransNormerLLM model requires adherence to Apache 2.0 and Community License for TransNormerLLM Model. The TransNormerLLM model supports commercial use. If you plan to use the TransNormerLLM model or its derivatives for commercial purposes, please ensure that your entity meets the following conditions:\n\n\n1. The Daily Active Users (DAU) of your or your affiliate's service or product is less than 1 million.\n2. Neither you nor your affiliates are software service providers or cloud service providers.\n3. There is no possibility for you or your affiliates to grant the commercial license given to you, to reauthorize it to other third parties without TransNormerLLM's permission.\n\n\nUpon meeting the above conditions, you need to submit the application materials required by the TransNormerLLM Model Community License Agreement via the following contact email: opennlplab@URL. Once approved, TransNormerLLM will hereby grant you a non-exclusive, global, non-transferable, non-sublicensable, revocable commercial copyright license.\n\n\nAcknowledgments\n---------------\n\n\nOur project is developed based on the following open source projects:\n\n\n* Baichuan for the tokenizer.\n* metaseq for training.\n* lm-evaluation-harness for evaluation.\n\n\nIf you wish to cite our work, please use the following reference:" ]
[ 62, 207, 857 ]
[ "passage: TAGS\n#transformers #pytorch #text-generation # TransNormerLLM #custom_code #en #zh #arxiv-2307.14995 #arxiv-2009.03300 #license-other #autotrain_compatible #region-us \n### Model Results\n\n\nPerformance Comparison on Commonsense Reasoning and Aggregated Benchmarks. For a fair comparison, we report competing methods' results reproduced by us using their released models. PS: parameter size (billion). T: tokens (trillion). HS: HellaSwag. WG: WinoGrande.\n\n\n\nInference and Deployment\n========================\n\n\nThe model weights, source code, and configuration needed for inference have been released on Hugging Face. Download links can be found in the table at the beginning of this document. Below, we demonstrate various inference methods using TransNormerLLM-7B-Chat as an example. The program will automatically download the required resources from Hugging Face.\n\n\nDependency Installation\n-----------------------\n\n\nNote Please configure the following environment before using the model:\n\n\nNotice\n------\n\n\nIf you encounter errors related to Triton, please set the following environment variables:\n\n\nPython Code Inference\n---------------------" ]
[ -0.08346836268901825, 0.02420392446219921, 0.001026795944198966, 0.07055620104074478, 0.13346947729587555, 0.033620063215494156, 0.02029181271791458, 0.06454194337129593, 0.030359001830220222, 0.03821771219372749, 0.01619586907327175, 0.03526497259736061, 0.11383260786533356, 0.04776724427938461, 0.07154260575771332, -0.20840780436992645, -0.02172027714550495, 0.004031465854495764, -0.04372936487197876, 0.08358833938837051, 0.05457808077335358, 0.0012330819154158235, 0.08433473855257034, -0.00545543385669589, -0.2073228806257248, -0.00008960368722910061, -0.05088037997484207, 0.06020795926451683, 0.0905279666185379, 0.04902922362089157, 0.04102679714560509, 0.03265384957194328, 0.05576480180025101, -0.08836103975772858, 0.009830497205257416, 0.04634353891015053, -0.03877461329102516, 0.048335496336221695, 0.07660027593374252, -0.008474991656839848, 0.10070755332708359, -0.08168457448482513, 0.017181912437081337, 0.09107127040624619, -0.03525922819972038, -0.14760175347328186, -0.06115012243390083, 0.051862575113773346, 0.11989371478557587, 0.023910250514745712, 0.002033553784713149, 0.23028193414211273, 0.001960173947736621, 0.07365194708108902, 0.16522866487503052, -0.15873652696609497, -0.03166043385863304, 0.12623877823352814, 0.026449210941791534, 0.012893358245491982, -0.013602875173091888, 0.030220746994018555, 0.09295397996902466, 0.03264383226633072, 0.09633255749940872, -0.0387418270111084, -0.05279792100191116, -0.057175662368535995, -0.14287832379341125, 0.00011748926772270352, 0.1823188215494156, 0.024217888712882996, -0.07049688696861267, -0.08635876327753067, -0.09262111783027649, 0.04977307468652725, 0.020285679027438164, 0.04190896451473236, 0.017742762342095375, -0.005391969345510006, -0.034891556948423386, -0.055788710713386536, -0.0533306784927845, -0.0699058547616005, 0.021758293733000755, 0.05622703582048416, 0.0018620408372953534, 0.03344022110104561, 0.040764063596725464, 0.12616761028766632, -0.1614096611738205, -0.07826437056064606, -0.034353744238615036, -0.09425623714923859, -0.07584571838378906, 0.046275753527879715, 0.0005926006124354899, -0.014219935983419418, 0.06084791570901871, 0.12316993623971939, -0.04354286938905716, 0.03092297725379467, -0.061251699924468994, 0.0484495609998703, -0.00982485618442297, 0.14659318327903748, -0.05369443818926811, -0.21356868743896484, 0.06608545780181885, 0.015057642012834549, 0.08212105184793472, 0.00037531822454184294, -0.048672739416360855, -0.08112286776304245, 0.04412059485912323, 0.038026563823223114, 0.07649971544742584, 0.0771871954202652, -0.015555205754935741, -0.05236687883734703, 0.2688722312450409, -0.045274630188941956, -0.010530374944210052, -0.021562233567237854, -0.0797787606716156, -0.03884441778063774, 0.18355830013751984, -0.014237749390304089, -0.062276240438222885, -0.03090599924325943, -0.06846017390489578, 0.0293597262352705, -0.024582862854003906, -0.1293458640575409, 0.015427066944539547, -0.06482073664665222, 0.007132510654628277, -0.14101935923099518, -0.15203551948070526, -0.0009905872866511345, 0.021510088816285133, 0.013522869907319546, -0.006264892406761646, 0.02665184810757637, 0.01864754781126976, -0.0729498565196991, -0.02341734990477562, -0.03072946146130562, -0.041410431265830994, 0.03474944829940796, 0.01079433411359787, 0.017036961391568184, -0.04311884939670563, 0.0062047578394412994, -0.07854502648115158, 0.04585420340299606, -0.1190972775220871, 0.07716858386993408, -0.06117303669452667, 0.13155092298984528, -0.06484979391098022, -0.06696190685033798, -0.03600602596998215, 
-0.006553415209054947, 0.10305418074131012, 0.20225033164024353, -0.1400853842496872, -0.0529998280107975, 0.2085626870393753, -0.13767321407794952, -0.14040915668010712, 0.1721929907798767, 0.022306695580482483, 0.024151738733053207, 0.03999011218547821, 0.18467579782009125, 0.07897250354290009, -0.1968998908996582, 0.005866345949470997, 0.03332316130399704, -0.07708469778299332, -0.014248590916395187, 0.015491721220314503, -0.001322688884101808, -0.09452244639396667, 0.06064722314476967, -0.046944789588451385, 0.12648507952690125, -0.011644897051155567, -0.004585207439959049, -0.04935814440250397, -0.09174555540084839, -0.03102581948041916, 0.000779869151301682, -0.05702334642410278, -0.05057511851191521, -0.08053567260503769, 0.03200584650039673, 0.11654825508594513, -0.06266041845083237, 0.014810390770435333, 0.01864592358469963, 0.10693541169166565, -0.05834990367293358, 0.0020545374136418104, -0.07609903812408447, -0.06466522067785263, 0.07780361920595169, 0.11416145414113998, 0.0862756296992302, -0.016147052869200706, 0.05810360610485077, 0.12133751064538956, 0.0019907092209905386, -0.03886965289711952, 0.09728314727544785, -0.03441007435321808, -0.03097909316420555, -0.05353909730911255, -0.058823052793741226, -0.02032434567809105, 0.2305884063243866, -0.1399097442626953, 0.07840774208307266, -0.045135192573070526, 0.033338095992803574, -0.03499726206064224, -0.04618680849671364, 0.06135650724172592, -0.0640900507569313, -0.035242367535829544, -0.07252400368452072, 0.04433562979102135, 0.05620620399713516, -0.09527795761823654, 0.07812124490737915, -0.20935054123401642, -0.05347507819533348, 0.10351963341236115, -0.027162376791238785, -0.0326116606593132, -0.13937629759311676, -0.07213171571493149, -0.07221056520938873, -0.004071427043527365, -0.09641695767641068, 0.07973775267601013, -0.017682118341326714, 0.11228322237730026, -0.07855147868394852, -0.004734633024781942, 0.0188891738653183, -0.01539241150021553, -0.007245078682899475, 0.03171740844845772, -0.09938595443964005, -0.004999870900064707, 0.033349018543958664, -0.04097829759120941, -0.06491457670927048, 0.06779786944389343, 0.02285825088620186, -0.002080125967040658, 0.0034429572988301516, 0.06144849210977554, 0.02158130332827568, 0.09365707635879517, -0.06744710355997086, 0.006075572222471237, 0.06918176263570786, -0.021718446165323257, 0.05074838921427727, -0.09554567188024521, 0.007880417630076408, 0.011617867276072502, -0.021840844303369522, -0.016479823738336563, 0.017634250223636627, -0.03925427421927452, 0.027165580540895462, -0.043219227343797684, 0.04659337177872658, -0.03299801051616669, -0.031177325174212456, -0.15096867084503174, 0.15849541127681732, -0.09348525106906891, -0.16647686064243317, -0.1466263234615326, 0.04089256376028061, -0.10241035372018814, -0.00884043239057064, 0.06800850480794907, -0.045385122299194336, -0.02695050649344921, -0.10340109467506409, 0.06337276101112366, -0.044425275176763535, -0.0330432653427124, -0.113213449716568, -0.030348878353834152, 0.039312686771154404, -0.1538543850183487, -0.010501961223781109, -0.01854131557047367, -0.06359834969043732, 0.058030158281326294, -0.12377078831195831, 0.10957707464694977, 0.0787581205368042, -0.047372397035360336, 0.001616426045075059, -0.03459487110376358, 0.18506845831871033, -0.009548893198370934, 0.09516672790050507, 0.17494216561317444, 0.0460825078189373, 0.09188421070575714, 0.05401573330163956, -0.026228096336126328, -0.035662226378917694, 0.02943720854818821, -0.0054844217374920845, -0.10862243920564651, -0.12679453194141388, 
-0.09546712040901184, -0.06003405153751373, 0.11408217996358871, 0.10408943146467209, 0.02289116568863392, 0.08883616328239441, 0.09803641587495804, -0.0685538798570633, 0.014397242106497288, 0.09235047549009323, 0.15280763804912567, -0.055624645203351974, 0.02479521557688713, 0.09430061280727386, -0.020221959799528122, 0.042136769741773605, 0.09932884573936462, 0.00014588433259632438, 0.19499291479587555, -0.13945429027080536, 0.027657508850097656, 0.02276226319372654, 0.13796420395374298, -0.01545007899403572, 0.16522148251533508, -0.02088262140750885, 0.04191442206501961, -0.006994694005697966, -0.0865117684006691, -0.07047882676124573, 0.06570715457201004, 0.03776872903108597, 0.006336662918329239, -0.04245907813310623, -0.037819117307662964, 0.013730333186686039, 0.16254329681396484, -0.014791149646043777, -0.23841506242752075, -0.06209554523229599, 0.006225540768355131, 0.07042209804058075, -0.06335262954235077, -0.018328173086047173, 0.012284045107662678, -0.10174278169870377, 0.1247268095612526, -0.050289370119571686, 0.12274543195962906, 0.053204745054244995, -0.027257701382040977, -0.12563425302505493, 0.13822337985038757, -0.016294341534376144, 0.053876765072345734, -0.10866685956716537, 0.08770307898521423, 0.015077720396220684, 0.02549130842089653, -0.04760930687189102, 0.008118344470858574, 0.05377596989274025, 0.1400151252746582, 0.10326597094535828, 0.026932574808597565, 0.057837508618831635, -0.006043366622179747, -0.07323799282312393, 0.027686968445777893, -0.025374172255396843, -0.01958346925675869, 0.005733662750571966, -0.004554192069917917, -0.019355889409780502, 0.013284776359796524, 0.055202506482601166, -0.1018640324473381, -0.09942173957824707, -0.01910882629454136, -0.09519478678703308, -0.055050380527973175, -0.03140867501497269, -0.023254824802279472, -0.09212931245565414, 0.1838868409395218, -0.04437990486621857, -0.13578633964061737, -0.11007987707853317, 0.0019033553544431925, 0.1486111283302307, -0.045225709676742554, -0.010336182080209255, -0.07518218457698822, 0.03478141129016876, -0.09332891553640366, -0.11550179868936539, 0.04148923605680466, -0.13378509879112244, -0.015270047821104527, 0.008482996374368668, 0.11267374455928802, 0.027162685990333557, 0.04801814630627632, 0.0047332472167909145, -0.024555513635277748, -0.06483937799930573, -0.09122756868600845, -0.043669261038303375, 0.09670969843864441, 0.06897187978029251, 0.0783773809671402, -0.08926510810852051, -0.06003332883119583, -0.08205755054950714, 0.04932922497391701, 0.016849970445036888, 0.25453799962997437, -0.05867455154657364, 0.030807722359895706, 0.17268428206443787, -0.0318191833794117, -0.19446472823619843, 0.01789078488945961, 0.03675423189997673, 0.01594163477420807, 0.05941939353942871, -0.029495857656002045, 0.05847671627998352, 0.04023072496056557, 0.007380224298685789, 0.0866488441824913, -0.0694388896226883, -0.10546460747718811, 0.009881772100925446, 0.09158194810152054, 0.12331856787204742, -0.11232630908489227, -0.01498534344136715, -0.018007252365350723, -0.2074209600687027, 0.12429024279117584, -0.13874967396259308, 0.053524695336818695, -0.015555708669126034, 0.011107619851827621, 0.056599538773298264, -0.07212147116661072, 0.10090462863445282, -0.01189809013158083, 0.050777412950992584, -0.05779271200299263, 0.07095905393362045, -0.006278201472014189, -0.086259625852108, 0.20883174240589142, -0.12471827119588852, 0.038542233407497406, -0.11474356055259705, -0.005841143429279327, -0.08273031562566757, 0.13327980041503906, -0.010855582542717457, -0.08407806605100632, 
-0.0424998477101326, 0.017595306038856506, 0.10740312933921814, 0.029023880138993263, -0.042589135468006134, 0.00016755476826801896, 0.07898995280265808, 0.20494045317173004, 0.07369536906480789, -0.027891352772712708, -0.03843643516302109, 0.012983824126422405, -0.001655329717323184, 0.039674852043390274, -0.17510156333446503, 0.0218789242208004, 0.08635054528713226, 0.02671220153570175, 0.054597482085227966, 0.02059761807322502, -0.119133859872818, -0.04177311807870865, 0.025131920352578163, -0.07102709263563156, -0.08516023308038712, -0.058797381818294525, -0.07614436745643616, -0.029291778802871704, 0.03273461014032364, 0.09341883659362793, -0.0740918442606926, 0.005438433960080147, 0.015865152701735497, 0.02863139845430851, -0.061903420835733414, 0.16079296171665192, 0.05289113521575928, 0.049619317054748535, -0.08578438311815262, 0.08256743103265762, -0.0197755116969347, 0.007763322908431292, 0.023432442918419838, 0.13278336822986603, -0.12328383326530457, -0.04330151900649071, -0.050892334431409836, 0.03292956203222275, -0.10766217857599258, -0.030213838443160057, -0.04554226994514465, 0.010374343954026699, -0.12482529133558273, -0.00024388986639678478, 0.05825837329030037, -0.002579550491645932, 0.033530183136463165, 0.009460975416004658, -0.1408989578485489, 0.05175449326634407, 0.0842851921916008, 0.08804450184106827, -0.04433843493461609, 0.04947071895003319, 0.06460025161504745, 0.11734232306480408, -0.05440857261419296, -0.028062060475349426, -0.07408006489276886, -0.0343991219997406, -0.10176876932382584, 0.012300397269427776, -0.07407423853874207, 0.009112134575843811, -0.027269573882222176, -0.016044406220316887, 0.00847469363361597, 0.06882396340370178, -0.015720322728157043, -0.02849927544593811, -0.05347312614321709, -0.005964689888060093, -0.045704059302806854, -0.058404143899679184, 0.0331047885119915, -0.07301881909370422, 0.06700073927640915, 0.040559396147727966, -0.04888074845075607, -0.021694112569093704, -0.09598322212696075, -0.01012734230607748, 0.012738529592752457, 0.0021301975939422846, 0.036383312195539474, -0.004473500419408083, 0.02415117248892784, -0.04128136858344078, -0.00007176508370321244, -0.04189896583557129, 0.1533731371164322, -0.051524896174669266, 0.03562973812222481, -0.08267316967248917, -0.06259571015834808, -0.056790370494127274, 0.025219015777111053, 0.0029966847505420446, 0.1414456069469452, 0.023960420861840248, -0.02921944670379162, 0.04937699809670448, -0.1678663045167923, -0.06886745989322662, 0.01656806468963623, -0.060326844453811646, 0.07070670276880264, -0.03291899338364601, 0.055441442877054214, 0.046910036355257034, 0.13919316232204437, 0.030227724462747574, 0.08844170719385147, -0.0208622794598341, 0.09952306002378464, 0.1229407787322998, -0.09239897131919861, 0.033712029457092285, -0.09225103259086609, 0.03388744220137596, 0.008709928020834923, 0.06237770617008209, 0.03419071435928345, -0.13849963247776031, 0.01112623605877161, 0.009192573837935925, -0.0923876091837883, 0.0505826473236084, 0.04075469821691513, -0.07952485233545303, -0.1286037415266037, -0.19805677235126495, 0.05451531335711479, 0.04145936295390129, -0.05490671843290329, 0.16705623269081116, 0.16804559528827667, -0.10175403952598572, 0.04485943913459778, 0.09743798524141312, -0.07919175922870636, -0.06880368292331696, -0.08737150579690933, -0.009053880348801613, -0.11276506632566452, 0.00597850838676095, -0.04163322597742081, -0.076632060110569, 0.0779903307557106, -0.000051530179916881025, 0.008260790258646011, 0.09677886962890625, 0.06672247499227524, 
-0.09121920168399811, -0.025909170508384705, -0.008087369613349438, -0.01761714741587639, 0.12960416078567505, -0.0007567296852357686, 0.07304670661687851, 0.009819409810006618, 0.07950718700885773, 0.023900633677840233, 0.0752381831407547, 0.0737033411860466, 0.02703234553337097, -0.07822176814079285, -0.049229077994823456, 0.04644935950636864, -0.01863996870815754, 0.21425288915634155, 0.03689561039209366, -0.03259152173995972, -0.03133239969611168, 0.03375157713890076, -0.05702069029211998, -0.03776472806930542, -0.07978661358356476, 0.21125322580337524, -0.08826237171888351, -0.05117814242839813, -0.021134469658136368, -0.08767136186361313, 0.0024325200356543064, 0.2515968084335327, 0.1953480988740921, 0.03866541385650635, -0.025240326300263405, -0.023616891354322433, -0.008956116624176502, -0.04370782524347305, 0.13092181086540222, 0.05962168425321579, 0.13993044197559357, -0.05801907181739807, 0.08837420493364334, -0.04737475886940956, -0.07219667732715607, -0.1107359305024147, -0.023305831477046013, -0.06531453132629395, -0.03671834245324135, 0.025858178734779358, 0.05409985035657883, -0.04726837947964668, -0.1424376666545868, 0.03199063986539841, -0.07741406559944153, -0.037268999963998795, -0.026748279109597206, -0.04257960990071297, 0.011116503737866879, 0.06977462023496628, -0.032267894595861435, 0.04293423891067505, 0.13323873281478882, -0.04967600852251053, -0.1448986977338791, -0.053339164704084396, -0.00794733315706253, -0.015013499185442924, 0.14240214228630066, -0.0424373485147953, 0.0697924941778183, 0.05833081528544426, 0.02511918731033802, -0.08428977429866791, 0.03983502835035324, 0.005072526168078184, -0.06664147228002548, -0.004491598345339298, 0.11439147591590881, -0.04422639682888985, -0.027057744562625885, -0.04012627154588699, -0.04315716400742531, -0.031706877052783966, 0.03655581548810005, 0.032865263521671295, -0.10475542396306992, 0.014503350481390953, -0.07987821102142334, 0.13711492717266083, 0.05958379805088043, -0.03336196020245552, -0.02197822369635105, -0.13693846762180328, -0.03198183327913284, 0.04395919665694237, 0.08550164103507996, 0.005435250699520111, -0.17759369313716888, -0.0018220035126432776, 0.09671568125486374, -0.0028556210454553366, -0.20799534022808075, -0.007136096712201834, -0.07247530668973923, -0.07008638978004456, -0.026282835751771927, 0.12456485629081726, -0.01357024721801281, 0.021119583398103714, 0.022257275879383087, -0.189652681350708, 0.0034079456236213446, 0.0938555970788002, -0.1419302225112915, -0.07851716876029968 ]
null
null
null
# Lora of haruka_makino_onichichi This model is trained with [HCP-Diffusion](https://github.com/7eu7d7/HCP-Diffusion), and the auto-training framework is maintained by [DeepGHS Team](https://huggingface.co/deepghs). The base model used during training is [NAI](https://huggingface.co/deepghs/animefull-latest), and the base model used for generating preview images is [Meina/MeinaMix_V11](https://huggingface.co/Meina/MeinaMix_V11). After downloading the pt and safetensors files for the specified step, you need to use them simultaneously. The pt file will be used as an embedding, while the safetensors file will be loaded for Lora. For example, if you want to use the model from step 4800, you need to download `4800/haruka_makino_onichichi.pt` as the embedding and `4800/haruka_makino_onichichi.safetensors` for loading Lora. By using both files together, you can generate images for the desired characters (a usage sketch follows below). **The best step we recommend is 4800**, with a score of 0.982. The trigger words are: 1. `haruka_makino_onichichi` 2. `blue_eyes, blush, long_hair, purple_hair, breasts, large_breasts, pink_hair` This model is not recommended for the following groups, and we express our regret: 1. Individuals who cannot tolerate any deviations from the original character design, even in the slightest detail. 2. Individuals whose application scenarios have high demands for accuracy in recreating character outfits. 3. Individuals who cannot accept the potential randomness in AI-generated images based on the Stable Diffusion algorithm. 4. Individuals who are not comfortable with the fully automated process of training character models using LoRA, or those who believe that training character models must be done purely through manual operations to avoid disrespecting the characters. 5. Individuals who find the generated image content offensive to their values. 
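A minimal usage sketch, assuming a diffusers-based pipeline: the base checkpoint `Meina/MeinaMix_V11` is the preview model named above and the file paths follow the step-4800 layout described in this card, but the loader arguments, prompt, and sampler settings are illustrative assumptions rather than the card's own workflow.

```python
# Hedged sketch: load the step-4800 LoRA plus its paired embedding with diffusers.
# File paths follow this card's layout; all other choices are assumptions.
import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "Meina/MeinaMix_V11", torch_dtype=torch.float16
).to("cuda")

# The safetensors file carries the LoRA weights ...
pipe.load_lora_weights("4800", weight_name="haruka_makino_onichichi.safetensors")
# ... and the pt file is loaded as a textual-inversion embedding,
# bound to the first trigger word.
pipe.load_textual_inversion(
    "4800/haruka_makino_onichichi.pt", token="haruka_makino_onichichi"
)

image = pipe(
    "haruka_makino_onichichi, blue_eyes, blush, long_hair, masterpiece",
    num_inference_steps=28,
).images[0]
image.save("haruka_4800.png")
```

Whether the `.pt` embedding loads cleanly this way depends on how it was exported; many users instead load both files through a Stable Diffusion webui, which is the workflow HCP-Diffusion outputs are commonly used with.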
These are available steps: | Steps | Score | Download | pattern_1 | pattern_2 | pattern_3 | pattern_4 | pattern_5 | pattern_6 | pattern_7 | pattern_8 | pattern_9 | bikini | bondage | free | maid | miko | nude | nude2 | suit | yukata | |:---------|:----------|:-------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:-------------------------------------------------|:--------------------------------------------------|:-----------------------------------------------|:-------------------------------------|:-------------------------------------|:-----------------------------------------------|:------------------------------------------------|:-------------------------------------|:-------------------------------------------------| | 6000 | 0.977 | [Download](6000/haruka_makino_onichichi.zip) | [<NSFW, click to see>](6000/previews/pattern_1.png) | [<NSFW, click to see>](6000/previews/pattern_2.png) | [<NSFW, click to see>](6000/previews/pattern_3.png) | [<NSFW, click to see>](6000/previews/pattern_4.png) | [<NSFW, click to see>](6000/previews/pattern_5.png) | [<NSFW, click to see>](6000/previews/pattern_6.png) | [<NSFW, click to see>](6000/previews/pattern_7.png) | [<NSFW, click to see>](6000/previews/pattern_8.png) | [<NSFW, click to see>](6000/previews/pattern_9.png) | [<NSFW, click to see>](6000/previews/bikini.png) | [<NSFW, click to see>](6000/previews/bondage.png) | [<NSFW, click to see>](6000/previews/free.png) | ![maid-6000](6000/previews/maid.png) | ![miko-6000](6000/previews/miko.png) | [<NSFW, click to see>](6000/previews/nude.png) | [<NSFW, click to see>](6000/previews/nude2.png) | ![suit-6000](6000/previews/suit.png) | [<NSFW, click to see>](6000/previews/yukata.png) | | 5600 | 0.980 | [Download](5600/haruka_makino_onichichi.zip) | [<NSFW, click to see>](5600/previews/pattern_1.png) | [<NSFW, click to see>](5600/previews/pattern_2.png) | [<NSFW, click to see>](5600/previews/pattern_3.png) | [<NSFW, click to see>](5600/previews/pattern_4.png) | [<NSFW, click to see>](5600/previews/pattern_5.png) | [<NSFW, click to see>](5600/previews/pattern_6.png) | [<NSFW, click to see>](5600/previews/pattern_7.png) | [<NSFW, click to see>](5600/previews/pattern_8.png) | [<NSFW, click to see>](5600/previews/pattern_9.png) | [<NSFW, click to see>](5600/previews/bikini.png) | [<NSFW, click to see>](5600/previews/bondage.png) | [<NSFW, click to see>](5600/previews/free.png) | ![maid-5600](5600/previews/maid.png) | ![miko-5600](5600/previews/miko.png) | [<NSFW, click to see>](5600/previews/nude.png) | [<NSFW, click to see>](5600/previews/nude2.png) | ![suit-5600](5600/previews/suit.png) | [<NSFW, click to see>](5600/previews/yukata.png) | | 5200 | 0.973 | [Download](5200/haruka_makino_onichichi.zip) | [<NSFW, click to see>](5200/previews/pattern_1.png) | [<NSFW, click to see>](5200/previews/pattern_2.png) | [<NSFW, click to see>](5200/previews/pattern_3.png) | [<NSFW, click to see>](5200/previews/pattern_4.png) | [<NSFW, click to see>](5200/previews/pattern_5.png) | [<NSFW, click to see>](5200/previews/pattern_6.png) | [<NSFW, 
click to see>](5200/previews/pattern_7.png) | [<NSFW, click to see>](5200/previews/pattern_8.png) | [<NSFW, click to see>](5200/previews/pattern_9.png) | [<NSFW, click to see>](5200/previews/bikini.png) | [<NSFW, click to see>](5200/previews/bondage.png) | [<NSFW, click to see>](5200/previews/free.png) | ![maid-5200](5200/previews/maid.png) | ![miko-5200](5200/previews/miko.png) | [<NSFW, click to see>](5200/previews/nude.png) | [<NSFW, click to see>](5200/previews/nude2.png) | ![suit-5200](5200/previews/suit.png) | [<NSFW, click to see>](5200/previews/yukata.png) | | **4800** | **0.982** | [**Download**](4800/haruka_makino_onichichi.zip) | [<NSFW, click to see>](4800/previews/pattern_1.png) | [<NSFW, click to see>](4800/previews/pattern_2.png) | [<NSFW, click to see>](4800/previews/pattern_3.png) | [<NSFW, click to see>](4800/previews/pattern_4.png) | [<NSFW, click to see>](4800/previews/pattern_5.png) | [<NSFW, click to see>](4800/previews/pattern_6.png) | [<NSFW, click to see>](4800/previews/pattern_7.png) | [<NSFW, click to see>](4800/previews/pattern_8.png) | [<NSFW, click to see>](4800/previews/pattern_9.png) | [<NSFW, click to see>](4800/previews/bikini.png) | [<NSFW, click to see>](4800/previews/bondage.png) | [<NSFW, click to see>](4800/previews/free.png) | ![maid-4800](4800/previews/maid.png) | ![miko-4800](4800/previews/miko.png) | [<NSFW, click to see>](4800/previews/nude.png) | [<NSFW, click to see>](4800/previews/nude2.png) | ![suit-4800](4800/previews/suit.png) | [<NSFW, click to see>](4800/previews/yukata.png) | | 4400 | 0.978 | [Download](4400/haruka_makino_onichichi.zip) | [<NSFW, click to see>](4400/previews/pattern_1.png) | [<NSFW, click to see>](4400/previews/pattern_2.png) | [<NSFW, click to see>](4400/previews/pattern_3.png) | [<NSFW, click to see>](4400/previews/pattern_4.png) | [<NSFW, click to see>](4400/previews/pattern_5.png) | [<NSFW, click to see>](4400/previews/pattern_6.png) | [<NSFW, click to see>](4400/previews/pattern_7.png) | [<NSFW, click to see>](4400/previews/pattern_8.png) | [<NSFW, click to see>](4400/previews/pattern_9.png) | [<NSFW, click to see>](4400/previews/bikini.png) | [<NSFW, click to see>](4400/previews/bondage.png) | [<NSFW, click to see>](4400/previews/free.png) | ![maid-4400](4400/previews/maid.png) | ![miko-4400](4400/previews/miko.png) | [<NSFW, click to see>](4400/previews/nude.png) | [<NSFW, click to see>](4400/previews/nude2.png) | ![suit-4400](4400/previews/suit.png) | [<NSFW, click to see>](4400/previews/yukata.png) | | 4000 | 0.978 | [Download](4000/haruka_makino_onichichi.zip) | [<NSFW, click to see>](4000/previews/pattern_1.png) | [<NSFW, click to see>](4000/previews/pattern_2.png) | [<NSFW, click to see>](4000/previews/pattern_3.png) | [<NSFW, click to see>](4000/previews/pattern_4.png) | [<NSFW, click to see>](4000/previews/pattern_5.png) | [<NSFW, click to see>](4000/previews/pattern_6.png) | [<NSFW, click to see>](4000/previews/pattern_7.png) | [<NSFW, click to see>](4000/previews/pattern_8.png) | [<NSFW, click to see>](4000/previews/pattern_9.png) | [<NSFW, click to see>](4000/previews/bikini.png) | [<NSFW, click to see>](4000/previews/bondage.png) | [<NSFW, click to see>](4000/previews/free.png) | ![maid-4000](4000/previews/maid.png) | ![miko-4000](4000/previews/miko.png) | [<NSFW, click to see>](4000/previews/nude.png) | [<NSFW, click to see>](4000/previews/nude2.png) | ![suit-4000](4000/previews/suit.png) | [<NSFW, click to see>](4000/previews/yukata.png) | | 3600 | 0.949 | [Download](3600/haruka_makino_onichichi.zip) 
| [<NSFW, click to see>](3600/previews/pattern_1.png) | [<NSFW, click to see>](3600/previews/pattern_2.png) | [<NSFW, click to see>](3600/previews/pattern_3.png) | [<NSFW, click to see>](3600/previews/pattern_4.png) | [<NSFW, click to see>](3600/previews/pattern_5.png) | [<NSFW, click to see>](3600/previews/pattern_6.png) | [<NSFW, click to see>](3600/previews/pattern_7.png) | [<NSFW, click to see>](3600/previews/pattern_8.png) | [<NSFW, click to see>](3600/previews/pattern_9.png) | [<NSFW, click to see>](3600/previews/bikini.png) | [<NSFW, click to see>](3600/previews/bondage.png) | [<NSFW, click to see>](3600/previews/free.png) | ![maid-3600](3600/previews/maid.png) | ![miko-3600](3600/previews/miko.png) | [<NSFW, click to see>](3600/previews/nude.png) | [<NSFW, click to see>](3600/previews/nude2.png) | ![suit-3600](3600/previews/suit.png) | [<NSFW, click to see>](3600/previews/yukata.png) | | 3200 | 0.960 | [Download](3200/haruka_makino_onichichi.zip) | [<NSFW, click to see>](3200/previews/pattern_1.png) | [<NSFW, click to see>](3200/previews/pattern_2.png) | [<NSFW, click to see>](3200/previews/pattern_3.png) | [<NSFW, click to see>](3200/previews/pattern_4.png) | [<NSFW, click to see>](3200/previews/pattern_5.png) | [<NSFW, click to see>](3200/previews/pattern_6.png) | [<NSFW, click to see>](3200/previews/pattern_7.png) | [<NSFW, click to see>](3200/previews/pattern_8.png) | [<NSFW, click to see>](3200/previews/pattern_9.png) | [<NSFW, click to see>](3200/previews/bikini.png) | [<NSFW, click to see>](3200/previews/bondage.png) | [<NSFW, click to see>](3200/previews/free.png) | ![maid-3200](3200/previews/maid.png) | ![miko-3200](3200/previews/miko.png) | [<NSFW, click to see>](3200/previews/nude.png) | [<NSFW, click to see>](3200/previews/nude2.png) | ![suit-3200](3200/previews/suit.png) | [<NSFW, click to see>](3200/previews/yukata.png) | | 2800 | 0.919 | [Download](2800/haruka_makino_onichichi.zip) | [<NSFW, click to see>](2800/previews/pattern_1.png) | [<NSFW, click to see>](2800/previews/pattern_2.png) | [<NSFW, click to see>](2800/previews/pattern_3.png) | [<NSFW, click to see>](2800/previews/pattern_4.png) | [<NSFW, click to see>](2800/previews/pattern_5.png) | [<NSFW, click to see>](2800/previews/pattern_6.png) | [<NSFW, click to see>](2800/previews/pattern_7.png) | [<NSFW, click to see>](2800/previews/pattern_8.png) | [<NSFW, click to see>](2800/previews/pattern_9.png) | [<NSFW, click to see>](2800/previews/bikini.png) | [<NSFW, click to see>](2800/previews/bondage.png) | [<NSFW, click to see>](2800/previews/free.png) | ![maid-2800](2800/previews/maid.png) | ![miko-2800](2800/previews/miko.png) | [<NSFW, click to see>](2800/previews/nude.png) | [<NSFW, click to see>](2800/previews/nude2.png) | ![suit-2800](2800/previews/suit.png) | [<NSFW, click to see>](2800/previews/yukata.png) | | 2400 | 0.872 | [Download](2400/haruka_makino_onichichi.zip) | [<NSFW, click to see>](2400/previews/pattern_1.png) | [<NSFW, click to see>](2400/previews/pattern_2.png) | [<NSFW, click to see>](2400/previews/pattern_3.png) | [<NSFW, click to see>](2400/previews/pattern_4.png) | [<NSFW, click to see>](2400/previews/pattern_5.png) | [<NSFW, click to see>](2400/previews/pattern_6.png) | [<NSFW, click to see>](2400/previews/pattern_7.png) | [<NSFW, click to see>](2400/previews/pattern_8.png) | [<NSFW, click to see>](2400/previews/pattern_9.png) | [<NSFW, click to see>](2400/previews/bikini.png) | [<NSFW, click to see>](2400/previews/bondage.png) | [<NSFW, click to see>](2400/previews/free.png) | 
![maid-2400](2400/previews/maid.png) | ![miko-2400](2400/previews/miko.png) | [<NSFW, click to see>](2400/previews/nude.png) | [<NSFW, click to see>](2400/previews/nude2.png) | ![suit-2400](2400/previews/suit.png) | [<NSFW, click to see>](2400/previews/yukata.png) | | 2000 | 0.914 | [Download](2000/haruka_makino_onichichi.zip) | [<NSFW, click to see>](2000/previews/pattern_1.png) | [<NSFW, click to see>](2000/previews/pattern_2.png) | [<NSFW, click to see>](2000/previews/pattern_3.png) | [<NSFW, click to see>](2000/previews/pattern_4.png) | [<NSFW, click to see>](2000/previews/pattern_5.png) | [<NSFW, click to see>](2000/previews/pattern_6.png) | [<NSFW, click to see>](2000/previews/pattern_7.png) | [<NSFW, click to see>](2000/previews/pattern_8.png) | [<NSFW, click to see>](2000/previews/pattern_9.png) | [<NSFW, click to see>](2000/previews/bikini.png) | [<NSFW, click to see>](2000/previews/bondage.png) | [<NSFW, click to see>](2000/previews/free.png) | ![maid-2000](2000/previews/maid.png) | ![miko-2000](2000/previews/miko.png) | [<NSFW, click to see>](2000/previews/nude.png) | [<NSFW, click to see>](2000/previews/nude2.png) | ![suit-2000](2000/previews/suit.png) | [<NSFW, click to see>](2000/previews/yukata.png) | | 1600 | 0.770 | [Download](1600/haruka_makino_onichichi.zip) | [<NSFW, click to see>](1600/previews/pattern_1.png) | [<NSFW, click to see>](1600/previews/pattern_2.png) | [<NSFW, click to see>](1600/previews/pattern_3.png) | [<NSFW, click to see>](1600/previews/pattern_4.png) | [<NSFW, click to see>](1600/previews/pattern_5.png) | [<NSFW, click to see>](1600/previews/pattern_6.png) | [<NSFW, click to see>](1600/previews/pattern_7.png) | [<NSFW, click to see>](1600/previews/pattern_8.png) | [<NSFW, click to see>](1600/previews/pattern_9.png) | [<NSFW, click to see>](1600/previews/bikini.png) | [<NSFW, click to see>](1600/previews/bondage.png) | [<NSFW, click to see>](1600/previews/free.png) | ![maid-1600](1600/previews/maid.png) | ![miko-1600](1600/previews/miko.png) | [<NSFW, click to see>](1600/previews/nude.png) | [<NSFW, click to see>](1600/previews/nude2.png) | ![suit-1600](1600/previews/suit.png) | [<NSFW, click to see>](1600/previews/yukata.png) | | 1200 | 0.702 | [Download](1200/haruka_makino_onichichi.zip) | [<NSFW, click to see>](1200/previews/pattern_1.png) | [<NSFW, click to see>](1200/previews/pattern_2.png) | [<NSFW, click to see>](1200/previews/pattern_3.png) | [<NSFW, click to see>](1200/previews/pattern_4.png) | [<NSFW, click to see>](1200/previews/pattern_5.png) | [<NSFW, click to see>](1200/previews/pattern_6.png) | [<NSFW, click to see>](1200/previews/pattern_7.png) | [<NSFW, click to see>](1200/previews/pattern_8.png) | [<NSFW, click to see>](1200/previews/pattern_9.png) | [<NSFW, click to see>](1200/previews/bikini.png) | [<NSFW, click to see>](1200/previews/bondage.png) | [<NSFW, click to see>](1200/previews/free.png) | ![maid-1200](1200/previews/maid.png) | ![miko-1200](1200/previews/miko.png) | [<NSFW, click to see>](1200/previews/nude.png) | [<NSFW, click to see>](1200/previews/nude2.png) | ![suit-1200](1200/previews/suit.png) | [<NSFW, click to see>](1200/previews/yukata.png) | | 800 | 0.643 | [Download](800/haruka_makino_onichichi.zip) | [<NSFW, click to see>](800/previews/pattern_1.png) | [<NSFW, click to see>](800/previews/pattern_2.png) | [<NSFW, click to see>](800/previews/pattern_3.png) | [<NSFW, click to see>](800/previews/pattern_4.png) | [<NSFW, click to see>](800/previews/pattern_5.png) | [<NSFW, click to see>](800/previews/pattern_6.png) | 
[<NSFW, click to see>](800/previews/pattern_7.png) | [<NSFW, click to see>](800/previews/pattern_8.png) | [<NSFW, click to see>](800/previews/pattern_9.png) | [<NSFW, click to see>](800/previews/bikini.png) | [<NSFW, click to see>](800/previews/bondage.png) | [<NSFW, click to see>](800/previews/free.png) | ![maid-800](800/previews/maid.png) | ![miko-800](800/previews/miko.png) | [<NSFW, click to see>](800/previews/nude.png) | [<NSFW, click to see>](800/previews/nude2.png) | ![suit-800](800/previews/suit.png) | [<NSFW, click to see>](800/previews/yukata.png) | | 400 | 0.525 | [Download](400/haruka_makino_onichichi.zip) | [<NSFW, click to see>](400/previews/pattern_1.png) | [<NSFW, click to see>](400/previews/pattern_2.png) | [<NSFW, click to see>](400/previews/pattern_3.png) | [<NSFW, click to see>](400/previews/pattern_4.png) | [<NSFW, click to see>](400/previews/pattern_5.png) | [<NSFW, click to see>](400/previews/pattern_6.png) | [<NSFW, click to see>](400/previews/pattern_7.png) | [<NSFW, click to see>](400/previews/pattern_8.png) | [<NSFW, click to see>](400/previews/pattern_9.png) | [<NSFW, click to see>](400/previews/bikini.png) | [<NSFW, click to see>](400/previews/bondage.png) | [<NSFW, click to see>](400/previews/free.png) | ![maid-400](400/previews/maid.png) | ![miko-400](400/previews/miko.png) | [<NSFW, click to see>](400/previews/nude.png) | [<NSFW, click to see>](400/previews/nude2.png) | ![suit-400](400/previews/suit.png) | [<NSFW, click to see>](400/previews/yukata.png) |
{"license": "mit", "tags": ["art"], "datasets": ["CyberHarem/haruka_makino_onichichi"], "pipeline_tag": "text-to-image"}
text-to-image
CyberHarem/haruka_makino_onichichi
[ "art", "text-to-image", "dataset:CyberHarem/haruka_makino_onichichi", "license:mit", "region:us" ]
2023-11-12T17:26:47+00:00
[]
[]
TAGS #art #text-to-image #dataset-CyberHarem/haruka_makino_onichichi #license-mit #region-us
Lora of haruka\_makino\_onichichi ================================= This model is trained with HCP-Diffusion, and the auto-training framework is maintained by DeepGHS Team. The base model used during training is NAI, and the base model used for generating preview images is Meina/MeinaMix\_V11. After downloading the pt and safetensors files for the specified step, you need to use them simultaneously. The pt file will be used as an embedding, while the safetensors file will be loaded for Lora. For example, if you want to use the model from step 4800, you need to download '4800/haruka\_makino\_onichichi.pt' as the embedding and '4800/haruka\_makino\_onichichi.safetensors' for loading Lora. By using both files together, you can generate images for the desired characters. The best step we recommend is 4800, with a score of 0.982. The trigger words are: 1. 'haruka\_makino\_onichichi' 2. 'blue\_eyes, blush, long\_hair, purple\_hair, breasts, large\_breasts, pink\_hair' This model is not recommended for the following groups, and we express our regret: 1. Individuals who cannot tolerate any deviations from the original character design, even in the slightest detail. 2. Individuals whose application scenarios have high demands for accuracy in recreating character outfits. 3. Individuals who cannot accept the potential randomness in AI-generated images based on the Stable Diffusion algorithm. 4. Individuals who are not comfortable with the fully automated process of training character models using LoRA, or those who believe that training character models must be done purely through manual operations to avoid disrespecting the characters. 5. Individuals who find the generated image content offensive to their values. These are available steps:
[]
[ "TAGS\n#art #text-to-image #dataset-CyberHarem/haruka_makino_onichichi #license-mit #region-us \n" ]
[ 38 ]
[ "passage: TAGS\n#art #text-to-image #dataset-CyberHarem/haruka_makino_onichichi #license-mit #region-us \n" ]
[ 0.003054808359593153, 0.08358627557754517, -0.004329655319452286, 0.10880003869533539, 0.11663851141929626, 0.0768394023180008, 0.2980847656726837, 0.09001840651035309, 0.08185688406229019, -0.016139397397637367, 0.1508210450410843, 0.06848318874835968, 0.042822062969207764, 0.03177979588508606, -0.017472650855779648, -0.2685486674308777, 0.004068267997354269, -0.019763676449656487, 0.05298455432057381, 0.03670572489500046, 0.04115130752325058, -0.040159255266189575, 0.1287146508693695, -0.021694613620638847, -0.13337916135787964, -0.03177391365170479, -0.017858676612377167, -0.05035441368818283, 0.043917104601860046, 0.035031333565711975, 0.014313082210719585, 0.007114174775779247, 0.015541854314506054, -0.05769633501768112, 0.061341293156147, -0.059375446289777756, -0.15801580250263214, 0.001676655258052051, 0.10995372384786606, -0.06115318089723587, 0.0927400141954422, 0.008077825419604778, -0.12572742998600006, 0.02755776420235634, -0.15978792309761047, 0.15026497840881348, -0.022349629551172256, 0.10001709312200546, 0.1869005411863327, 0.03439152613282204, 0.030449768528342247, 0.036288101226091385, -0.0726819559931755, 0.05907243862748146, 0.018391868099570274, -0.10235249251127243, -0.09029573202133179, 0.13940729200839996, 0.039046723395586014, 0.15282617509365082, -0.11146479845046997, 0.10103201866149902, -0.008933616802096367, -0.028625965118408203, -0.18587389588356018, -0.08039884269237518, 0.007073202170431614, 0.06445050984621048, 0.03146335110068321, 0.029864104464650154, 0.27247294783592224, 0.10994044691324234, 0.037289805710315704, 0.020703593268990517, -0.05859289690852165, 0.06448348611593246, -0.05882278457283974, 0.12140117585659027, -0.012220172211527824, 0.05405706539750099, -0.05618252605199814, -0.026918891817331314, -0.14110882580280304, -0.014286376535892487, -0.12481964379549026, -0.09009703248739243, -0.05237601324915886, 0.07421945780515671, -0.19375541806221008, -0.0639209896326065, -0.04331593215465546, -0.07745645940303802, 0.020248880609869957, -0.0835219994187355, 0.11471551656723022, 0.06972755491733551, 0.036321934312582016, -0.1100625991821289, 0.13321326673030853, 0.09458547830581665, 0.13827262818813324, 0.024033645167946815, -0.02822653390467167, 0.17730297148227692, 0.11729143559932709, -0.09462990611791611, -0.05788768455386162, 0.05399634316563606, 0.02933042123913765, -0.062009748071432114, 0.03837206959724426, -0.1080923080444336, -0.18321895599365234, 0.01802239753305912, -0.11245857924222946, -0.0012837534304708242, 0.0013142921961843967, 0.013857356272637844, -0.09814706444740295, 0.010871099308133125, 0.15969282388687134, 0.014912242069840431, 0.04266992211341858, -0.016781775280833244, -0.05904139205813408, -0.0249275341629982, -0.0024424337316304445, 0.03549293056130409, 0.12904277443885803, 0.057093989104032516, -0.0996839627623558, 0.04641588032245636, 0.023983830586075783, 0.009775056503713131, 0.11690062284469604, 0.02821420133113861, 0.05351336672902107, -0.15567831695079803, -0.024188214913010597, -0.05593842640519142, 0.06242949515581131, -0.06574077159166336, 0.05147198960185051, 0.032018668949604034, -0.027250465005636215, 0.006352766416966915, 0.007643545046448708, -0.03922349959611893, -0.10604165494441986, 0.10216028243303299, -0.1334143579006195, 0.12882834672927856, -0.10733190923929214, -0.027013465762138367, -0.06681248545646667, -0.039798881858587265, -0.060731109231710434, -0.036491759121418, -0.03502492606639862, 0.20646987855434418, 0.04894077032804489, 0.0612025260925293, -0.12047244608402252, 
0.012702850624918938, -0.014951823279261589, 0.29116931557655334, -0.137055903673172, -0.02042619325220585, 0.12663471698760986, -0.05596819519996643, -0.16627027094364166, 0.06724292039871216, -0.05235455185174942, 0.1784006506204605, 0.037777047604322433, 0.26165950298309326, -0.11162842065095901, -0.1065480038523674, -0.03596417233347893, 0.06582985073328018, -0.08241986483335495, -0.11425050348043442, 0.07582902908325195, 0.042189281433820724, 0.044488076120615005, -0.01669718325138092, -0.01769150421023369, 0.08844970911741257, -0.08282351493835449, -0.061472274363040924, 0.03126223012804985, -0.03962856903672218, -0.05021457001566887, 0.05271615460515022, 0.07610815763473511, -0.06103651970624924, -0.01474288571625948, -0.08431180566549301, -0.01053778175264597, 0.07478468120098114, 0.02412237972021103, -0.08679026365280151, 0.07164625078439713, 0.03519141674041748, -0.00018878068658523262, 0.00424162857234478, 0.03679212927818298, -0.06663559377193451, 0.04731813073158264, 0.11290396004915237, -0.11417815834283829, 0.03687813878059387, -0.020829949527978897, 0.011229710653424263, 0.053121112287044525, 0.034973107278347015, 0.02201712876558304, -0.010373326949775219, -0.1521281898021698, 0.09994059801101685, 0.0019774294923990965, 0.11063417792320251, -0.08457139879465103, -0.046956367790699005, 0.21367254853248596, -0.002972035901620984, -0.027816973626613617, 0.07592131942510605, 0.025046246126294136, -0.040484022349119186, -0.07462619245052338, 0.0034335581585764885, 0.10775267332792282, 0.030091742053627968, -0.1015605702996254, 0.1711140125989914, -0.04058445990085602, 0.11755663901567459, 0.1835813969373703, -0.1959567368030548, 0.022044211626052856, -0.053462255746126175, 0.022608796134591103, -0.006371218711137772, 0.0012625324307009578, 0.009211806580424309, -0.15343822538852692, -0.04141071066260338, 0.0516611747443676, -0.06996778398752213, 0.07321125268936157, 0.03311295434832573, -0.06551538407802582, -0.0770450085401535, 0.05867328122258186, 0.21470333635807037, -0.23530244827270508, 0.14678126573562622, 0.24836717545986176, 0.03148827329277992, 0.24103176593780518, 0.01738029345870018, 0.06324469298124313, -0.060123030096292496, -0.03627503290772438, -0.019873252138495445, 0.21356818079948425, -0.1656571626663208, -0.01923077180981636, -0.009770942851901054, -0.05802549049258232, 0.009988849051296711, -0.12341885268688202, -0.17509229481220245, -0.07772316038608551, 0.01634269580245018, -0.09071166813373566, 0.0650460347533226, -0.036423295736312866, 0.08758952468633652, -0.05957324057817459, -0.04947901889681816, 0.07889299094676971, -0.019454900175333023, -0.024777282029390335, 0.07162288576364517, -0.09975489228963852, -0.21318739652633667, -0.07056563347578049, -0.15443070232868195, -0.11731652170419693, 0.003291902830824256, 0.07626581192016602, -0.17094500362873077, 0.02834467589855194, -0.044479113072156906, -0.11880059540271759, -0.000916367513127625, -0.08610852807760239, -0.02375752106308937, 0.0454992800951004, -0.12352128326892853, -0.054725464433431625, -0.040467578917741776, -0.02793247625231743, 0.009841453284025192, 0.24595019221305847, -0.1143970713019371, 0.18811380863189697, 0.04440566897392273, 0.035627514123916626, 0.060528334230184555, -0.001926771248690784, 0.1803455799818039, -0.11791272461414337, 0.09207409620285034, 0.07766677439212799, 0.017474781721830368, 0.09559261053800583, 0.18701548874378204, 0.10355134308338165, -0.08213608711957932, -0.0020340662449598312, -0.00034501368645578623, -0.0953628197312355, -0.0681597962975502, 
-0.05970044434070587, -0.06550979614257812, 0.1755470335483551, 0.07200678437948227, 0.0749567449092865, 0.17877815663814545, 0.09377841651439667, 0.04270436614751816, -0.06288793683052063, 0.11886485666036606, 0.0647178441286087, -0.029295215383172035, -0.010201417841017246, 0.04714002087712288, -0.06487773358821869, -0.01675134338438511, 0.17725412547588348, 0.15107008814811707, 0.059340398758649826, 0.14718130230903625, 0.030202411115169525, 0.09459006041288376, 0.11217950284481049, 0.10307439416646957, -0.006022286601364613, 0.057119738310575485, -0.03194744139909744, -0.07231947034597397, -0.08388464152812958, 0.14885881543159485, 0.08856645226478577, -0.04798612743616104, -0.2585279047489166, 0.047160498797893524, -0.09505556523799896, 0.07999899238348007, -0.05687995254993439, 0.028087815269827843, -0.1404295116662979, 0.07750265300273895, 0.09067244827747345, 0.08847148716449738, -0.04617495834827423, 0.10414346307516098, 0.09044286608695984, -0.0982227474451065, 0.11119560152292252, -0.030857346951961517, 0.1457153707742691, 0.06400848925113678, 0.0057321046479046345, 0.011477416381239891, -0.25033414363861084, -0.0035179434344172478, 0.052595026791095734, -0.1431172639131546, 0.2269602119922638, 0.041081588715314865, -0.052848219871520996, -0.0768330916762352, -0.10921237617731094, 0.10001496225595474, 0.1831134706735611, 0.13840433955192566, 0.04900717735290527, -0.10141851752996445, -0.06854704767465591, -0.045213207602500916, 0.006525220815092325, 0.11030679941177368, 0.012052883394062519, -0.09719293564558029, 0.04684234410524368, -0.016980919986963272, -0.026075607165694237, 0.20189593732357025, -0.10659144073724747, -0.10530033707618713, 0.010476156137883663, 0.04412843659520149, 0.03647645190358162, 0.06281059980392456, 0.0008661271422170103, -0.04594903811812401, -0.018365316092967987, 0.021699700504541397, 0.029492361471056938, -0.06749487668275833, -0.032058484852313995, -0.04436470568180084, -0.025321267545223236, -0.037380773574113846, -0.09312508255243301, -0.06193362921476364, -0.12090450525283813, -0.11857672035694122, 0.0840899646282196, -0.03860753774642944, 0.042779888957738876, -0.12614159286022186, -0.05249106138944626, 0.047445204108953476, -0.007810136303305626, -0.017222123220562935, 0.016597647219896317, -0.07949446141719818, -0.0984925627708435, 0.06456871330738068, -0.12005762755870819, 0.0370478555560112, -0.04351862519979477, -0.10148375481367111, -0.09279412031173706, -0.06472553312778473, -0.08429210633039474, 0.03248747065663338, 0.32911407947540283, -0.011193572543561459, 0.09531241655349731, 0.2062942385673523, -0.07102252542972565, -0.2920047640800476, -0.08110490441322327, -0.24921680986881256, -0.0255583468824625, 0.14814573526382446, -0.13832519948482513, 0.05273626372218132, 0.10576904565095901, -0.0610894151031971, 0.1767152100801468, -0.3538729250431061, -0.08820067346096039, -0.028343915939331055, 0.039181970059871674, 0.43016496300697327, -0.26275303959846497, -0.026136018335819244, -0.10946490615606308, -0.09438318014144897, 0.15297941863536835, -0.02784833498299122, 0.040552034974098206, 0.05367022007703781, 0.04328514635562897, -0.03882886841893196, 0.010901382192969322, 0.19680564105510712, 0.01984032616019249, 0.08553401380777359, -0.13966244459152222, -0.19546659290790558, 0.2125876098871231, -0.02093878760933876, -0.10743051767349243, -0.07367035001516342, -0.05980600789189339, -0.14178064465522766, 0.08631958067417145, -0.05460071563720703, 0.02844608947634697, 0.030591396614909172, -0.025058388710021973, -0.1351589411497116, 
0.11793060600757599, -0.05748957768082619, 0.05436667799949646, 0.2029639035463333, -0.012835098430514336, 0.01891203224658966, -0.055394724011421204, -0.05949554964900017, -0.10874253511428833, 0.0701356753706932, -0.10445047914981842, -0.06424742937088013, 0.08341886848211288, -0.1335669457912445, 0.016004636883735657, 0.04268695041537285, 0.02129218727350235, 0.06681493669748306, 0.025736579671502113, 0.010605322197079659, 0.10574306547641754, 0.20767875015735626, -0.11339757591485977, -0.02998988889157772, -0.01184395793825388, 0.00858709029853344, 0.2344922125339508, -0.06216981261968613, 0.090020552277565, 0.04810217395424843, 0.002725252415984869, 0.005040646530687809, 0.11146320402622223, -0.06867744028568268, -0.12937967479228973, 0.032742708921432495, -0.08648591488599777, -0.05167271941900253, 0.12217424809932709, 0.11344221979379654, -0.13963206112384796, -0.05770057067275047, 0.12223166227340698, -0.05077087879180908, -0.07796429097652435, -0.05614946037530899, 0.09197694063186646, -0.14575839042663574, -0.029243841767311096, -0.023645123466849327, 0.029622355476021767, -0.0680350586771965, 0.1146567240357399, 0.00974673219025135, -0.0024020441342145205, 0.09988704323768616, -0.01865468919277191, 0.00933330599218607, -0.014925374649465084, -0.00793377310037613, -0.00004596014696289785, -0.07358016073703766, -0.1817627251148224, 0.055656109005212784, 0.12510663270950317, -0.047837741672992706, -0.06854520738124847, -0.19279909133911133, 0.009813864715397358, 0.0071889497339725494, 0.0382317379117012, -0.1365216225385666, -0.06345260888338089, -0.0226519126445055, -0.007819948717951775, -0.12270847707986832, -0.12118083238601685, -0.09609812498092651, 0.016086885705590248, 0.07424257695674896, 0.06834089756011963, -0.07657244056463242, -0.05727821961045265, 0.12612010538578033, -0.00989496149122715, 0.064684197306633, 0.08407332748174667, -0.07281912863254547, -0.013089231215417385, -0.2099415510892868, -0.021549703553318977, 0.050848767161369324, -0.02299601212143898, -0.011937916278839111, 0.10200800001621246, -0.004598692525178194, 0.017053011804819107, 0.05784311890602112, 0.03200595825910568, 0.06010814011096954, -0.051189035177230835, 0.002529079094529152, -0.10771507024765015, -0.13797473907470703, -0.08884846419095993, 0.043454866856336594, 0.18128636479377747, -0.04286960884928703, 0.06698037683963776, 0.00629776157438755, 0.06563862413167953, -0.03384009003639221, 0.035939861088991165, 0.0407976359128952, -0.14019082486629486, -0.0898635983467102, -0.13285207748413086, -0.055543217808008194, -0.07170800119638443, 0.21896496415138245, 0.11392580717802048, -0.22949309647083282, 0.03564247488975525, 0.16176678240299225, -0.18661299347877502, 0.021698176860809326, 0.2579510509967804, -0.021578345447778702, -0.01632317155599594, -0.08339658379554749, 0.08229126036167145, -0.024614909663796425, 0.04550144448876381, 0.028766779229044914, 0.12852339446544647, 0.059141580015420914, 0.03721370920538902, 0.08332671225070953, 0.010254733264446259, 0.002556291176006198, -0.01002061553299427, 0.022738518193364143, 0.0694982260465622, -0.05230975151062012, -0.05685820430517197, 0.1981915980577469, -0.043369751423597336, 0.03799061104655266, -0.06839821487665176, -0.04169850051403046, -0.03157582879066467, -0.21254085004329681, -0.06447241455316544, -0.14969901740550995, 0.0911383256316185, -0.02365822158753872, 0.04538953676819801, 0.16202978789806366, 0.032047517597675323, -0.07073930650949478, -0.013644391670823097, -0.13095009326934814, -0.06736356765031815, 0.0785747691988945, 
-0.054856471717357635, 0.008787080645561218, -0.041923534125089645, -0.04677432030439377, -0.030731668695807457, -0.05983506515622139, -0.04264068976044655, 0.06054602935910225, 0.09062603861093521, 0.011446033604443073, -0.17531248927116394, -0.13421256840229034, -0.048532627522945404, -0.003959874156862497, -0.033388834446668625, 0.22021786868572235, 0.014093898236751556, 0.0529153011739254, 0.03536885231733322, 0.07330566644668579, 0.06850362569093704, 0.056532710790634155, -0.044341474771499634, -0.08171680569648743, -0.10285770148038864, -0.005794090684503317, -0.031817011535167694, -0.030880337581038475, -0.01168306265026331, 0.19449976086616516, 0.21307061612606049, -0.19636249542236328, -0.045850686728954315, 0.015154714696109295, 0.023459428921341896, 0.050939008593559265, 0.11311689019203186, -0.02604399062693119, 0.2148059606552124, -0.049757082015275955, 0.0013027333188802004, -0.0839763954281807, -0.06225118041038513, -0.05544685199856758, 0.010742835700511932, 0.10642141103744507, -0.04790468513965607, -0.06259646266698837, 0.21137909591197968, -0.1676148772239685, 0.04438948631286621, 0.17678995430469513, -0.1394767463207245, -0.005599916912615299, 0.05186187103390694, 0.05014937371015549, 0.06369932740926743, 0.10646913200616837, -0.12565414607524872, -0.03781043738126755, -0.03162899613380432, 0.06501659005880356, -0.201155886054039, -0.0902332291007042, -0.012288345955312252, -0.1612989604473114, 0.21385730803012848, -0.04134739190340042, 0.04035670682787895, 0.06229550018906593, -0.01654980331659317, -0.021817350760102272, 0.0345737598836422, 0.013559558428823948, 0.08876452594995499, -0.12948690354824066, -0.011419404298067093, 0.014421733096241951, -0.09486400336027145, 0.0911395400762558, 0.044379912316799164, 0.031202293932437897, 0.08092173933982849, -0.019550452008843422, -0.05688995495438576, 0.14226096868515015, -0.15730425715446472, 0.08865056186914444, -0.01830335520207882, 0.034936077892780304, -0.06715471297502518, -0.01292677316814661, 0.021166225895285606, 0.05638628453016281, -0.18105407059192657, -0.06271874159574509, 0.04074104502797127, -0.07577983289957047, -0.06268312782049179, 0.09864848107099533, -0.1413455307483673, -0.009246311150491238, -0.12247303873300552, 0.04141891375184059, -0.11013329774141312, 0.09519334882497787, 0.1526324450969696, -0.07404793798923492, 0.008995908312499523, -0.060072749853134155, 0.07502181082963943, -0.02520800195634365, 0.02164236083626747, -0.11344010382890701 ]
null
null
transformers
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # pegasus-xsum_readme_summarization This model is a fine-tuned version of [google/pegasus-xsum](https://huggingface.co/google/pegasus-xsum) on the None dataset. It achieves the following results on the evaluation set: - Loss: 2.3151 - Rouge1: 0.4555 - Rouge2: 0.313 - Rougel: 0.43 - Rougelsum: 0.4306 - Gen Len: 20.4628 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 1 - eval_batch_size: 1 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len | |:-------------:|:-----:|:-----:|:---------------:|:------:|:------:|:------:|:---------:|:-------:| | 2.734 | 1.0 | 5831 | 2.4629 | 0.445 | 0.2988 | 0.417 | 0.4173 | 20.8801 | | 2.5168 | 2.0 | 11662 | 2.3496 | 0.4549 | 0.3112 | 0.4284 | 0.4286 | 19.6043 | | 2.3507 | 3.0 | 17493 | 2.3132 | 0.4555 | 0.3133 | 0.4295 | 0.429 | 20.747 | | 2.2409 | 4.0 | 23324 | 2.3151 | 0.4555 | 0.313 | 0.43 | 0.4306 | 20.4628 | ### Framework versions - Transformers 4.35.1 - Pytorch 2.1.0+cu121 - Datasets 2.14.6 - Tokenizers 0.14.1
{"tags": ["generated_from_trainer"], "metrics": ["rouge"], "base_model": "google/pegasus-xsum", "model-index": [{"name": "pegasus-xsum_readme_summarization", "results": []}]}
text2text-generation
bunbohue/pegasus-xsum_readme_summarization
[ "transformers", "safetensors", "pegasus", "text2text-generation", "generated_from_trainer", "base_model:google/pegasus-xsum", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T17:33:52+00:00
[]
[]
TAGS #transformers #safetensors #pegasus #text2text-generation #generated_from_trainer #base_model-google/pegasus-xsum #autotrain_compatible #endpoints_compatible #region-us
pegasus-xsum\_readme\_summarization =================================== This model is a fine-tuned version of google/pegasus-xsum on an unspecified dataset. It achieves the following results on the evaluation set: * Loss: 2.3151 * Rouge1: 0.4555 * Rouge2: 0.313 * Rougel: 0.43 * Rougelsum: 0.4306 * Gen Len: 20.4628 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 2e-05 * train\_batch\_size: 1 * eval\_batch\_size: 1 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * num\_epochs: 4 ### Training results ### Framework versions * Transformers 4.35.1 * Pytorch 2.1.0+cu121 * Datasets 2.14.6 * Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 4", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.1\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #safetensors #pegasus #text2text-generation #generated_from_trainer #base_model-google/pegasus-xsum #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 4", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.1\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 61, 98, 4, 33 ]
[ "passage: TAGS\n#transformers #safetensors #pegasus #text2text-generation #generated_from_trainer #base_model-google/pegasus-xsum #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 4### Training results### Framework versions\n\n\n* Transformers 4.35.1\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.10172242671251297, 0.04882853478193283, -0.0015264730900526047, 0.11111883074045181, 0.17520025372505188, 0.013508275151252747, 0.15961115062236786, 0.09683938324451447, -0.08661746233701706, 0.04224911704659462, 0.13885879516601562, 0.11442262679338455, -0.003248627530410886, 0.15590906143188477, -0.05937732756137848, -0.228379026055336, 0.011811026372015476, 0.014154685661196709, -0.06420863419771194, 0.12490712106227875, 0.08541692048311234, -0.13713061809539795, 0.09952429682016373, -0.024013321846723557, -0.19402900338172913, 0.01598145253956318, 0.044035330414772034, -0.04905517026782036, 0.15138500928878784, 0.029259171336889267, 0.14455187320709229, 0.0256520863622427, 0.08870851993560791, -0.1825685054063797, 0.013656491413712502, 0.048145052045583725, 0.019217753782868385, 0.0717003345489502, 0.04044044762849808, -0.038729097694158554, 0.056541673839092255, -0.07915142923593521, 0.05587053298950195, 0.018856124952435493, -0.1531832218170166, -0.19704779982566833, -0.06724841892719269, 0.0100563308224082, 0.08397325873374939, 0.10502687096595764, -0.021596448495984077, 0.1390131413936615, -0.06996122747659683, 0.09103574603796005, 0.2372785210609436, -0.2980654239654541, -0.07468699663877487, 0.03759709745645523, 0.03166080266237259, 0.10512787103652954, -0.10217012465000153, 0.0006625639507547021, 0.07347573339939117, 0.02511894330382347, 0.11852484941482544, -0.04260604456067085, -0.07213770598173141, 0.006494823843240738, -0.13054481148719788, -0.006512148305773735, 0.15492293238639832, 0.04444269835948944, -0.04611244052648544, -0.03156287595629692, -0.06516954302787781, -0.1334952563047409, -0.044148094952106476, -0.03452738747000694, 0.04763501510024071, -0.03134160488843918, -0.07680898904800415, -0.019510509446263313, -0.10354842245578766, -0.08054516464471817, -0.05454644933342934, 0.17237220704555511, 0.03146454319357872, -0.0040422603487968445, -0.014307046309113503, 0.10173465311527252, -0.027576899155974388, -0.12016573548316956, 0.022005992010235786, 0.01623457670211792, 0.0010778411524370313, -0.07145492732524872, -0.06808293610811234, -0.04366884380578995, 0.02154192142188549, 0.1351044625043869, -0.09064540266990662, 0.050217825919389725, 0.036637622863054276, 0.026378579437732697, -0.09699425846338272, 0.16893184185028076, -0.052002694457769394, -0.042276568710803986, 0.03374367207288742, 0.08169082552194595, 0.0234110988676548, 0.00803831871598959, -0.1040516272187233, 0.005060320720076561, 0.11887533962726593, 0.019531697034835815, -0.08027710020542145, 0.08084481209516525, -0.05847322940826416, 0.009808179922401905, -0.033799562603235245, -0.09141790866851807, 0.03142103925347328, -0.0043964628130197525, -0.06201016157865524, -0.038219716399908066, 0.04062940552830696, 0.021904228255152702, 0.007553889416158199, 0.08271040767431259, -0.09650398045778275, 0.03874387592077255, -0.07652824372053146, -0.11801103502511978, -0.01135663315653801, -0.05645112320780754, 0.027836313471198082, -0.12380296736955643, -0.17397315800189972, -0.030519425868988037, 0.03626451641321182, -0.03220756724476814, -0.02399294823408127, -0.07685209810733795, -0.07330963760614395, -0.005179553292691708, -0.008317205123603344, 0.06529016047716141, -0.0644671618938446, 0.10631736367940903, 0.08091139793395996, 0.084110789000988, -0.04184181988239288, 0.032080039381980896, -0.11588950455188751, 0.025958679616451263, -0.22683224081993103, 0.05250334367156029, -0.046816300600767136, 0.08335600793361664, -0.07218889147043228, -0.07501208037137985, -0.026322349905967712, 
0.019384926185011864, 0.08966941386461258, 0.14442765712738037, -0.13842222094535828, -0.07306040078401566, 0.20499587059020996, -0.08401390165090561, -0.15870893001556396, 0.12334143370389938, -0.055906884372234344, 0.09855148941278458, 0.08958398550748825, 0.16773340106010437, 0.05136476829648018, -0.09311722218990326, 0.022420218214392662, -0.03553925082087517, 0.05501801520586014, -0.02188112586736679, 0.04092022404074669, 0.0015994773712009192, -0.04083312302827835, 0.025312155485153198, -0.031305406242609024, 0.06340857595205307, -0.11016391962766647, -0.07004301249980927, -0.03765065595507622, -0.12217160314321518, 0.06540489941835403, 0.046936918050050735, 0.08486557006835938, -0.12902240455150604, -0.07231326401233673, 0.0720444768667221, 0.05484771355986595, -0.06878514587879181, 0.016331957653164864, -0.06016063317656517, 0.07334239035844803, -0.06595033407211304, -0.01808820851147175, -0.15815295279026031, -0.07728596776723862, 0.004199833143502474, 0.048665083944797516, 0.019773012027144432, -0.026933178305625916, 0.09325553476810455, 0.07989417761564255, -0.07160604000091553, -0.038576655089855194, -0.007176602259278297, 0.008370453491806984, -0.1380251795053482, -0.17625366151332855, 0.019138464704155922, -0.01933250203728676, 0.16235235333442688, -0.2311331033706665, 0.04672013223171234, -0.04560728371143341, 0.07244911789894104, 0.031741976737976074, -0.014180409722030163, -0.03490789607167244, 0.07390619069337845, -0.040559783577919006, -0.06122712045907974, 0.05571635067462921, 0.00164627970661968, -0.07131672650575638, -0.029091820120811462, -0.15579277276992798, 0.21226823329925537, 0.14019833505153656, -0.12485997378826141, -0.12147331237792969, 0.008801869116723537, -0.039145637303590775, -0.024143951013684273, -0.06929466873407364, 0.005926512647420168, 0.14343665540218353, -0.018605703487992287, 0.15149010717868805, -0.07277024537324905, -0.019635610282421112, 0.025517256930470467, -0.06661365181207657, 0.0360119603574276, 0.10211381316184998, 0.0741385743021965, -0.06797447800636292, 0.1377224326133728, 0.1476932018995285, -0.09295463562011719, 0.11546477675437927, -0.02753114141523838, -0.05665360391139984, -0.018816879019141197, 0.01292791310697794, 0.0005471279146149755, 0.096392422914505, -0.11219758540391922, -0.0101626580581069, -0.010133051313459873, 0.027757735922932625, 0.016362274065613747, -0.22459916770458221, -0.047993943095207214, 0.0453740693628788, -0.038716383278369904, -0.007977079600095749, -0.02792634814977646, -0.001995642436668277, 0.10489451885223389, 0.004663415253162384, -0.06040027365088463, 0.028251441195607185, 0.010865502059459686, -0.09572557359933853, 0.21371540427207947, -0.07175824046134949, -0.12593746185302734, -0.1190125048160553, -0.04479965567588806, -0.03392641618847847, 0.04843524098396301, 0.06331752240657806, -0.08272512257099152, -0.024947775527834892, -0.09364435076713562, 0.043013352900743484, 0.04545660316944122, 0.04348136857151985, 0.008274582214653492, -0.002840363886207342, 0.08865350484848022, -0.08284224569797516, -0.01343311183154583, -0.04946038872003555, -0.07170239090919495, 0.051337748765945435, 0.002199070295318961, 0.12322461605072021, 0.1319587230682373, -0.03495561331510544, 0.00041903636883944273, -0.031670764088630676, 0.2626217305660248, -0.06417016685009003, -0.02265373058617115, 0.12068303674459457, 0.005373140797019005, 0.036205630749464035, 0.12603847682476044, 0.04883034899830818, -0.12183685600757599, 0.04312317445874214, 0.03938637301325798, -0.026070233434438705, -0.1961490958929062, 
-0.022836456075310707, -0.02604294940829277, -0.002073317999020219, 0.08637437224388123, 0.0068073091097176075, 0.02002304419875145, 0.08305256813764572, 0.03573545813560486, 0.09322679042816162, -0.006276614964008331, 0.05550111457705498, 0.11101167649030685, 0.03994772583246231, 0.12418112903833389, -0.039997704327106476, -0.08461054414510727, 0.026156414300203323, -0.0237545445561409, 0.19678403437137604, 0.019312411546707153, 0.09172918647527695, 0.034414101392030716, 0.1444602608680725, -0.0044065918773412704, 0.0762002170085907, 0.026228059083223343, -0.06888312846422195, -0.0015408279141411185, -0.03857564181089401, -0.04454777017235756, 0.03220168128609657, -0.077813521027565, 0.07190676033496857, -0.13588620722293854, 0.027118593454360962, 0.06690645217895508, 0.21796369552612305, 0.05504896491765976, -0.34984058141708374, -0.10593530535697937, 0.013230632059276104, -0.019394448027014732, -0.025194093585014343, 0.032938361167907715, 0.1255510151386261, -0.07453419268131256, 0.022654078900814056, -0.058211445808410645, 0.07380899786949158, -0.029102154076099396, 0.05218956992030144, 0.03218758478760719, 0.07876217365264893, -0.02547585777938366, 0.044491324573755264, -0.28116029500961304, 0.27501145005226135, 0.007263156119734049, 0.08009073883295059, -0.021050244569778442, -0.01820124126970768, 0.034458938986063004, 0.09822118282318115, 0.06771837919950485, -0.016137847676873207, -0.04930852726101875, -0.22202658653259277, -0.05025775358080864, 0.04039141535758972, 0.11970026791095734, -0.04584381729364395, 0.12773197889328003, -0.039749957621097565, 0.0018512108363211155, 0.08379676938056946, -0.01856086403131485, -0.10478893667459488, -0.06614323705434799, -0.04695252701640129, 0.0264375451952219, 0.03039167821407318, -0.0713268369436264, -0.1011107861995697, -0.14004285633563995, 0.14360077679157257, 0.012504166923463345, -0.019404089078307152, -0.11420638114213943, 0.07971309870481491, 0.05558609589934349, -0.08022710680961609, 0.05123227462172508, 0.021689413115382195, 0.060140062123537064, 0.03657735884189606, -0.03560497611761093, 0.14293982088565826, -0.06672486662864685, -0.1631993055343628, -0.07249631732702255, 0.11064121127128601, 0.023890510201454163, 0.041151851415634155, -0.005063541699200869, 0.012035991065204144, -0.01714601181447506, -0.07872242480516434, 0.03686372563242912, -0.016197817400097847, 0.046733248978853226, 0.042978134006261826, -0.06092626228928566, 0.0181308314204216, -0.06267847120761871, -0.030035175383090973, 0.16569435596466064, 0.288547158241272, -0.08293379098176956, -0.027607498690485954, 0.03172604367136955, -0.05252699926495552, -0.21762052178382874, 0.07372526079416275, 0.04143528640270233, 0.007853844203054905, 0.05322545766830444, -0.1403459906578064, 0.1106175035238266, 0.0969104990363121, -0.013822521083056927, 0.10246524214744568, -0.2903108298778534, -0.1394607573747635, 0.10803716629743576, 0.17815905809402466, 0.16103844344615936, -0.16264580190181732, -0.021136784926056862, -0.0397154837846756, -0.10785188525915146, 0.07631772756576538, -0.11053699254989624, 0.10833746194839478, -0.0008729500113986433, 0.07303185760974884, 0.012088985182344913, -0.048340342938899994, 0.11557088792324066, -0.011210544966161251, 0.11514582484960556, -0.056452635675668716, -0.005625387188047171, 0.04185688495635986, -0.049051329493522644, 0.02174745872616768, -0.05632898584008217, 0.0411449670791626, -0.0344935804605484, -0.034161120653152466, -0.05955633521080017, 0.037367239594459534, -0.02788614109158516, -0.058096107095479965, 
-0.02394854463636875, 0.006154559087008238, 0.03347437456250191, -0.008290124125778675, 0.10407456755638123, -0.010565142147243023, 0.16307108104228973, 0.09299541264772415, 0.080560103058815, -0.07174907624721527, 0.00010924974776571617, 0.021895786747336388, -0.03612559288740158, 0.04412253201007843, -0.1556178629398346, 0.04539759084582329, 0.11629397422075272, 0.0082391407340765, 0.13792338967323303, 0.0874880701303482, -0.015470224432647228, 0.023061316460371017, 0.07044778019189835, -0.1783456653356552, -0.12002139538526535, -0.021204201504588127, -0.10293228179216385, -0.10630379617214203, 0.07048380374908447, 0.11642872542142868, -0.08051175624132156, -0.0032697708811610937, -0.03338932618498802, -0.002398083917796612, -0.045367274433374405, 0.19990277290344238, 0.06402453035116196, 0.04304961860179901, -0.07506072521209717, 0.07334177196025848, 0.02795376256108284, -0.07056327909231186, 0.010091271251440048, 0.06686662137508392, -0.08924659341573715, -0.04328304901719093, 0.0805317834019661, 0.19797521829605103, -0.046648506075143814, -0.030927501618862152, -0.16532011330127716, -0.1256018877029419, 0.04047364741563797, 0.18637040257453918, 0.09938760846853256, 0.015670688822865486, -0.006234724074602127, 0.018813880160450935, -0.13572007417678833, 0.10761316120624542, 0.0626986026763916, 0.08804449439048767, -0.1529334932565689, 0.1471318006515503, -0.010293965227901936, 0.013347861357033253, -0.03818299621343613, 0.040131811052560806, -0.13285842537879944, 0.009279435500502586, -0.15116706490516663, -0.026139529421925545, -0.020664941519498825, -0.004702982492744923, 0.0038450665306299925, -0.06878023594617844, -0.06672565639019012, 0.004549084696918726, -0.10527920722961426, -0.017831189557909966, 0.03839845955371857, 0.04630666971206665, -0.11241300404071808, -0.03637690097093582, 0.03561455383896828, -0.0701676607131958, 0.07001429051160812, 0.0569072961807251, 0.020313125103712082, 0.05589357018470764, -0.18153172731399536, 0.022031325846910477, 0.07563385367393494, -0.019762467592954636, 0.05113384872674942, -0.09604401886463165, -0.007224357686936855, -0.000882626511156559, 0.0843970775604248, 0.0240127295255661, 0.08384225517511368, -0.1162826269865036, 0.015226854011416435, -0.034702591598033905, -0.060513854026794434, -0.05049789324402809, 0.004462508484721184, 0.08019345253705978, -0.010491644032299519, 0.18977822363376617, -0.11806643754243851, 0.02482336014509201, -0.21429960429668427, -0.001632603001780808, -0.016387011855840683, -0.11137702316045761, -0.13410547375679016, -0.07050896435976028, 0.06730072945356369, -0.04376167058944702, 0.132027730345726, 0.008805757388472557, 0.07842163741588593, 0.02781621925532818, -0.046743009239435196, 0.042743127793073654, 0.047506626695394516, 0.20966953039169312, 0.039334677159786224, -0.05808943882584572, 0.02661319449543953, 0.07724803686141968, 0.13747042417526245, 0.08204785734415054, 0.18618279695510864, 0.14034932851791382, -0.08016888797283173, 0.1144966259598732, 0.02067272551357746, -0.036199428141117096, -0.12052694708108902, 0.031158391386270523, -0.07022324204444885, 0.04976679012179375, -0.031423673033714294, 0.18487785756587982, 0.09554543346166611, -0.13711398839950562, 0.009660129435360432, -0.07235065847635269, -0.08334221690893173, -0.11642644554376602, -0.003807893255725503, -0.10567468404769897, -0.18451263010501862, -0.0028613319154828787, -0.10795556008815765, -0.010395009070634842, 0.09470994770526886, -0.0006326237344183028, -0.016928160563111305, 0.217576265335083, 0.02912532165646553, 
0.02023647353053093, 0.04581863805651665, -0.007642359938472509, -0.02717393822968006, -0.07889489084482193, -0.09972520172595978, -0.005557864438742399, -0.0275256410241127, 0.02114163339138031, -0.04802551493048668, -0.07750575989484787, 0.039587315171957016, -0.024221906438469887, -0.11037300527095795, 0.007659269496798515, 0.044130273163318634, 0.03248177096247673, -0.004760925658047199, 0.018263772130012512, -0.008852723985910416, -0.002294109668582678, 0.2516554594039917, -0.07935667783021927, -0.08114025741815567, -0.1070011705160141, 0.26146945357322693, 0.040377624332904816, 0.036260608583688736, -0.0065500009804964066, -0.09322600066661835, 0.04042091220617294, 0.247446671128273, 0.18293674290180206, -0.10500648617744446, 0.005468081217259169, -0.022213425487279892, -0.01001257635653019, -0.022829817607998848, 0.094837486743927, 0.11291578412055969, 0.002996844472363591, -0.08361342549324036, -0.039123039692640305, -0.029389889910817146, -0.015074989758431911, -0.04455893114209175, 0.05680490657687187, 0.03335852548480034, 0.03848738968372345, -0.06669191271066666, 0.06355331838130951, -0.0633363276720047, -0.08812958002090454, 0.057244908064603806, -0.20818719267845154, -0.1257605105638504, -0.02979658544063568, 0.07972295582294464, -0.0052509065717458725, 0.07743354141712189, -0.034042514860630035, -0.014508995227515697, 0.06555590778589249, -0.02852596342563629, -0.06836875528097153, -0.08170207589864731, 0.04401322081685066, -0.11127916723489761, 0.20393720269203186, -0.043770939111709595, 0.03614896535873413, 0.12604768574237823, 0.04011412709951401, -0.0732911229133606, 0.09566275030374527, 0.03957446664571762, -0.05695988982915878, 0.03076184168457985, 0.08133060485124588, -0.040208328515291214, 0.09621363878250122, 0.05056975409388542, -0.14118115603923798, 0.030501453205943108, -0.07143131643533707, -0.07230124622583389, -0.054273031651973724, -0.06552088260650635, -0.054718248546123505, 0.12572266161441803, 0.18419279158115387, -0.042353175580501556, 0.04343142732977867, -0.03828241676092148, 0.025528017431497574, 0.07604257762432098, 0.06793898344039917, -0.04731309786438942, -0.2730848789215088, 0.012540661729872227, 0.10366404801607132, -0.02108367532491684, -0.2804548740386963, -0.0785892978310585, -0.021285319700837135, -0.04023824259638786, -0.0835651084780693, 0.0988161712884903, 0.12079951912164688, 0.06112160161137581, -0.05888566002249718, -0.12983964383602142, -0.06879932433366776, 0.20033463835716248, -0.11282160878181458, -0.11816783994436264 ]
null
null
peft
# Model Card for CNC-7b

## Model Details

- Name: CNC-7b
- Version: 1.0
- Release Date: November 13, 2023

## Intended Use

CNC-7b is a lora adapter for Mistral-7b (Instruct) intended to be clear, concise, and helpful in short text conversations. It is designed for conversational agents and assistants.

## Training Data

CNC-7b was trained on synthetic conversational data generated by Newstar using ChatGPT. The data was shaped using custom instructions to encourage clear, concise, and helpful responses.

## Evaluation Data

CNC-7b was evaluated on a test set of human-human conversations to measure whether responses were clear, concise, and on-topic.

## Ethical Considerations

- CNC-7b has limited conversational abilities and is not intended for complex conversations.
- The training data was filtered to remove harmful, unethical, or dangerous content.
- The model has no notion of facts about the real world. Any factual statements generated should not be assumed to be true.

## Caveats and Recommendations

- Only the Peft adapter parameters are released for CNC-7b. The full model is not released.
- CNC-7b has limited knowledge outside of conversational abilities. Do not use for anything requiring real world knowledge.
- Monitor CNC-7b conversations for harmful content generated, and re-train the model as needed.
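Per the caveats above, only the adapter weights are published, so inference would go through `peft` on top of the base model named in the metadata. A minimal sketch under that assumption (the prompt is illustrative, and fp16 loading is a memory-saving choice, not something the card prescribes):

```python
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "mistralai/Mistral-7B-v0.1"   # base model named in the card metadata
adapter_id = "NewstaR/CNC-7b-lora"      # this repository

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype=torch.float16)

# Attach the LoRA weights on top of the (frozen, unchanged) base parameters.
model = PeftModel.from_pretrained(base, adapter_id)

prompt = "Explain what a LoRA adapter is in two sentences."  # illustrative only
inputs = tokenizer(prompt, return_tensors="pt")
out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```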
{"language": ["en", "tl"], "license": "cc-by-sa-4.0", "library_name": "peft", "tags": ["mistral", "lora", "instruct", "custom code"], "datasets": ["NewstaR/clearNconcise"], "pipeline_tag": "text-generation", "inference": false, "base_model": "mistralai/Mistral-7B-v0.1"}
text-generation
NewstaR/CNC-7b-lora
[ "peft", "mistral", "lora", "instruct", "custom code", "text-generation", "en", "tl", "dataset:NewstaR/clearNconcise", "base_model:mistralai/Mistral-7B-v0.1", "license:cc-by-sa-4.0", "region:us" ]
2023-11-12T17:35:38+00:00
[]
[ "en", "tl" ]
TAGS #peft #mistral #lora #instruct #custom code #text-generation #en #tl #dataset-NewstaR/clearNconcise #base_model-mistralai/Mistral-7B-v0.1 #license-cc-by-sa-4.0 #region-us
# Model Card for CNC-7b

## Model Details

- Name: CNC-7b
- Version: 1.0
- Release Date: November 13, 2023

## Intended Use

CNC-7b is a lora adapter for Mistral-7b (Instruct) intended to be clear, concise, and helpful in short text conversations. It is designed for conversational agents and assistants.

## Training Data

CNC-7b was trained on synthetic conversational data generated by Newstar using ChatGPT. The data was shaped using custom instructions to encourage clear, concise, and helpful responses.

## Evaluation Data

CNC-7b was evaluated on a test set of human-human conversations to measure whether responses were clear, concise, and on-topic.

## Ethical Considerations

- CNC-7b has limited conversational abilities and is not intended for complex conversations.
- The training data was filtered to remove harmful, unethical, or dangerous content.
- The model has no notion of facts about the real world. Any factual statements generated should not be assumed to be true.

## Caveats and Recommendations

- Only the Peft adapter parameters are released for CNC-7b. The full model is not released.
- CNC-7b has limited knowledge outside of conversational abilities. Do not use for anything requiring real world knowledge.
- Monitor CNC-7b conversations for harmful content generated, and re-train the model as needed.
[ "# Model Card for CNC-7b", "## Model Details\n\n- Name: CNC-7b\n- Version: 1.0\n- Release Date: November 13, 2023", "## Intended Use\n\nCNC-7b is a lora adapter for Mistral-7b (Instruct) intended to be clear, concise, and helpful in short text conversations. It is designed for conversational agents and assistants.", "## Training Data\n\nCNC-7b was trained on synthetic conversational data generated by Newstar using ChatGPT. The data was shaped using custom instructions to encourage clear, concise, and helpful responses.", "## Evaluation Data\n\nCNC-7b was evaluated on a test set of human-human conversations to measure whether responses were clear, concise, and on-topic.", "## Ethical Considerations\n\n- CNC-7b has limited conversational abilities and is not intended for complex conversations.\n- The training data was filtered to remove harmful, unethical, or dangerous content.\n- The model has no notion of facts about the real world. Any factual statements generated should not be assumed to be true.", "## Caveats and Recommendations\n\n- Only the Peft adapter parameters are released for CNC-7b. The full model is not released.\n- CNC-7b has limited knowledge outside of conversational abilities. Do not use for anything requiring real world knowledge.\n- Monitor CNC-7b conversations for harmful content generated, and re-train the model as needed." ]
[ "TAGS\n#peft #mistral #lora #instruct #custom code #text-generation #en #tl #dataset-NewstaR/clearNconcise #base_model-mistralai/Mistral-7B-v0.1 #license-cc-by-sa-4.0 #region-us \n", "# Model Card for CNC-7b", "## Model Details\n\n- Name: CNC-7b\n- Version: 1.0\n- Release Date: November 13, 2023", "## Intended Use\n\nCNC-7b is a lora adapter for Mistral-7b (Instruct) intended to be clear, concise, and helpful in short text conversations. It is designed for conversational agents and assistants.", "## Training Data\n\nCNC-7b was trained on synthetic conversational data generated by Newstar using ChatGPT. The data was shaped using custom instructions to encourage clear, concise, and helpful responses.", "## Evaluation Data\n\nCNC-7b was evaluated on a test set of human-human conversations to measure whether responses were clear, concise, and on-topic.", "## Ethical Considerations\n\n- CNC-7b has limited conversational abilities and is not intended for complex conversations.\n- The training data was filtered to remove harmful, unethical, or dangerous content.\n- The model has no notion of facts about the real world. Any factual statements generated should not be assumed to be true.", "## Caveats and Recommendations\n\n- Only the Peft adapter parameters are released for CNC-7b. The full model is not released.\n- CNC-7b has limited knowledge outside of conversational abilities. Do not use for anything requiring real world knowledge.\n- Monitor CNC-7b conversations for harmful content generated, and re-train the model as needed." ]
[ 72, 7, 20, 50, 47, 37, 77, 81 ]
[ "passage: TAGS\n#peft #mistral #lora #instruct #custom code #text-generation #en #tl #dataset-NewstaR/clearNconcise #base_model-mistralai/Mistral-7B-v0.1 #license-cc-by-sa-4.0 #region-us \n# Model Card for CNC-7b## Model Details\n\n- Name: CNC-7b\n- Version: 1.0\n- Release Date: November 13, 2023## Intended Use\n\nCNC-7b is a lora adapter for Mistral-7b (Instruct) intended to be clear, concise, and helpful in short text conversations. It is designed for conversational agents and assistants.## Training Data\n\nCNC-7b was trained on synthetic conversational data generated by Newstar using ChatGPT. The data was shaped using custom instructions to encourage clear, concise, and helpful responses.## Evaluation Data\n\nCNC-7b was evaluated on a test set of human-human conversations to measure whether responses were clear, concise, and on-topic.## Ethical Considerations\n\n- CNC-7b has limited conversational abilities and is not intended for complex conversations.\n- The training data was filtered to remove harmful, unethical, or dangerous content.\n- The model has no notion of facts about the real world. Any factual statements generated should not be assumed to be true.## Caveats and Recommendations\n\n- Only the Peft adapter parameters are released for CNC-7b. The full model is not released.\n- CNC-7b has limited knowledge outside of conversational abilities. Do not use for anything requiring real world knowledge.\n- Monitor CNC-7b conversations for harmful content generated, and re-train the model as needed." ]
[ -0.03802156820893288, 0.10403156280517578, -0.0038227131590247154, 0.04984186589717865, 0.06264986097812653, -0.020623620599508286, 0.06629223376512527, 0.0455867201089859, 0.025111690163612366, 0.106367327272892, 0.014057626016438007, 0.010684250853955746, 0.022875262424349785, -0.0014400057261809707, -0.043006204068660736, -0.08097219467163086, 0.0693630501627922, -0.05471200495958328, 0.22237950563430786, 0.13421547412872314, 0.052580226212739944, -0.07802882045507431, 0.06473536789417267, -0.05973023548722267, -0.041819602251052856, -0.07586939632892609, 0.04021608829498291, 0.013141320087015629, 0.09635217487812042, 0.028480008244514465, 0.08590549230575562, 0.039749011397361755, -0.02635367028415203, -0.17196330428123474, 0.029165193438529968, 0.05597642436623573, -0.015044362284243107, 0.0686464011669159, -0.03648778051137924, 0.02787688933312893, 0.11812039464712143, -0.03726952522993088, 0.11365887522697449, 0.09404928982257843, -0.113136425614357, -0.0808330699801445, -0.13575997948646545, 0.039747532457113266, 0.11921224743127823, 0.18696245551109314, -0.06837780773639679, 0.1643185317516327, -0.06240834295749664, 0.03790943697094917, 0.06978248804807663, -0.1095094159245491, -0.020140787586569786, 0.0792788416147232, -0.024046553298830986, 0.1472833752632141, -0.08757323771715164, 0.0144644258543849, 0.0447794571518898, 0.015755172818899155, -0.01992839202284813, -0.040033578872680664, -0.005735814571380615, -0.08966604620218277, -0.09085992723703384, -0.017673103138804436, 0.18540239334106445, 0.05527850612998009, -0.033002566546201706, -0.12938620150089264, -0.04725025221705437, 0.026859810575842857, 0.03368641063570976, -0.05221845954656601, -0.038107842206954956, -0.019377514719963074, -0.040812015533447266, -0.044445816427469254, -0.07839907705783844, -0.02663888782262802, -0.04487420618534088, 0.08959733694791794, 0.0021257020998746157, 0.064745232462883, -0.049455296248197556, 0.04745981842279434, 0.0554627887904644, -0.053907543420791626, -0.06558217108249664, -0.03880999609827995, -0.07855106145143509, -0.05761852115392685, -0.03451605513691902, -0.0001194203578052111, 0.03542834520339966, -0.053854044526815414, -0.1989445984363556, 0.055190976709127426, -0.04377423971891403, 0.020781179890036583, 0.0465150848031044, 0.004029298201203346, -0.02929592691361904, 0.08739997446537018, -0.004993196576833725, 0.03270503878593445, 0.07005053013563156, -0.028775008395314217, -0.03325456753373146, 0.08559268712997437, 0.022918393835425377, 0.14308124780654907, 0.017472025007009506, 0.013203493319451809, -0.070957712829113, -0.023893648758530617, 0.1240275651216507, -0.07159258425235748, -0.003437252016738057, 0.10171538591384888, -0.0120021291077137, 0.019116153940558434, -0.01416848972439766, 0.0014552128268405795, -0.035496536642313004, -0.03047439455986023, -0.06812455505132675, -0.006888668984174728, -0.09384393692016602, -0.06734641641378403, 0.06603172421455383, 0.12780386209487915, -0.09686826169490814, -0.10387495905160904, -0.12188338488340378, -0.06712675094604492, -0.005199830047786236, -0.03947790712118149, 0.029565878212451935, -0.036514054983854294, -0.025137094780802727, -0.041859012097120285, 0.003555722301825881, -0.1442980170249939, -0.01546574104577303, 0.05408855155110359, -0.11298699676990509, 0.01812528632581234, -0.028501493856310844, 0.002428270410746336, -0.08202050626277924, 0.023620469495654106, -0.04391786828637123, 0.09050782769918442, -0.06455440819263458, -0.0076478272676467896, -0.06496524810791016, -0.06245429441332817, -0.0898379310965538, 
-0.026120692491531372, -0.022583026438951492, 0.18202410638332367, -0.20948582887649536, 0.017107095569372177, 0.12452831119298935, -0.1504373401403427, -0.06914836913347244, 0.12644846737384796, -0.062041692435741425, 0.06672319769859314, 0.14412295818328857, 0.027412807568907738, -0.023740822449326515, -0.04123923182487488, -0.00956509169191122, -0.06532510370016098, 0.01637846790254116, 0.20302283763885498, -0.008412932977080345, -0.10743201524019241, -0.07807799428701401, 0.008733265101909637, -0.05421791970729828, -0.05185054615139961, -0.02600451558828354, -0.07250310480594635, 0.0010370828676968813, -0.006464021746069193, 0.10445946455001831, -0.011076940223574638, -0.0625225082039833, -0.010296272113919258, -0.04887937754392624, -0.04940323159098625, 0.13637293875217438, -0.026180338114500046, 0.02305319346487522, -0.08546620607376099, 0.06443768739700317, -0.02377942204475403, 0.0060731894336640835, -0.14275698363780975, -0.11852199584245682, 0.018976444378495216, -0.06390586495399475, 0.069176085293293, 0.10676196217536926, 0.02041739970445633, 0.011194465681910515, -0.04772305116057396, 0.021371832117438316, -0.03683697432279587, -0.00343278213404119, -0.052610866725444794, -0.1132199689745903, 0.04576170817017555, -0.048314668238162994, 0.20938071608543396, -0.19592775404453278, 0.00840905774384737, 0.12666192650794983, 0.03778871148824692, 0.05362170562148094, -0.0786275714635849, 0.028369378298521042, -0.008628392592072487, 0.01961064524948597, -0.03931549936532974, 0.022376343607902527, -0.006446086801588535, -0.07590295374393463, -0.044642373919487, -0.22127491235733032, -0.07076966762542725, 0.05927015468478203, -0.029996002092957497, -0.11780315637588501, -0.002559739165008068, -0.010387355461716652, 0.04928088188171387, -0.11306583881378174, -0.07066896557807922, 0.17376826703548431, 0.020855652168393135, 0.020397288724780083, -0.08019367605447769, -0.04305603727698326, -0.01761442795395851, 0.008503977209329605, -0.04521527886390686, 0.02729508839547634, -0.0064633856527507305, -0.19042564928531647, 0.037923287600278854, 0.037447717040777206, -0.030156219378113747, 0.13419121503829956, 0.06861620396375656, -0.040412336587905884, -0.06992952525615692, 0.11289943754673004, -0.011434431187808514, 0.12783822417259216, -0.06477561593055725, 0.039108600467443466, 0.03940724581480026, 0.012497222051024437, 0.01994864083826542, -0.07653940469026566, 0.017568105831742287, 0.02719932608306408, -0.04343597963452339, -0.0736035630106926, -0.005602122750133276, -0.0092772226780653, 0.14506113529205322, -0.008552294224500656, 0.012273591011762619, 0.021175773814320564, -0.04658692330121994, -0.18347296118736267, 0.1411295086145401, -0.07452531903982162, -0.2534262537956238, -0.008141692727804184, 0.13334760069847107, -0.07093757390975952, 0.018610749393701553, 0.045722123235464096, -0.14191477000713348, -0.06598244607448578, -0.11469662934541702, -0.04072537645697594, 0.031156815588474274, -0.10437273979187012, 0.03958940878510475, -0.04107307270169258, 0.036985523998737335, -0.08290423452854156, 0.050983525812625885, -0.039998944848775864, -0.07512917369604111, 0.006753464695066214, -0.03724364563822746, 0.05758378654718399, 0.21549402177333832, 0.05633041635155678, -0.0027153834234923124, -0.016458095982670784, 0.22052542865276337, -0.1312652826309204, 0.026016438379883766, 0.0737418457865715, -0.03927282243967056, 0.02033137157559395, 0.10795942693948746, 0.03073461540043354, -0.06193084269762039, 0.051632869988679886, 0.09109555929899216, -0.030363716185092926, 
-0.19148816168308258, -0.14149212837219238, -0.02967698499560356, -0.09929247200489044, -0.03245680406689644, 0.039969198405742645, 0.15168166160583496, 0.03491263836622238, -0.1434619128704071, -0.12808118760585785, 0.12848803400993347, 0.06549617648124695, 0.05876659229397774, -0.08405740559101105, 0.057787857949733734, -0.002063687890768051, 0.0036741402000188828, 0.062848761677742, -0.05182685703039169, 0.25969237089157104, 0.035463251173496246, 0.2149062305688858, 0.08078425377607346, 0.06716041266918182, 0.013998167589306831, -0.023396912962198257, 0.011410658247768879, 0.020236704498529434, -0.009130051359534264, -0.08543354272842407, -0.01894409768283367, 0.03320637717843056, 0.02579432725906372, 0.04421784356236458, -0.007469321135431528, -0.038790106773376465, 0.14387871325016022, 0.09534572064876556, 0.00834301020950079, -0.103197380900383, -0.11504372209310532, 0.05707114189863205, 0.03145429491996765, -0.04724154248833656, 0.035964928567409515, 0.16179883480072021, -0.0808260589838028, -0.033868853002786636, 0.010495821014046669, 0.06543087959289551, -0.11983159184455872, 0.011769776232540607, -0.05064302310347557, -0.06262605637311935, -0.031890422105789185, 0.06642816215753555, -0.1190216913819313, 0.1322847604751587, 0.02464515157043934, 0.022195693105459213, -0.06826313585042953, -0.08106651902198792, -0.008595848456025124, 0.09382254630327225, 0.1475033164024353, 0.03974652290344238, -0.04193150997161865, -0.05423298105597496, -0.01987602189183235, 0.05455075949430466, 0.0694463849067688, -0.029273521155118942, 0.04374058544635773, 0.009141316637396812, 0.001679789973422885, 0.010843636468052864, -0.008784881792962551, -0.16323541104793549, -0.0924765095114708, 0.1143660694360733, 0.04619675129652023, -0.016828574240207672, -0.04952813312411308, -0.04824083298444748, -0.013809027150273323, 0.13411369919776917, -0.1386929601430893, -0.08677799254655838, -0.10177017748355865, -0.0829082652926445, 0.09023533016443253, -0.057706817984580994, -0.017367947846651077, 0.02169206738471985, 0.139324352145195, -0.01537261065095663, -0.03897303715348244, 0.03164470195770264, -0.06638438999652863, -0.20052969455718994, -0.058894332498311996, 0.1260243058204651, 0.1245570033788681, 0.09543994814157486, 0.028727391734719276, -0.02304597571492195, 0.00660445261746645, -0.1658315658569336, -0.044907767325639725, 0.17734067142009735, -0.06782340258359909, 0.08033498376607895, -0.04454709216952324, -0.11781389266252518, -0.14403800666332245, -0.007536113262176514, 0.08455672860145569, 0.15684247016906738, -0.03686389699578285, 0.08971333503723145, 0.2515536844730377, -0.04467448964715004, -0.1335686296224594, 0.053997352719306946, -0.0636674091219902, -0.058870602399110794, -0.006085450295358896, -0.06971746683120728, -0.03916599228978157, 0.05201815441250801, -0.0032680414151400328, 0.09308438003063202, -0.2617611885070801, -0.08775842934846878, 0.10460618883371353, 0.07179663330316544, 0.20223499834537506, -0.06023496389389038, -0.06001134216785431, -0.018757227808237076, -0.07255282253026962, 0.09732550382614136, -0.2684986889362335, 0.05340968072414398, 0.04003214091062546, 0.11307721585035324, 0.029958397150039673, -0.037728939205408096, 0.1479177325963974, 0.03572209179401398, 0.09659752994775772, -0.06634286046028137, -0.15515348315238953, 0.008982221595942974, -0.04255321994423866, 0.08883429318666458, 0.055765047669410706, 0.09034470468759537, -0.019011691212654114, -0.030201472342014313, -0.14865198731422424, -0.0028244308196008205, -0.08430175483226776, -0.038595668971538544, 
-0.10897761583328247, 0.09572088718414307, 0.13935741782188416, 0.00803602859377861, -0.023631948977708817, -0.1298517882823944, 0.09514838457107544, 0.1488780975341797, 0.10763775557279587, -0.023449484258890152, -0.049041975289583206, 0.01404334045946598, -0.04981374740600586, 0.12867358326911926, -0.032264210283756256, 0.04632529243826866, 0.08175802230834961, 0.03183490037918091, 0.19119147956371307, 0.006747179199010134, -0.10966551303863525, 0.05537402257323265, -0.039922282099723816, -0.0852966383099556, -0.13653971254825592, -0.01798417791724205, 0.14822505414485931, -0.1255863904953003, 0.010026187635958195, 0.15359432995319366, -0.02000252716243267, 0.005454817321151495, -0.0008953369688242674, 0.07086753100156784, -0.01722974330186844, 0.05994994565844536, -0.031145058572292328, 0.046411652117967606, -0.09663820266723633, 0.013389816507697105, 0.08346544206142426, -0.07885599881410599, -0.0007594229537062347, -0.004470692481845617, -0.12334056198596954, -0.09108808636665344, -0.09041344374418259, 0.03772220388054848, -0.042403072118759155, -0.04134613275527954, 0.007690526079386473, -0.16826841235160828, -0.010661639273166656, 0.13285762071609497, -0.01644762046635151, 0.09666725993156433, 0.0004293847887311131, 0.02081727795302868, -0.04767634719610214, 0.04501274228096008, -0.061267368495464325, -0.040273524820804596, -0.034976303577423096, 0.10504504293203354, 0.027151983231306076, -0.04092874750494957, -0.0457087866961956, -0.07281161844730377, -0.09497465938329697, 0.01928827166557312, -0.1509200930595398, -0.014627886936068535, -0.07788334041833878, 0.008287367410957813, -0.011838548816740513, -0.03790493682026863, -0.0023931963369250298, 0.046186331659555435, -0.03644419461488724, 0.00030402731499634683, 0.02721930854022503, 0.030999379232525826, -0.10778097808361053, 0.009961429983377457, 0.020165283232927322, -0.041169773787260056, 0.09889692813158035, 0.03989643603563309, -0.07210861146450043, 0.04098409786820412, -0.24146828055381775, 0.08320952206850052, -0.02136361226439476, 0.01561197079718113, 0.019458826631307602, -0.17437322437763214, -0.05916757509112358, -0.005110444501042366, -0.017195142805576324, 0.05118522420525551, 0.05356930196285248, -0.012920517474412918, -0.0407777763903141, -0.02105148322880268, -0.0810709148645401, -0.020171843469142914, 0.01386138703674078, 0.011455315165221691, 0.0011229220544919372, 0.1145876944065094, -0.01982390135526657, 0.05709503963589668, -0.14359496533870697, -0.0016589384758844972, 0.05629068613052368, 0.07436736673116684, -0.04736693948507309, 0.02718743495643139, 0.059011735022068024, -0.019059693440794945, 0.08523508906364441, -0.12891612946987152, 0.015762774273753166, 0.027061671018600464, 0.043340716511011124, -0.09002815932035446, -0.03740791976451874, 0.0038927088025957346, -0.026830026879906654, -0.025634609162807465, -0.011293472722172737, -0.04887877032160759, -0.03284626826643944, -0.05695180222392082, 0.19632171094417572, 0.11193054914474487, 0.14787892997264862, -0.004525911994278431, 0.04360487684607506, -0.02790573239326477, 0.06545992940664291, -0.019017115235328674, -0.05259905755519867, -0.02361222542822361, -0.03764934465289116, -0.06133512780070305, 0.21606707572937012, -0.1125158965587616, 0.06622139364480972, -0.03521287068724632, -0.07713860273361206, -0.03814327344298363, -0.16629351675510406, -0.0540803037583828, -0.014777418226003647, -0.0114507544785738, -0.1115262433886528, 0.006403191946446896, 0.03966449573636055, 0.022466739639639854, -0.04904772341251373, 0.20710822939872742, 
-0.08665129542350769, -0.08169377595186234, 0.043052881956100464, 0.04047861695289612, 0.006911477539688349, 0.016011090949177742, 0.03245268389582634, 0.07915925234556198, 0.014860634692013264, 0.08128023147583008, 0.057106345891952515, 0.0568871833384037, -0.022912973538041115, -0.021759850904345512, -0.07038091868162155, 0.01909947767853737, 0.044470153748989105, 0.03410574048757553, 0.2900153696537018, 0.03688233718276024, 0.013346336781978607, -0.007518107071518898, 0.12723392248153687, -0.07184138149023056, 0.01023270282894373, -0.16757972538471222, 0.12890109419822693, -0.021303977817296982, -0.009447461925446987, 0.015589356422424316, -0.09675715863704681, 0.07642645388841629, 0.15107983350753784, 0.13734611868858337, -0.08323298394680023, 0.025512786582112312, -0.033549949526786804, 0.016995402052998543, -0.02869420312345028, 0.015499240718781948, 0.04951931908726692, 0.31553953886032104, -0.053870633244514465, 0.096666119992733, 0.02184842713177204, 0.03093479573726654, 0.014460001140832901, 0.02170637436211109, -0.022493069991469383, -0.006427729967981577, -0.0794663056731224, 0.09301363676786423, -0.11533170938491821, -0.1543462723493576, 0.029542770236730576, -0.05697194114327431, -0.037876106798648834, 0.03542264923453331, -0.024810390546917915, 0.010559800080955029, 0.052926238626241684, -0.014270743355154991, -0.03249139338731766, 0.19536471366882324, 0.002908329013735056, -0.05602037534117699, -0.1171477735042572, 0.10171076655387878, 0.0353567898273468, 0.16545608639717102, 0.029712585732340813, 0.15532396733760834, 0.0726882666349411, -0.05361095443367958, -0.023081906139850616, 0.1767149269580841, 0.040142036974430084, -0.08220909535884857, -0.008716175332665443, 0.17760224640369415, 0.006183035671710968, 0.2130105197429657, 0.1517312377691269, -0.07053712755441666, 0.05140317976474762, 0.07388519495725632, -0.030515113845467567, -0.10419559478759766, 0.0952015221118927, -0.06324345618486404, 0.1320500373840332, 0.14891298115253448, -0.034789469093084335, -0.003740194020792842, 0.030690031126141548, 0.022031735628843307, -0.03828395903110504, 0.04415587708353996, -0.012026102282106876, -0.17767883837223053, 0.09486063569784164, -0.07948868721723557, 0.04014154523611069, -0.22450213134288788, -0.04770440608263016, 0.023670300841331482, -0.05760294198989868, 0.041938867419958115, 0.06015969440340996, -0.0330284908413887, 0.0030645125079900026, -0.014419415034353733, -0.06234428286552429, 0.046666353940963745, 0.11122210323810577, -0.06412259489297867, -0.026722420006990433 ]
null
null
transformers
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # FakeNews-bert-large-cased-stable This model is a fine-tuned version of [bert-large-cased](https://huggingface.co/bert-large-cased) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1020 - Accuracy: 0.9827 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 3e-05 - train_batch_size: 4 - eval_batch_size: 4 - seed: 42 - gradient_accumulation_steps: 2 - total_train_batch_size: 8 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-06 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.3549 | 1.0 | 802 | 0.3255 | 0.9453 | | 0.1063 | 2.0 | 1605 | 0.1305 | 0.9771 | | 0.0412 | 3.0 | 2407 | 0.1020 | 0.9827 | | 0.0096 | 4.0 | 3210 | 0.1242 | 0.9822 | | 0.0001 | 5.0 | 4010 | 0.1315 | 0.9827 | ### Framework versions - Transformers 4.35.0 - Pytorch 2.1.0+cu118 - Datasets 2.14.6 - Tokenizers 0.14.1
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "metrics": ["accuracy"], "base_model": "bert-large-cased", "model-index": [{"name": "FakeNews-bert-large-cased-stable", "results": []}]}
text-classification
Denyol/FakeNews-bert-large-cased-stable
[ "transformers", "tensorboard", "safetensors", "bert", "text-classification", "generated_from_trainer", "base_model:bert-large-cased", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T17:39:25+00:00
[]
[]
TAGS #transformers #tensorboard #safetensors #bert #text-classification #generated_from_trainer #base_model-bert-large-cased #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
FakeNews-bert-large-cased-stable
================================

This model is a fine-tuned version of bert-large-cased on an unknown dataset.
It achieves the following results on the evaluation set:

* Loss: 0.1020
* Accuracy: 0.9827

Model description
-----------------

More information needed

Intended uses & limitations
---------------------------

More information needed

Training and evaluation data
----------------------------

More information needed

Training procedure
------------------

### Training hyperparameters

The following hyperparameters were used during training:

* learning\_rate: 3e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-06
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_ratio: 0.1
* num\_epochs: 5

### Training results

### Framework versions

* Transformers 4.35.0
* Pytorch 2.1.0+cu118
* Datasets 2.14.6
* Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 3e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-06\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 5", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #tensorboard #safetensors #bert #text-classification #generated_from_trainer #base_model-bert-large-cased #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 3e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-06\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 5", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 68, 144, 4, 33 ]
[ "passage: TAGS\n#transformers #tensorboard #safetensors #bert #text-classification #generated_from_trainer #base_model-bert-large-cased #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 3e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-06\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.13622143864631653, 0.10832702368497849, -0.0013251675991341472, 0.08092275261878967, 0.14709462225437164, 0.016844678670167923, 0.12600374221801758, 0.12634369730949402, -0.12376739829778671, 0.0713043212890625, 0.10893415659666061, 0.07735035568475723, 0.029939601197838783, 0.13497595489025116, -0.04044714942574501, -0.2900691628456116, -0.001650635851547122, 0.020684370771050453, -0.17150850594043732, 0.12575723230838776, 0.1063055545091629, -0.1274319738149643, 0.06749046593904495, 0.025349289178848267, -0.1730470359325409, -0.0017920657992362976, -0.006476173643022776, -0.07661715894937515, 0.12164643406867981, 0.03035437874495983, 0.12452451884746552, 0.034742556512355804, 0.11298993974924088, -0.17529094219207764, 0.004448594059795141, 0.07278916984796524, 0.019246846437454224, 0.10356540232896805, 0.08204023540019989, 0.0031113331206142902, 0.12258458882570267, -0.07178757339715958, 0.08554994314908981, 0.04114539176225662, -0.10736438632011414, -0.2880430519580841, -0.09673027694225311, 0.09373269230127335, 0.1218402162194252, 0.06844129413366318, -0.01114147249609232, 0.10388034582138062, -0.07876619696617126, 0.08086510002613068, 0.2507927417755127, -0.2935090661048889, -0.0931299477815628, 0.018103988841176033, 0.04399682581424713, 0.021945036947727203, -0.13124780356884003, -0.017117343842983246, 0.06978506594896317, 0.027606649324297905, 0.11942535638809204, 0.010641627945005894, -0.014771762304008007, 0.007933279499411583, -0.14682786166667938, -0.0395718552172184, 0.12258773297071457, 0.05893966928124428, -0.051173869520425797, -0.06995599716901779, -0.04114453122019768, -0.20383986830711365, -0.035144008696079254, -0.0037640163209289312, 0.037811145186424255, -0.07419242709875107, -0.12378986179828644, 0.022609135136008263, -0.09867586940526962, -0.10434820502996445, 0.004425793420523405, 0.18922464549541473, 0.05451629310846329, 0.0006437544361688197, -0.013588686473667622, 0.1378295123577118, 0.047055769711732864, -0.16705501079559326, 0.006378313060849905, 0.024268943816423416, -0.04478941857814789, -0.03208795562386513, -0.04150055721402168, -0.014019698835909367, 0.005072345025837421, 0.1622665822505951, -0.08360908180475235, 0.029351040720939636, 0.034447137266397476, 0.02295653708279133, -0.09609776735305786, 0.19199679791927338, -0.07351403683423996, -0.03355436027050018, -0.014660509303212166, 0.11569564044475555, 0.022524245083332062, -0.004078416619449854, -0.08863291144371033, 0.026512127369642258, 0.11027251183986664, 0.0516665056347847, -0.034609105437994, 0.0387955941259861, -0.033957868814468384, -0.023425566032528877, 0.06919112056493759, -0.08566223829984665, 0.030695384368300438, 0.01673894189298153, -0.09852689504623413, -0.04126797243952751, 0.013241954147815704, 0.008713293820619583, 0.017641901969909668, 0.17263923585414886, -0.09797462075948715, -0.009120878763496876, -0.10232451558113098, -0.10442764312028885, 0.024834802374243736, -0.04700985178351402, -0.005156930070370436, -0.0878976434469223, -0.13275372982025146, -0.02692079171538353, 0.03437194973230362, -0.03972695395350456, -0.07420143485069275, -0.02921159192919731, -0.1075720563530922, 0.033370744436979294, -0.01576339639723301, 0.16256090998649597, -0.049911823123693466, 0.11879025399684906, 0.06124080717563629, 0.051361825317144394, 0.016816450282931328, 0.040126487612724304, -0.08194573223590851, 0.04149767383933067, -0.1862155795097351, 0.021609121933579445, -0.0730518326163292, 0.06622998416423798, -0.10082405805587769, -0.13041731715202332, 0.025390537455677986, 
-0.007811336312443018, 0.08456403762102127, 0.118616983294487, -0.14142368733882904, -0.09487524628639221, 0.1572830080986023, -0.09944567829370499, -0.14479894936084747, 0.11294008791446686, -0.018175113946199417, -0.004405748099088669, 0.035381123423576355, 0.10372784733772278, 0.08293978869915009, -0.09428326785564423, -0.03323529288172722, -0.04115309566259384, 0.1242377832531929, -0.013459201902151108, 0.10185486823320389, -0.013112315908074379, 0.025816544890403748, 0.017046596854925156, -0.07630515843629837, 0.03133515641093254, -0.11407602578401566, -0.0902017280459404, -0.03248486667871475, -0.10183262079954147, 0.06517069041728973, 0.06898685544729233, 0.06984955817461014, -0.1047503650188446, -0.12643209099769592, 0.06434234231710434, 0.11558277159929276, -0.06318636238574982, 0.014834328554570675, -0.06586302071809769, 0.0845935195684433, -0.05327822268009186, -0.016499018296599388, -0.1779966801404953, -0.07844062149524689, 0.02466726303100586, -0.03130451589822769, -0.0094557274132967, -0.03031281754374504, 0.07671185582876205, 0.09063227474689484, -0.08724180608987808, -0.06364285200834274, -0.08882556110620499, -0.012843932956457138, -0.09964805841445923, -0.2378973364830017, -0.08962913602590561, -0.01993866264820099, 0.14463675022125244, -0.22988659143447876, 0.042621731758117676, 0.014453334733843803, 0.1485973745584488, 0.043112896382808685, -0.026201028376817703, -0.034500621259212494, 0.07561513036489487, -0.03709983453154564, -0.06377214193344116, 0.029744189232587814, 0.0005477563827298582, -0.09903532266616821, -0.023302920162677765, -0.10129125416278839, 0.1805126965045929, 0.11612452566623688, -0.006675556767731905, -0.10508106648921967, -0.00610288605093956, -0.09587888419628143, -0.04293091222643852, -0.05114264413714409, -0.005807696841657162, 0.10352043062448502, 0.023904619738459587, 0.14596866071224213, -0.08359111845493317, -0.07027360796928406, 0.038340043276548386, -0.006561316084116697, 0.004873588215559721, 0.11025363951921463, 0.08695666491985321, -0.05818710848689079, 0.13888292014598846, 0.1336652934551239, -0.0964466854929924, 0.13081911206245422, -0.07442855089902878, -0.0915239080786705, -0.021148575469851494, 0.0032659913413226604, 0.032422203570604324, 0.15303705632686615, -0.08109591901302338, -0.012830604799091816, 0.016485227271914482, 0.00688563846051693, 0.01449383795261383, -0.21434980630874634, -0.01558882836252451, 0.026781722903251648, -0.04718022793531418, -0.017634056508541107, -0.015654556453227997, 0.006518620997667313, 0.1095452532172203, 0.007573322858661413, -0.04840463027358055, 0.00910534244030714, 0.005646300967782736, -0.06345925480127335, 0.2190183401107788, -0.06389988958835602, -0.12549574673175812, -0.1564755141735077, 0.010008235462009907, -0.06253962963819504, -0.005776070989668369, 0.03982238098978996, -0.10213128477334976, -0.027341356500983238, -0.05721236765384674, 0.03894323110580444, -0.0036053152289241552, 0.049082595854997635, -0.0031703407876193523, 0.028830109164118767, 0.09132377803325653, -0.11567220836877823, 0.027923550456762314, -0.03408386930823326, -0.07500817626714706, 0.017948981374502182, 0.05878116562962532, 0.11228915303945541, 0.15033702552318573, -0.007348120678216219, 0.007361868862062693, -0.03678495064377785, 0.16169673204421997, -0.09243063628673553, -0.041893914341926575, 0.12433522194623947, -0.000008680133760208264, 0.04445362091064453, 0.1280418187379837, 0.06930028647184372, -0.08067496865987778, 0.018678581342101097, 0.0548323318362236, -0.015182758681476116, -0.23563191294670105, 
-0.022715723142027855, -0.028077159076929092, 0.00814307201653719, 0.11536793410778046, 0.04062389209866524, 0.018737051635980606, 0.05522122234106064, -0.028313497081398964, 0.03981073200702667, -0.04107586666941643, 0.08299916237592697, 0.05878441780805588, 0.05242575705051422, 0.1396494209766388, -0.036067117005586624, -0.05023907497525215, 0.02952655404806137, -0.03176138177514076, 0.21010121703147888, -0.04456307739019394, 0.11510186642408371, 0.05008431524038315, 0.16546675562858582, 0.0037329187616705894, 0.08762861788272858, 0.01150160189718008, -0.040953174233436584, 0.021119682118296623, -0.061440832912921906, -0.012056591920554638, 0.03295736759901047, -0.00786107499152422, 0.09269110858440399, -0.15222470462322235, -0.006250220816582441, 0.05308784916996956, 0.3239600956439972, 0.06473486125469208, -0.326954185962677, -0.1224166750907898, 0.0004568391013890505, -0.051850296556949615, -0.031177658587694168, 0.015558628365397453, 0.12868373095989227, -0.09882961958646774, 0.06174202635884285, -0.08303960412740707, 0.0880187526345253, -0.02396317571401596, 0.01052600983530283, 0.11029776185750961, 0.09095355123281479, -0.011601640842854977, 0.0634184405207634, -0.2471865564584732, 0.2911796271800995, 0.0026582307182252407, 0.08250569552183151, -0.039907701313495636, 0.028600182384252548, 0.03656239062547684, 0.055153004825115204, 0.052815984934568405, -0.028472453355789185, -0.04184645786881447, -0.1961124837398529, -0.0861872211098671, 0.021835671737790108, 0.1237502247095108, -0.10241687297821045, 0.13196896016597748, -0.026391692459583282, -0.025928908959031105, 0.055969495326280594, -0.05576491728425026, -0.07885155826807022, -0.07316835969686508, 0.012395487166941166, -0.01974816806614399, 0.03650772571563721, -0.1212816834449768, -0.13995489478111267, -0.07438519597053528, 0.1722123771905899, -0.08183415234088898, -0.04207262396812439, -0.12979243695735931, 0.11083763092756271, 0.14272701740264893, -0.07190321385860443, 0.05099927634000778, 0.0020685791969299316, 0.13558273017406464, 0.01950996369123459, -0.025379985570907593, 0.09656868875026703, -0.08633742481470108, -0.26628419756889343, -0.0593857616186142, 0.1530088186264038, 0.01634146459400654, 0.05424105376005173, -0.026005418971180916, 0.030700549483299255, -0.0027688138652592897, -0.08882696181535721, 0.023158371448516846, -0.014037983492016792, 0.0663943886756897, 0.022318605333566666, -0.04757096245884895, 0.007520480081439018, -0.04488430917263031, -0.04406561702489853, 0.10883232951164246, 0.29779210686683655, -0.10226108878850937, -0.010091641917824745, 0.053696852177381516, -0.035114943981170654, -0.16765202581882477, 0.04349992424249649, 0.11732368916273117, 0.024607498198747635, 0.01316091325134039, -0.16742946207523346, 0.09602686017751694, 0.10011701285839081, -0.050003841519355774, 0.11981936544179916, -0.2792549729347229, -0.14494892954826355, 0.09391557425260544, 0.12730899453163147, -0.004763536620885134, -0.18389898538589478, -0.05868062004446983, -0.014314436353743076, -0.10933738201856613, 0.10934127122163773, -0.06772268563508987, 0.10194217413663864, -0.02205062098801136, 0.04168815538287163, 0.017811426892876625, -0.05982322245836258, 0.14311352372169495, -0.030632860958576202, 0.08140236884355545, -0.0154231833294034, -0.006616709753870964, 0.08254428952932358, -0.06030936911702156, 0.007752405479550362, -0.07872440665960312, 0.03754502907395363, -0.08305983990430832, -0.016081085428595543, -0.10215353965759277, 0.04520108550786972, -0.07107727974653244, -0.0565691813826561, 
-0.023667030036449432, 0.046521395444869995, 0.007396528497338295, -0.02765454165637493, 0.1584896594285965, 0.012524520978331566, 0.2065180540084839, 0.11267764866352081, 0.06863902509212494, -0.006200431380420923, -0.06651940196752548, -0.001193907344713807, -0.029260670766234398, 0.07901531457901001, -0.15918207168579102, 0.017665276303887367, 0.13021691143512726, 0.06371310353279114, 0.11851166933774948, 0.07209724187850952, -0.0559963658452034, 0.0031425640918314457, 0.09824128448963165, -0.13293029367923737, -0.07714305073022842, -0.020624810829758644, 0.008331408724188805, -0.17765964567661285, 0.07958657294511795, 0.10012942552566528, -0.08473151177167892, -0.019496332854032516, 0.009043285623192787, 0.006187996361404657, -0.029812270775437355, 0.22709552943706512, 0.07209781557321548, 0.09319458901882172, -0.09589840471744537, 0.07909374684095383, 0.04848664253950119, -0.12714697420597076, -0.0057270280085504055, 0.07210776954889297, -0.05959481745958328, -0.011177442967891693, 0.03887719660997391, 0.09923412650823593, -0.040780775249004364, -0.05922909453511238, -0.16861492395401, -0.132437065243721, 0.06328950077295303, 0.14962127804756165, 0.07841522246599197, 0.037526942789554596, -0.012706613168120384, 0.05603185296058655, -0.13272571563720703, 0.12754039466381073, 0.06956496834754944, 0.10564211755990982, -0.16139531135559082, 0.1717895120382309, 0.007568419445306063, 0.02337685041129589, -0.006661569699645042, 0.040083881467580795, -0.11321809142827988, -0.008646136149764061, -0.12284479290246964, -0.04711460694670677, -0.031632281839847565, -0.006955963093787432, -0.00006544453208334744, -0.04998844116926193, -0.06526198983192444, 0.02146482840180397, -0.11386082321405411, -0.04151822626590729, 0.013204444199800491, 0.041833169758319855, -0.13971897959709167, -0.008595479652285576, 0.036977410316467285, -0.11134974658489227, 0.08682017773389816, 0.03690828010439873, 0.05472312867641449, 0.05537155270576477, -0.07764991372823715, 0.01702473871409893, 0.041473373770713806, -0.015690119937062263, 0.054760366678237915, -0.1169360801577568, -0.004813831765204668, -0.03675725683569908, 0.04722094163298607, 0.010435215197503567, 0.04041797295212746, -0.1515752077102661, -0.009298978373408318, -0.0004097149649169296, -0.050683293491601944, -0.05041225627064705, 0.034107595682144165, 0.06457269936800003, 0.018942156806588173, 0.18059125542640686, -0.08164027333259583, 0.026792943477630615, -0.2282077521085739, -0.0008044239366427064, -0.031946245580911636, -0.09248047322034836, -0.11521407216787338, -0.01211195532232523, 0.07410555332899094, -0.061820026487112045, 0.08912555128335953, -0.026793593540787697, 0.0996566116809845, 0.04886603727936745, -0.05120455101132393, 0.03481615334749222, 0.05545368045568466, 0.19509300589561462, 0.03215046972036362, -0.018842170014977455, 0.04571685567498207, 0.028284022584557533, 0.06851134449243546, 0.07262628525495529, 0.19655565917491913, 0.114931620657444, -0.02005389891564846, 0.09376164525747299, 0.06409456580877304, -0.09143050014972687, -0.15961022675037384, 0.04901648312807083, -0.03740547224879265, 0.11585725843906403, -0.020720526576042175, 0.17400692403316498, 0.11596637219190598, -0.1873561590909958, 0.021009476855397224, -0.04602295160293579, -0.07071857899427414, -0.10175451636314392, 0.009801485575735569, -0.07164841890335083, -0.17144490778446198, 0.016703715547919273, -0.12304110825061798, 0.016418462619185448, 0.07439973950386047, 0.015797169879078865, 0.008854378014802933, 0.1664404273033142, 0.03961867094039917, 
0.03815040737390518, 0.08622182905673981, 0.03336436673998833, -0.01024820376187563, -0.043518148362636566, -0.0803685411810875, -0.0021498962305486202, -0.02402854897081852, 0.032072436064481735, -0.07317466288805008, -0.10964885354042053, 0.06166083738207817, 0.03658343106508255, -0.10214179754257202, 0.033110249787569046, 0.0010155236814171076, 0.08808005601167679, 0.05518699064850807, 0.003694891696795821, 0.027381950989365578, -0.027034547179937363, 0.261735737323761, -0.11677906662225723, -0.05585198476910591, -0.14034834504127502, 0.282959908246994, 0.01662212610244751, -0.02896217256784439, 0.041586775332689285, -0.0958094447851181, -0.024738768115639687, 0.15782184898853302, 0.16380083560943604, -0.023257168009877205, -0.017191143706440926, 0.01619056984782219, -0.02301553636789322, -0.05803917348384857, 0.08468751609325409, 0.10753961652517319, 0.08848544210195541, -0.07952596992254257, -0.04026661813259125, -0.03239137679338455, -0.04136683791875839, -0.008467836305499077, 0.08544673025608063, 0.013416719622910023, -0.011037240736186504, -0.045457303524017334, 0.07718843966722488, -0.024785170331597328, -0.1462007313966751, 0.07919901609420776, -0.2054501175880432, -0.19184741377830505, -0.02578708715736866, 0.08659825474023819, 0.003068003337830305, 0.07240613549947739, 0.004872404504567385, -0.029742328450083733, 0.08990361541509628, -0.014855730347335339, -0.041491810232400894, -0.13624751567840576, 0.10137315839529037, -0.06955939531326294, 0.22950327396392822, -0.047311652451753616, 0.039517443627119064, 0.1270684003829956, 0.03293723985552788, -0.10247359424829483, 0.012776868417859077, 0.07805858552455902, -0.14256995916366577, 0.030257798731327057, 0.16245199739933014, -0.03401532769203186, 0.09799139946699142, 0.02500932291150093, -0.14897088706493378, 0.007130754645913839, -0.0701923593878746, -0.05869687348604202, -0.07710591703653336, -0.0023668049834668636, -0.03770775720477104, 0.12471789121627808, 0.24095675349235535, -0.05641204118728638, -0.0027487431652843952, -0.05992086976766586, 0.033584024757146835, 0.06673245131969452, 0.08382243663072586, -0.017831720411777496, -0.2847280204296112, 0.030391937121748924, 0.049179527908563614, -0.009244152344763279, -0.2812321186065674, -0.07817690074443817, 0.05487522482872009, -0.06031074747443199, -0.08202002942562103, 0.08711827546358109, 0.07535568624734879, 0.05131061002612114, -0.049969758838415146, -0.07019377499818802, -0.08019769191741943, 0.16727598011493683, -0.18425734341144562, -0.08366869390010834 ]
null
null
transformers
# gpt-neo-125M-couples_therapist_full_renamed

This model is a fine-tuned version of [EleutherAI/gpt-neo-125M](https://huggingface.co/EleutherAI/gpt-neo-125M) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 3.0235

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3.0

### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:----:|:---------------:|
| No log        | 1.0   | 75   | 3.1626          |
| No log        | 2.0   | 150  | 3.0488          |
| No log        | 3.0   | 225  | 3.0235          |

### Framework versions

- Transformers 4.35.0
- Pytorch 2.1.0+cu118
- Datasets 2.14.6
- Tokenizers 0.14.1
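The hyperparameters listed above map directly onto Hugging Face `TrainingArguments`; a minimal sketch is shown below. The card does not say what data was used, so the one-sentence dataset here is a hypothetical stand-in, and `evaluation_strategy="epoch"` is an assumption inferred from the one-validation-loss-per-epoch results table.

```python
from datasets import Dataset
from transformers import (AutoModelForCausalLM, AutoTokenizer, Trainer,
                          TrainingArguments)

model_name = "EleutherAI/gpt-neo-125M"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Hypothetical one-sentence stand-in: the real training data is not specified.
enc = tokenizer(["Therapist: How are you both feeling this week?"],
                truncation=True, max_length=64)
enc["labels"] = enc["input_ids"]  # causal LM: the targets are the input ids themselves
train_ds = eval_ds = Dataset.from_dict(dict(enc))

args = TrainingArguments(
    output_dir="gpt-neo-125M-couples_therapist_full_renamed",
    learning_rate=2e-5,               # values below are as listed on the card
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    adam_beta1=0.9, adam_beta2=0.999, adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=3.0,
    evaluation_strategy="epoch",      # assumption: card reports one validation loss per epoch
)

Trainer(model=model, args=args, train_dataset=train_ds, eval_dataset=eval_ds).train()
```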
{"license": "mit", "tags": ["generated_from_trainer"], "base_model": "EleutherAI/gpt-neo-125M", "model-index": [{"name": "gpt-neo-125M-couples_therapist_full_renamed", "results": []}]}
text-generation
ColleenMacklin/gpt-neo-125M-couples_therapist_full_renamed
[ "transformers", "tensorboard", "safetensors", "gpt_neo", "text-generation", "generated_from_trainer", "base_model:EleutherAI/gpt-neo-125M", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T17:41:44+00:00
[]
[]
TAGS #transformers #tensorboard #safetensors #gpt_neo #text-generation #generated_from_trainer #base_model-EleutherAI/gpt-neo-125M #license-mit #autotrain_compatible #endpoints_compatible #region-us
gpt-neo-125M-couples\_therapist\_full\_renamed
==============================================

This model is a fine-tuned version of EleutherAI/gpt-neo-125M on an unspecified dataset.
It achieves the following results on the evaluation set:

* Loss: 3.0235

Model description
-----------------

More information needed

Intended uses & limitations
---------------------------

More information needed

Training and evaluation data
----------------------------

More information needed

Training procedure
------------------

### Training hyperparameters

The following hyperparameters were used during training:

* learning\_rate: 2e-05
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 3.0

### Training results

### Framework versions

* Transformers 4.35.0
* Pytorch 2.1.0+cu118
* Datasets 2.14.6
* Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3.0", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #tensorboard #safetensors #gpt_neo #text-generation #generated_from_trainer #base_model-EleutherAI/gpt-neo-125M #license-mit #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3.0", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 73, 98, 4, 33 ]
[ "passage: TAGS\n#transformers #tensorboard #safetensors #gpt_neo #text-generation #generated_from_trainer #base_model-EleutherAI/gpt-neo-125M #license-mit #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3.0### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.09340846538543701, 0.06741952151060104, -0.0018559551099315286, 0.10453764349222183, 0.13651709258556366, 0.016949983313679695, 0.1594475656747818, 0.11260412633419037, -0.07910064607858658, 0.060557398945093155, 0.13735359907150269, 0.11269506812095642, 0.018921131268143654, 0.13898272812366486, -0.05404054373502731, -0.22676658630371094, 0.015852589160203934, 0.039878424257040024, -0.05083145573735237, 0.10743747651576996, 0.09119011461734772, -0.13102523982524872, 0.10498366504907608, -0.004528381861746311, -0.18653756380081177, 0.01274097990244627, 0.02288978174328804, -0.053470537066459656, 0.13368836045265198, 0.050591450184583664, 0.12817592918872833, 0.026488762348890305, 0.07816201448440552, -0.16509567201137543, 0.013843819499015808, 0.059130266308784485, -0.0034194367472082376, 0.08001767098903656, 0.046467285603284836, 0.0032968269661068916, 0.09952326864004135, -0.07144073396921158, 0.05813327431678772, 0.016279153525829315, -0.13425257802009583, -0.20619845390319824, -0.08709648996591568, 0.026502031832933426, 0.09018822759389877, 0.08982151001691818, -0.016340691596269608, 0.1508723944425583, -0.028957754373550415, 0.09080027788877487, 0.21699802577495575, -0.3209638297557831, -0.07030823826789856, 0.05039776861667633, 0.0564708448946476, 0.10388913750648499, -0.09976650774478912, 0.00947798416018486, 0.07137077301740646, 0.02705993689596653, 0.12904521822929382, -0.026136990636587143, -0.022079378366470337, 0.014819343574345112, -0.15040358901023865, -0.007206631824374199, 0.15805460512638092, 0.042646411806344986, -0.04538669437170029, -0.04375893250107765, -0.07112518697977066, -0.14443308115005493, -0.03805292770266533, -0.028159506618976593, 0.03866248205304146, -0.0255644079297781, -0.0781865045428276, -0.051967374980449677, -0.10761233419179916, -0.08368779718875885, -0.06781763583421707, 0.16275060176849365, 0.03216961771249771, -0.0000031286228932003723, -0.010622152127325535, 0.1012996956706047, -0.04781263321638107, -0.12811994552612305, 0.011781255714595318, 0.023629438132047653, 0.01663234271109104, -0.05211063474416733, -0.05462479218840599, -0.09996023774147034, 0.011203157715499401, 0.13395370543003082, -0.04396717995405197, 0.0391448549926281, 0.004313960671424866, 0.04769232124090195, -0.10892993956804276, 0.16989482939243317, -0.04974313825368881, -0.0473102405667305, 0.01519484631717205, 0.08105960488319397, 0.05126696452498436, -0.020533280447125435, -0.13336916267871857, 0.024664228782057762, 0.10118542611598969, 0.00492361793294549, -0.05741296336054802, 0.07821020483970642, -0.040083903819322586, -0.002169807907193899, 0.031586240977048874, -0.08138757199048996, 0.02167546935379505, -0.018036292865872383, -0.06304176151752472, -0.05257497355341911, 0.024433020502328873, 0.025598326697945595, 0.020732631906867027, 0.09700154513120651, -0.10882692784070969, 0.0013187802396714687, -0.08617467433214188, -0.13152378797531128, 0.00845503993332386, -0.07547764480113983, 0.020208533853292465, -0.12361025810241699, -0.18587906658649445, -0.004996732342988253, 0.045420993119478226, -0.03449633717536926, -0.01628039963543415, -0.05108046159148216, -0.07622867822647095, 0.02143900655210018, -0.017577799037098885, 0.07106568664312363, -0.05982520058751106, 0.0950159803032875, 0.06359951943159103, 0.06471728533506393, -0.06557336449623108, 0.03034041076898575, -0.09883438050746918, 0.031212320551276207, -0.1905936300754547, 0.011738793924450874, -0.05181790888309479, 0.06645064800977707, -0.07647807896137238, -0.0707361176609993, -0.011806601658463478, 
0.018269404768943787, 0.07554299384355545, 0.08901361376047134, -0.1610804945230484, -0.07812689989805222, 0.16975097358226776, -0.09425216168165207, -0.15120205283164978, 0.14144089818000793, -0.04655912518501282, 0.038469959050416946, 0.07669690996408463, 0.18893781304359436, 0.05750339478254318, -0.09655655175447464, -0.021409479901194572, -0.022391660138964653, 0.05393349379301071, -0.04626837372779846, 0.07960451394319534, -0.001834406633861363, 0.014187231659889221, 0.005642542615532875, -0.051191285252571106, 0.05479926988482475, -0.08422450721263885, -0.0746871829032898, -0.03635208681225777, -0.10902412235736847, 0.06326941400766373, 0.03998046740889549, 0.0690087303519249, -0.12317600101232529, -0.10530778765678406, 0.06538917124271393, 0.07192122936248779, -0.07127262651920319, 0.01969650574028492, -0.08594193309545517, 0.09296225011348724, -0.09405974298715591, -0.020548345521092415, -0.12938198447227478, -0.07954070717096329, 0.013998794369399548, 0.010140396654605865, 0.032743342220783234, 0.01717161200940609, 0.08348057419061661, 0.09627056866884232, -0.07146988064050674, -0.031029915437102318, -0.01747305877506733, 0.007084723096340895, -0.12753504514694214, -0.18763010203838348, -0.008464720100164413, -0.03160985931754112, 0.12455727159976959, -0.23301421105861664, 0.05018661543726921, 0.007964402437210083, 0.09732412546873093, 0.041550081223249435, -0.014489129185676575, -0.05069836229085922, 0.059105437248945236, -0.049264758825302124, -0.06924644857645035, 0.04680664837360382, 0.007367973681539297, -0.1068475991487503, -0.021459177136421204, -0.19656150043010712, 0.21375223994255066, 0.1451265513896942, -0.07221601903438568, -0.07543722540140152, 0.010497531853616238, -0.03499339520931244, -0.02894485369324684, -0.0280768945813179, -0.020031947642564774, 0.1394992172718048, -0.017328621819615364, 0.15222439169883728, -0.0817989781498909, -0.03855075314640999, 0.02984793484210968, -0.0443076491355896, -0.003082706592977047, 0.09590935707092285, 0.08426044881343842, -0.10046994686126709, 0.1541910022497177, 0.16831561923027039, -0.09468808770179749, 0.15616044402122498, -0.01868322491645813, -0.06057994067668915, -0.02814668044447899, 0.004406376276165247, 0.021690277382731438, 0.12330693751573563, -0.10552427172660828, -0.009125680662691593, 0.004280764609575272, 0.016137246042490005, 0.025889787822961807, -0.2154330462217331, -0.03348574787378311, 0.038347143679857254, -0.044342365115880966, 0.03287487104535103, -0.01340689230710268, -0.0217022392898798, 0.10206874459981918, 0.006488382816314697, -0.069630928337574, 0.03564329445362091, 0.01146662887185812, -0.07677417993545532, 0.2022230625152588, -0.08244609832763672, -0.13729771971702576, -0.1442173272371292, -0.06720718741416931, -0.05637754127383232, 0.041667405515909195, 0.05736604705452919, -0.07967952638864517, -0.041927460581064224, -0.10909425467252731, 0.038776129484176636, 0.010577295906841755, 0.03329331427812576, 0.025357119739055634, -0.023368796333670616, 0.05984845757484436, -0.1046847552061081, -0.010047974064946175, -0.03786487504839897, -0.06554718315601349, 0.03044622763991356, 0.03158445283770561, 0.1250663548707962, 0.1464512050151825, -0.022802455350756645, -0.0020338804461061954, -0.0391121506690979, 0.22151397168636322, -0.0803830474615097, -0.008974621072411537, 0.1356273591518402, -0.012967083603143692, 0.04924776405096054, 0.12537819147109985, 0.05348687246441841, -0.10191402584314346, 0.02714756317436695, 0.03907126560807228, -0.03811272978782654, -0.1986304074525833, -0.03459606319665909, 
-0.03775512054562569, 0.015825433656573296, 0.0799705982208252, 0.0410473607480526, 0.047751445323228836, 0.0739952102303505, 0.013063747435808182, 0.08723106235265732, -0.026063328608870506, 0.08231503516435623, 0.1096150204539299, 0.04007735475897789, 0.14366760849952698, -0.046384744346141815, -0.06784116476774216, 0.041722044348716736, 0.006694893818348646, 0.20351086556911469, 0.02464771270751953, 0.13759581744670868, 0.04618169367313385, 0.13352333009243011, 0.005352546460926533, 0.04910392686724663, -0.008897489868104458, -0.05774754285812378, -0.014617325738072395, -0.05053771659731865, -0.00980229303240776, 0.044764790683984756, -0.07679665088653564, 0.051044438034296036, -0.10045947879552841, 0.017545759677886963, 0.05579815432429314, 0.18073302507400513, 0.05066103860735893, -0.36032554507255554, -0.09265457093715668, 0.0329725481569767, -0.01568187028169632, -0.028392843902111053, 0.020241500809788704, 0.1315590888261795, -0.043362684547901154, 0.04141960293054581, -0.08013132214546204, 0.06857790052890778, -0.031217947602272034, 0.04384729266166687, 0.056724801659584045, 0.10378989577293396, -0.026122156530618668, 0.05543261766433716, -0.2731448709964752, 0.27295929193496704, 0.022436536848545074, 0.08247867226600647, -0.036970868706703186, 0.004425073508173227, 0.021336624398827553, 0.08601050823926926, 0.07176819443702698, -0.02572859637439251, -0.07365477085113525, -0.1883000135421753, -0.05177466943860054, 0.02872440218925476, 0.11380628496408463, -0.04572289437055588, 0.12018269300460815, -0.03356243669986725, 0.006751517299562693, 0.08512526005506516, -0.0024880089331418276, -0.08021244406700134, -0.10090342909097672, 0.006530867423862219, 0.03743591532111168, -0.03016456589102745, -0.08261390030384064, -0.101606085896492, -0.13599997758865356, 0.164457306265831, -0.046492308378219604, -0.030099673196673393, -0.10422491282224655, 0.0587075836956501, 0.059897229075431824, -0.08575237542390823, 0.043306101113557816, 0.014249416068196297, 0.09150288999080658, 0.011601768434047699, -0.052446238696575165, 0.13480351865291595, -0.07492634654045105, -0.1688240021467209, -0.0641443207859993, 0.10439112782478333, 0.014776676893234253, 0.045920826494693756, -0.007243471685796976, 0.019626092165708542, -0.01902625896036625, -0.07756128162145615, 0.037132177501916885, -0.014819519594311714, 0.05136004835367203, -0.0020726819057017565, -0.032307419925928116, 0.012638232670724392, -0.052548859268426895, -0.04532736539840698, 0.1508426070213318, 0.2917105555534363, -0.07425422966480255, -0.012764573097229004, 0.057932645082473755, -0.06557603925466537, -0.1930941641330719, 0.05507703498005867, 0.009159471839666367, 0.0019605448469519615, 0.04071231186389923, -0.14343766868114471, 0.09059084206819534, 0.1084979996085167, -0.025578582659363747, 0.12599404156208038, -0.2924797534942627, -0.13976943492889404, 0.11585987359285355, 0.15357418358325958, 0.1322532296180725, -0.1788998246192932, -0.04285441339015961, -0.0345904566347599, -0.11509257555007935, 0.09881594777107239, -0.12110810726881027, 0.11153460294008255, -0.0091604795306921, 0.06458908319473267, 0.0048758890479803085, -0.0603545717895031, 0.1321655660867691, -0.02762565016746521, 0.10463788360357285, -0.07317300885915756, -0.011749879457056522, 0.06008821353316307, -0.04831390082836151, 0.02142236940562725, -0.10984993726015091, 0.02168295904994011, -0.048149775713682175, -0.03848946467041969, -0.04759328439831734, 0.03930085524916649, -0.023697052150964737, -0.0774875059723854, -0.05508623644709587, 0.027984485030174255, 
0.015051442198455334, -0.017106065526604652, 0.15100157260894775, 0.007581754121929407, 0.17421592772006989, 0.09508845955133438, 0.07367496192455292, -0.08047401905059814, -0.011921401135623455, 0.006499194540083408, -0.034518539905548096, 0.06247299909591675, -0.14004090428352356, 0.02548781782388687, 0.125431627035141, -0.0021535123232752085, 0.1403299868106842, 0.0781966969370842, -0.04844016209244728, 0.031575269997119904, 0.07599583268165588, -0.1684030294418335, -0.12675099074840546, -0.021355295553803444, -0.02933439239859581, -0.10770855098962784, 0.07892489433288574, 0.13741426169872284, -0.07782255113124847, 0.007445192895829678, -0.010431156493723392, 0.003839283250272274, -0.03459110110998154, 0.18192201852798462, 0.0639912337064743, 0.04010182246565819, -0.07108857482671738, 0.0699920728802681, 0.039367448538541794, -0.07404622435569763, 0.021249467507004738, 0.052507005631923676, -0.07281890511512756, -0.04019024595618248, 0.029114702716469765, 0.1925891637802124, -0.07315678894519806, -0.04289703071117401, -0.15693321824073792, -0.11421196162700653, 0.046813398599624634, 0.1841183453798294, 0.082201287150383, 0.007361748721450567, -0.02820504456758499, 0.036829620599746704, -0.12013575434684753, 0.10982444137334824, 0.028777029365301132, 0.10306892544031143, -0.16643285751342773, 0.1549527794122696, -0.00906008668243885, 0.0043395203538239, -0.028265226632356644, 0.052353017032146454, -0.11301163583993912, -0.008731534704566002, -0.11477985233068466, -0.02545410767197609, -0.027561092749238014, -0.009619781747460365, -0.0029954195488244295, -0.0629439428448677, -0.07686851918697357, 0.007254456169903278, -0.09860145300626755, -0.01874585449695587, 0.043538179248571396, 0.03837084397673607, -0.11994215101003647, -0.0306144617497921, 0.0205730888992548, -0.058554138988256454, 0.06579739600419998, 0.013868069276213646, 0.03933541476726532, 0.06185401603579521, -0.15681473910808563, 0.04533541947603226, 0.06000258028507233, 0.006733884569257498, 0.04607157036662102, -0.05946559086441994, -0.017362922430038452, -0.012071390636265278, 0.06558287143707275, 0.02330869995057583, 0.0707218274474144, -0.11557678878307343, 0.007298069540411234, -0.03161466121673584, -0.04715980216860771, -0.05752509459853172, 0.03457074239850044, 0.07785018533468246, 0.00799924973398447, 0.18410679697990417, -0.09931176155805588, 0.005068773403763771, -0.20347140729427338, 0.012146473862230778, 0.006014558952301741, -0.126397505402565, -0.08179935067892075, -0.04838176816701889, 0.055297575891017914, -0.05813963711261749, 0.14317983388900757, 0.00497810821980238, 0.02294103614985943, 0.04032149910926819, -0.024688472971320152, 0.053278204053640366, 0.02024233527481556, 0.22505879402160645, 0.03532817214727402, -0.03996822237968445, 0.007914183661341667, 0.045991454273462296, 0.12241290509700775, 0.05316048115491867, 0.18579700589179993, 0.1387254297733307, -0.04172413796186447, 0.11575312912464142, 0.051794230937957764, -0.06068650260567665, -0.16796915233135223, 0.029009757563471794, -0.049279723316431046, 0.084405317902565, -0.020798994228243828, 0.2037607878446579, 0.13845396041870117, -0.1434938609600067, 0.000916650053113699, -0.04460619390010834, -0.07996907830238342, -0.1096401959657669, -0.06307537108659744, -0.10052996873855591, -0.15423540771007538, 0.004570727236568928, -0.11458595842123032, 0.015522954985499382, 0.09481615573167801, 0.010820102877914906, -0.018529172986745834, 0.20141571760177612, 0.012132640928030014, 0.02797817997634411, 0.03360072895884514, -0.0013110163854435086, 
-0.026949442923069, -0.07010170072317123, -0.0924760103225708, 0.0008309579570777714, -0.018251029774546623, 0.025870561599731445, -0.05353248119354248, -0.038007210940122604, 0.04868901148438454, -0.004736776929348707, -0.10316608846187592, -0.0009608439286239445, 0.031804461032152176, 0.05202686786651611, 0.026270641013979912, 0.001850993256084621, -0.0022691849153488874, -0.0050406730733811855, 0.2196269929409027, -0.0774664580821991, -0.05016356334090233, -0.08559688180685043, 0.1929437518119812, 0.023561011999845505, 0.022242993116378784, -0.0025323175359517336, -0.08952666074037552, 0.02533365599811077, 0.2191399186849594, 0.18828435242176056, -0.08493376523256302, -0.0025263044517487288, -0.003618644317612052, -0.007427150849252939, -0.04494783282279968, 0.09282826632261276, 0.09986437112092972, 0.04060587286949158, -0.0739351287484169, -0.05125167965888977, -0.04396805912256241, 0.006093745119869709, -0.03385583311319351, 0.05531302094459534, 0.03918347880244255, 0.028281649574637413, -0.04228479415178299, 0.054826077073812485, -0.031426724046468735, -0.09001324325799942, 0.01844901219010353, -0.20178304612636566, -0.14339803159236908, -0.006316816434264183, 0.12237155437469482, -0.028107961639761925, 0.06417646259069443, -0.02914784476161003, -0.008420825935900211, 0.03776328265666962, -0.011319831013679504, -0.08166877180337906, -0.07727497816085815, 0.05907245725393295, -0.08372912555932999, 0.2396562546491623, -0.04560381919145584, 0.03857458382844925, 0.1339457631111145, 0.028760787099599838, -0.07757771760225296, 0.09674894064664841, 0.04623831436038017, -0.07357596606016159, 0.039675042033195496, 0.0954752117395401, -0.036856766790151596, 0.12414141744375229, 0.05781061202287674, -0.13224495947360992, 0.019490057602524757, -0.049868665635585785, -0.07577546685934067, -0.04669976979494095, -0.04173165559768677, -0.07404816895723343, 0.13999074697494507, 0.16924120485782623, -0.03157460689544678, 0.006163037847727537, -0.03727591782808304, 0.027786999940872192, 0.07878934592008591, 0.08292633295059204, -0.02406875602900982, -0.2487703412771225, 0.01202327199280262, 0.07483787089586258, -0.011740419082343578, -0.3095781207084656, -0.08348195999860764, -0.016715215519070625, -0.03761017695069313, -0.09398409724235535, 0.08110729604959488, 0.1442752480506897, 0.049353018403053284, -0.06592199951410294, -0.09621013700962067, -0.08011601120233536, 0.15307030081748962, -0.1397988200187683, -0.10149189084768295 ]
null
null
transformers
# ICU_Returns_ClinicalBERT

This model is a fine-tuned version of [medicalai/ClinicalBERT](https://huggingface.co/medicalai/ClinicalBERT) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 1.3201
- F1: 0.7134
- ROC AUC: 0.7225
- Precision with 0: 0.8462
- Precision with 1: 0.6640
- Recall with 0: 0.5440
- Recall with 1: 0.9011
- Accuracy: 0.7225

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 13

### Training results

| Training Loss | Epoch | Step | Validation Loss | F1     | ROC AUC | Precision with 0 | Precision with 1 | Recall with 0 | Recall with 1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|:----------------:|:----------------:|:-------------:|:-------------:|:--------:|
| No log        | 1.0   | 46   | 0.7057          | 0.3454 | 0.5055  | 1.0              | 0.5028           | 0.0110        | 1.0           | 0.5055   |
| No log        | 2.0   | 92   | 0.6827          | 0.5715 | 0.5742  | 0.5882           | 0.5640           | 0.4945        | 0.6538        | 0.5742   |
| No log        | 3.0   | 138  | 0.7221          | 0.4612 | 0.5467  | 0.7297           | 0.5260           | 0.1484        | 0.9451        | 0.5467   |
| No log        | 4.0   | 184  | 0.6284          | 0.6693 | 0.6841  | 0.6293           | 0.8190           | 0.8956        | 0.4725        | 0.6841   |
| No log        | 5.0   | 230  | 0.9235          | 0.6283 | 0.6401  | 0.7179           | 0.6032           | 0.4615        | 0.8187        | 0.6401   |
| No log        | 6.0   | 276  | 0.8772          | 0.6534 | 0.6648  | 0.7586           | 0.6210           | 0.4835        | 0.8462        | 0.6648   |
| No log        | 7.0   | 322  | 0.7968          | 0.7677 | 0.7692  | 0.8224           | 0.7311           | 0.6868        | 0.8516        | 0.7692   |
| No log        | 8.0   | 368  | 0.6826          | 0.8132 | 0.8132  | 0.8167           | 0.8098           | 0.8077        | 0.8187        | 0.8132   |
| No log        | 9.0   | 414  | 1.2195          | 0.6950 | 0.7033  | 0.8033           | 0.6529           | 0.5385        | 0.8681        | 0.7033   |
| No log        | 10.0  | 460  | 0.9542          | 0.7617 | 0.7637  | 0.8243           | 0.7222           | 0.6703        | 0.8571        | 0.7637   |
| 0.3635        | 11.0  | 506  | 1.3032          | 0.7079 | 0.7143  | 0.8047           | 0.6653           | 0.5659        | 0.8626        | 0.7143   |
| 0.3635        | 12.0  | 552  | 1.4170          | 0.7063 | 0.7143  | 0.8197           | 0.6612           | 0.5495        | 0.8791        | 0.7143   |
| 0.3635        | 13.0  | 598  | 1.3201          | 0.7134 | 0.7225  | 0.8462           | 0.6640           | 0.5440        | 0.9011        | 0.7225   |

### Framework versions

- Transformers 4.34.0
- Pytorch 2.1.0+cu121
- Datasets 2.14.5
- Tokenizers 0.14.1
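The evaluation columns above (class-wise precision and recall alongside F1, ROC AUC and accuracy) can be computed with scikit-learn; a minimal sketch follows. The toy labels and the macro averaging are assumptions for illustration, not the card's actual evaluation code.

```python
import numpy as np
from sklearn.metrics import (accuracy_score, f1_score, precision_score,
                             recall_score, roc_auc_score)

# Toy ground truth and hard predictions; the model's real outputs are not published.
y_true = np.array([0, 0, 0, 1, 1, 1, 1, 0])
y_pred = np.array([0, 1, 0, 1, 1, 1, 0, 0])

metrics = {
    "F1": f1_score(y_true, y_pred, average="macro"),       # averaging is an assumption
    "ROC AUC": roc_auc_score(y_true, y_pred),              # on hard labels this equals balanced accuracy
    "Precision with 0": precision_score(y_true, y_pred, pos_label=0),
    "Precision with 1": precision_score(y_true, y_pred, pos_label=1),
    "Recall with 0": recall_score(y_true, y_pred, pos_label=0),
    "Recall with 1": recall_score(y_true, y_pred, pos_label=1),
    "Accuracy": accuracy_score(y_true, y_pred),
}
print(metrics)
```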
{"tags": ["generated_from_trainer"], "base_model": "medicalai/ClinicalBERT", "model-index": [{"name": "ICU_Returns_ClinicalBERT", "results": []}]}
text-classification
moro01525/ICU_Returns_ClinicalBERT
[ "transformers", "pytorch", "distilbert", "text-classification", "generated_from_trainer", "base_model:medicalai/ClinicalBERT", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T17:43:41+00:00
[]
[]
TAGS #transformers #pytorch #distilbert #text-classification #generated_from_trainer #base_model-medicalai/ClinicalBERT #autotrain_compatible #endpoints_compatible #region-us
ICU\_Returns\_ClinicalBERT
==========================

This model is a fine-tuned version of medicalai/ClinicalBERT on an unknown dataset.
It achieves the following results on the evaluation set:

* Loss: 1.3201
* F1: 0.7134
* ROC AUC: 0.7225
* Precision with 0: 0.8462
* Precision with 1: 0.6640
* Recall with 0: 0.5440
* Recall with 1: 0.9011
* Accuracy: 0.7225

Model description
-----------------

More information needed

Intended uses & limitations
---------------------------

More information needed

Training and evaluation data
----------------------------

More information needed

Training procedure
------------------

### Training hyperparameters

The following hyperparameters were used during training:

* learning\_rate: 0.0001
* train\_batch\_size: 32
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 13

### Training results

### Framework versions

* Transformers 4.34.0
* Pytorch 2.1.0+cu121
* Datasets 2.14.5
* Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13", "### Training results", "### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #pytorch #distilbert #text-classification #generated_from_trainer #base_model-medicalai/ClinicalBERT #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13", "### Training results", "### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ 58, 97, 4, 33 ]
[ "passage: TAGS\n#transformers #pytorch #distilbert #text-classification #generated_from_trainer #base_model-medicalai/ClinicalBERT #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13### Training results### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ -0.1032809242606163, 0.05841078236699104, -0.0018423302099108696, 0.1014985516667366, 0.18858270347118378, 0.034419309347867966, 0.11507382243871689, 0.12837035953998566, -0.10963667929172516, 0.031722672283649445, 0.13512365520000458, 0.15409676730632782, -0.004192421678453684, 0.11714092642068863, -0.05456770956516266, -0.2817639410495758, -0.020948972553014755, 0.07106881588697433, -0.06207147613167763, 0.1352255940437317, 0.096841961145401, -0.14035093784332275, 0.08088080585002899, 0.005514374002814293, -0.21781833469867706, 0.02094617299735546, 0.04031636193394661, -0.05369657650589943, 0.15229222178459167, 0.006686375476419926, 0.13457810878753662, 0.009945755824446678, 0.09728844463825226, -0.16650161147117615, 0.008752803318202496, 0.03550272434949875, 0.016820017248392105, 0.09293657541275024, 0.036311421543359756, -0.044094834476709366, 0.12354619055986404, -0.10697522759437561, 0.07133462280035019, 0.018132749944925308, -0.1397426277399063, -0.22138141095638275, -0.07023436576128006, 0.03431855887174606, 0.07054496556520462, 0.10415253043174744, -0.006872047670185566, 0.14378464221954346, -0.09943278133869171, 0.09303237497806549, 0.219284325838089, -0.26084816455841064, -0.07971706241369247, 0.03286369517445564, 0.02651626616716385, 0.07334581017494202, -0.1091691181063652, -0.007797046564519405, 0.06147969514131546, 0.05219715088605881, 0.12127823382616043, -0.02653307281434536, -0.07775720953941345, 0.020420564338564873, -0.13290923833847046, -0.007892544381320477, 0.14274758100509644, 0.018990743905305862, -0.02444002777338028, -0.021728290244936943, -0.04008468985557556, -0.157234326004982, -0.03742307424545288, -0.036715004593133926, 0.03263837844133377, -0.060093820095062256, -0.06482218205928802, 0.01148376427590847, -0.09256337583065033, -0.08167712390422821, -0.0633295476436615, 0.15618154406547546, 0.03261171653866768, 0.002187918871641159, -0.009056413546204567, 0.11564378440380096, -0.009445364587008953, -0.12769199907779694, 0.03665345907211304, 0.029228685423731804, -0.010851438157260418, -0.0794486552476883, -0.08547870069742203, -0.03102482482790947, 0.0029675026889890432, 0.11101391166448593, -0.05290423333644867, 0.03535278141498566, 0.04773585870862007, 0.022493138909339905, -0.0974297747015953, 0.1768604815006256, -0.05572042241692543, -0.015388473868370056, -0.015148136764764786, 0.06932790577411652, -0.01986633986234665, 0.008174366317689419, -0.10652093589305878, -0.01554425060749054, 0.10907870531082153, 0.005038317758589983, -0.10992968082427979, 0.07106251269578934, -0.04939853027462959, -0.03789755702018738, 0.008320016786456108, -0.08755948394536972, 0.02852192148566246, -0.006130459252744913, -0.0834236666560173, 0.0012718020007014275, 0.03354348987340927, 0.023168841376900673, -0.020439861342310905, 0.1236492395401001, -0.09116009622812271, 0.045897115021944046, -0.09170983731746674, -0.10626491904258728, -0.017207162454724312, -0.08283846080303192, 0.029898349195718765, -0.0892595425248146, -0.1351402848958969, -0.03232995420694351, 0.03953487426042557, -0.03713485226035118, -0.042754724621772766, -0.06765305995941162, -0.048722315579652786, 0.015810582786798477, 0.005295832175761461, 0.10811541974544525, -0.05846076086163521, 0.10717068612575531, 0.04417022317647934, 0.08855641633272171, -0.06457973271608353, 0.05767320469021797, -0.09312613308429718, 0.002223522635176778, -0.21545521914958954, 0.0524924173951149, -0.0358283631503582, 0.061187900602817535, -0.09019801765680313, -0.11958958953619003, 0.014092975296080112, -0.01418442465364933, 
0.08367277681827545, 0.10944539308547974, -0.15202614665031433, -0.09394866973161697, 0.15604658424854279, -0.05270406976342201, -0.11768372356891632, 0.1066119596362114, -0.07513139396905899, 0.0731021910905838, 0.08388820290565491, 0.16592401266098022, 0.08757343143224716, -0.06994256377220154, 0.03844703733921051, -0.039898715913295746, 0.05501234531402588, -0.041032493114471436, 0.059500694274902344, 0.0018377562519162893, -0.026128100231289864, 0.030612820759415627, -0.04623403400182724, 0.046486590057611465, -0.12833446264266968, -0.08400598168373108, -0.02681121602654457, -0.11890925467014313, 0.08224040269851685, 0.06872858107089996, 0.10248593986034393, -0.10779270529747009, -0.046703603118658066, 0.09887035936117172, 0.08172816038131714, -0.05002282187342644, 0.013909506611526012, -0.05437545105814934, 0.03102993778884411, -0.02971743233501911, -0.031313370913267136, -0.18849004805088043, -0.020558781921863556, 0.0030016868840903044, 0.05448422580957413, 0.015803681686520576, 0.00310977129265666, 0.07559644430875778, 0.07838889211416245, -0.07087031751871109, -0.01726255565881729, -0.02810901217162609, 0.0029510948807001114, -0.1434188336133957, -0.20149509608745575, -0.01112182717770338, -0.020231440663337708, 0.14613358676433563, -0.2147139310836792, 0.03684896603226662, -0.04761205241084099, 0.08566561341285706, 0.003635224187746644, -0.016401853412389755, -0.04158535227179527, 0.08830021321773529, -0.019007159397006035, -0.0330420657992363, 0.07612856477499008, -0.025064243003726006, -0.08991780877113342, -0.05004395171999931, -0.08514919131994247, 0.17704759538173676, 0.11649126559495926, -0.1458543837070465, -0.1077437475323677, -0.03704603761434555, -0.05458298325538635, -0.029502445831894875, -0.05843956768512726, 0.018037060275673866, 0.1911066174507141, -0.017068080604076385, 0.1579982489347458, -0.049871865659952164, -0.03771059215068817, 0.013369236141443253, -0.04585395008325577, 0.04629866033792496, 0.11615095287561417, 0.10739833116531372, -0.0844951868057251, 0.11754236370325089, 0.13734978437423706, -0.08871456235647202, 0.1531725972890854, -0.022091958671808243, -0.06946130096912384, -0.018818913027644157, -0.04334786906838417, 0.0008765985840000212, 0.07330126315355301, -0.16430287063121796, -0.018481982871890068, 0.010339202359318733, 0.020533086732029915, 0.01744813844561577, -0.20399364829063416, -0.05168488249182701, 0.04315465688705444, -0.021985914558172226, -0.023203562945127487, -0.010264717042446136, 0.014531123451888561, 0.13069631159305573, 0.015488415956497192, -0.06799617409706116, 0.02569744735956192, 0.010163434781134129, -0.09338691830635071, 0.21540504693984985, -0.08482179045677185, -0.12307041138410568, -0.11935018748044968, -0.08038418740034103, -0.03675960749387741, 0.026249833405017853, 0.03889884054660797, -0.12201879173517227, -0.005101237446069717, -0.035506460815668106, 0.029381753876805305, 0.017172623425722122, 0.049015600234270096, 0.005749993026256561, 0.01039680652320385, 0.07398117333650589, -0.08915689587593079, -0.017064768821001053, -0.0781298354268074, -0.07595479488372803, 0.05873595550656319, -0.0023078974336385727, 0.10622433573007584, 0.1785266101360321, -0.033155154436826706, 0.014238760806620121, -0.02888721600174904, 0.24185410141944885, -0.06890993565320969, -0.031019238755106926, 0.1262485533952713, -0.004103658255189657, 0.033497996628284454, 0.10321100801229477, 0.0819992795586586, -0.10109198093414307, 0.024841908365488052, 0.037640683352947235, -0.023035403341054916, -0.2243444323539734, -0.05513560771942139, 
-0.049459267407655716, -0.06466268002986908, 0.09331105649471283, 0.0018632138380780816, 0.05192650482058525, 0.07912054657936096, 0.050950393080711365, 0.08851531893014908, -0.05510208383202553, 0.048012636601924896, 0.11534219235181808, 0.036348555237054825, 0.13486677408218384, -0.031146688386797905, -0.08927751332521439, 0.02515970543026924, -0.03919300064444542, 0.2256067842245102, 0.01262298971414566, 0.08026249706745148, 0.06293047964572906, 0.1516181081533432, 0.006458077114075422, 0.08800442516803741, 0.021694665774703026, -0.059604812413454056, 0.0038549017626792192, -0.041733238846063614, -0.056578379124403, 0.01546162273734808, -0.06370279937982559, 0.05933649092912674, -0.1532869189977646, -0.013007978908717632, 0.051795221865177155, 0.2302311360836029, 0.05035899206995964, -0.3296174108982086, -0.0976576954126358, -0.011992366053164005, -0.023289591073989868, -0.028127595782279968, 0.007555113639682531, 0.08994056284427643, -0.1046098917722702, 0.03381730988621712, -0.05515718087553978, 0.09098540991544724, -0.0448499359190464, 0.06296070665121078, 0.055634357035160065, 0.0592949204146862, -0.015060513280332088, 0.07521317899227142, -0.28625455498695374, 0.2892460227012634, 0.0007448018877767026, 0.05711784213781357, -0.06633998453617096, -0.02064323239028454, 0.042421676218509674, 0.12831102311611176, 0.05139149725437164, 0.0028298543766140938, -0.0037446480710059404, -0.23599672317504883, -0.03922917693853378, 0.027534829452633858, 0.12298836559057236, -0.06146848946809769, 0.10174267739057541, -0.017172185704112053, 0.014598789624869823, 0.06597093492746353, -0.05746384710073471, -0.08062677830457687, -0.07476766407489777, -0.0122684882953763, -0.009521187283098698, -0.0037975560408085585, -0.056174322962760925, -0.11996898800134659, -0.10445981472730637, 0.14312800765037537, 0.00640247343108058, -0.029353171586990356, -0.1274438053369522, 0.10941991955041885, 0.0854310616850853, -0.08534443378448486, 0.04615757241845131, 0.017049377784132957, 0.04921148717403412, 0.031239768490195274, -0.05701807886362076, 0.1025991141796112, -0.06204405426979065, -0.16406773030757904, -0.06938426196575165, 0.1293199211359024, 0.05807892978191376, 0.06618636846542358, 0.0006936790305189788, 0.014471658505499363, -0.024770282208919525, -0.08391466736793518, 0.057710420340299606, -0.011187725700438023, 0.0687895119190216, 0.051389891654253006, -0.07708770781755447, 0.012450378388166428, -0.07145708799362183, -0.019003242254257202, 0.18967141211032867, 0.23348237574100494, -0.09163094311952591, 0.004957542754709721, 0.01643277145922184, -0.07141448557376862, -0.1851741522550583, 0.08153213560581207, 0.10063710808753967, 0.0090743163600564, 0.03504785895347595, -0.19312889873981476, 0.1428188979625702, 0.09652014821767807, 0.0027332312893122435, 0.07785570621490479, -0.2806274890899658, -0.12439212948083878, 0.11053930968046188, 0.146082803606987, 0.15484577417373657, -0.13108763098716736, -0.023728661239147186, -0.016153039410710335, -0.1292010098695755, 0.08421996235847473, -0.0613190233707428, 0.12540647387504578, -0.04651753976941109, 0.08487191051244736, 0.01931418664753437, -0.048933979123830795, 0.11839776486158371, 0.030217470601201057, 0.09590508043766022, -0.059371720999479294, -0.05828334763646126, 0.040452297776937485, -0.03176354244351387, 0.005956055596470833, -0.024559099227190018, 0.018533628433942795, -0.11805399507284164, -0.026443537324666977, -0.09448051452636719, 0.030674485489726067, -0.0316019207239151, -0.07019148766994476, -0.024675345048308372, 0.036338865756988525, 
0.036112185567617416, -0.013680355623364449, 0.07763179391622543, -0.02423304319381714, 0.17466048896312714, 0.0744510293006897, 0.10057658702135086, -0.058964792639017105, -0.028522029519081116, 0.013841846957802773, -0.019928058609366417, 0.051228027790784836, -0.14013122022151947, 0.030984269455075264, 0.15459896624088287, 0.02317162975668907, 0.13976486027240753, 0.08820680528879166, -0.0066579305566847324, 0.0029393278528004885, 0.07655637711286545, -0.16577313840389252, -0.04845535755157471, -0.03296082466840744, -0.08179854601621628, -0.11827318370342255, 0.06196710094809532, 0.09351413697004318, -0.06887944042682648, -0.019206615164875984, -0.03442394733428955, -0.018324503675103188, -0.08494649827480316, 0.2292172610759735, 0.07389293611049652, 0.05397647246718407, -0.09070860594511032, 0.03195665404200554, 0.056377243250608444, -0.052936214953660965, 0.00099492899607867, 0.07514386624097824, -0.09417961537837982, -0.04080288112163544, 0.08975629508495331, 0.20927490293979645, -0.0736875906586647, -0.027880677953362465, -0.15256629884243011, -0.12786802649497986, 0.08092054724693298, 0.19275344908237457, 0.11541306972503662, -0.0009722763206809759, -0.06588081270456314, 0.005131118465214968, -0.13453905284404755, 0.07926614582538605, 0.04319755733013153, 0.0752047523856163, -0.1067952811717987, 0.2054731398820877, -0.005695309955626726, 0.04778270795941353, -0.03506321460008621, 0.01968134008347988, -0.11187586933374405, 0.019163303077220917, -0.1304655522108078, -0.05433963984251022, 0.004435408860445023, -0.0029760177712887526, -0.007718822453171015, -0.0700078010559082, -0.047922246158123016, 0.002464242745190859, -0.11958937346935272, -0.0189347043633461, 0.037150077521800995, 0.051543835550546646, -0.10607301443815231, -0.04327940568327904, 0.035879384726285934, -0.05967358872294426, 0.07182266563177109, 0.05087926238775253, 0.026414763182401657, 0.054446373134851456, -0.13220210373401642, -0.000254982674960047, 0.06710537523031235, 0.016693048179149628, 0.06289669871330261, -0.09747859835624695, -0.001102522248402238, -0.006991270463913679, 0.07503200322389603, 0.04508864879608154, 0.08541808277368546, -0.12290192395448685, 0.00202788389287889, -0.02116229385137558, -0.08114495873451233, -0.0646083876490593, 0.0270478967577219, 0.07482721656560898, 0.010565072298049927, 0.1691194772720337, -0.08577211201190948, 0.0468287467956543, -0.22049640119075775, -0.01893712766468525, -0.027104467153549194, -0.11361682415008545, -0.12478649616241455, -0.07394586503505707, 0.07966407388448715, -0.030334459617733955, 0.12703096866607666, 0.013440257869660854, 0.058916278183460236, 0.024126986041665077, 0.0045183454640209675, 0.03307787701487541, 0.023847172036767006, 0.2037721872329712, 0.05437876656651497, -0.05341765284538269, 0.05489131063222885, 0.07692552357912064, 0.10207526385784149, 0.12703493237495422, 0.20097319781780243, 0.12873855233192444, -0.03166360780596733, 0.05788232758641243, 0.029522893950343132, -0.029282106086611748, -0.14758971333503723, 0.004900616593658924, -0.025064747780561447, 0.06006663292646408, -0.021586304530501366, 0.1643555760383606, 0.06679926067590714, -0.17878472805023193, 0.04781767725944519, -0.05993204563856125, -0.1061963438987732, -0.10268021374940872, -0.0021414936054497957, -0.07666900753974915, -0.15635696053504944, -0.004555907100439072, -0.12005317956209183, 0.013524743728339672, 0.09818646311759949, 0.008685034699738026, -0.017671754583716393, 0.19169247150421143, 0.010251657105982304, 0.04188954085111618, 0.07171092182397842, 
-0.00466133002191782, -0.01198003813624382, -0.11201281100511551, -0.05714656040072441, -0.021009735763072968, -0.01834273152053356, 0.025873947888612747, -0.06744389981031418, -0.08527912199497223, 0.014778207056224346, -0.015297897160053253, -0.10915467888116837, 0.021322645246982574, 0.005691906437277794, 0.0538308285176754, 0.037967633455991745, 0.016433533281087875, 0.010561278089880943, -0.016574768349528313, 0.2317621260881424, -0.0744965672492981, -0.08660013973712921, -0.11352042853832245, 0.2836923599243164, 0.06764232367277145, 0.007995778694748878, 0.01248467992991209, -0.060386162251234055, 0.0210364181548357, 0.26131731271743774, 0.18590761721134186, -0.09367094188928604, -0.002624866785481572, 0.001171495532616973, -0.005970745347440243, 0.019894225522875786, 0.1221432313323021, 0.12285062670707703, -0.0033918065018951893, -0.10348028689622879, -0.04248030483722687, -0.06751543283462524, -0.017293164506554604, -0.03209442272782326, 0.045604292303323746, 0.08211859315633774, 0.013041462749242783, -0.0405011810362339, 0.06990497559309006, -0.06371009349822998, -0.06077409163117409, 0.06543324142694473, -0.22320456802845, -0.15529541671276093, -0.039166003465652466, 0.06700044125318527, 0.0018690276192501187, 0.06498761475086212, -0.024461371824145317, -0.004557711072266102, 0.0860026478767395, -0.016492286697030067, -0.08240088075399399, -0.10365442931652069, 0.10625125467777252, -0.10722576081752777, 0.15971918404102325, -0.04005054756999016, 0.06793256849050522, 0.12185539305210114, 0.0628676787018776, -0.04247330129146576, 0.07691485434770584, 0.020754819735884666, -0.05421965569257736, 0.034834008663892746, 0.10847882926464081, -0.02547713741660118, 0.046646106988191605, 0.029043979942798615, -0.1705590933561325, 0.02689916267991066, -0.08193482458591461, -0.05956943333148956, -0.04409099370241165, -0.018864866346120834, -0.04518434777855873, 0.1258683204650879, 0.24451151490211487, -0.030873114243149757, 0.02753363363444805, -0.07479268312454224, 0.012634276412427425, 0.06515337526798248, 0.03583269193768501, -0.09073564410209656, -0.25210946798324585, 0.009699879214167595, 0.06585516035556793, -0.028560152277350426, -0.24756795167922974, -0.09831017255783081, -0.010464249178767204, -0.06812532991170883, -0.07445608824491501, 0.09401969611644745, 0.06674853712320328, 0.06096377968788147, -0.05359514057636261, -0.09433303028345108, -0.06641639769077301, 0.1708596795797348, -0.1501094400882721, -0.09334613382816315 ]
null
null
stable-baselines3
# **PPO** Agent playing **LunarLander-v2**
This is a trained model of a **PPO** agent playing **LunarLander-v2**
using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).

## Usage (with Stable-baselines3)

A minimal loading sketch; the checkpoint filename is an assumption about the repo contents:

```python
from stable_baselines3 import PPO
from huggingface_sb3 import load_from_hub

# Download the checkpoint from the Hub and load the policy.
checkpoint = load_from_hub(repo_id="vones/ppo-LunarLander-v2",
                           filename="ppo-LunarLander-v2.zip")  # assumed filename
model = PPO.load(checkpoint)
```
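To sanity-check the reported mean reward (250.61 +/- 17.77 on this card), the loaded policy can be rolled out with SB3's evaluator. The episode count and the deterministic flag below are assumptions, and the load is repeated so the snippet stands alone:

```python
import gymnasium as gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy

# Filename is an assumption about the repo contents.
model = PPO.load(load_from_hub(repo_id="vones/ppo-LunarLander-v2",
                               filename="ppo-LunarLander-v2.zip"))
env = gym.make("LunarLander-v2")
mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10,
                                          deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```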
{"library_name": "stable-baselines3", "tags": ["LunarLander-v2", "deep-reinforcement-learning", "reinforcement-learning", "stable-baselines3"], "model-index": [{"name": "PPO", "results": [{"task": {"type": "reinforcement-learning", "name": "reinforcement-learning"}, "dataset": {"name": "LunarLander-v2", "type": "LunarLander-v2"}, "metrics": [{"type": "mean_reward", "value": "250.61 +/- 17.77", "name": "mean_reward", "verified": false}]}]}]}
reinforcement-learning
vones/ppo-LunarLander-v2
[ "stable-baselines3", "LunarLander-v2", "deep-reinforcement-learning", "reinforcement-learning", "model-index", "region:us" ]
2023-11-12T17:44:15+00:00
[]
[]
TAGS #stable-baselines3 #LunarLander-v2 #deep-reinforcement-learning #reinforcement-learning #model-index #region-us
# PPO Agent playing LunarLander-v2 This is a trained model of a PPO agent playing LunarLander-v2 using the stable-baselines3 library. ## Usage (with Stable-baselines3) TODO: Add your code
[ "# PPO Agent playing LunarLander-v2\nThis is a trained model of a PPO agent playing LunarLander-v2\nusing the stable-baselines3 library.", "## Usage (with Stable-baselines3)\nTODO: Add your code" ]
[ "TAGS\n#stable-baselines3 #LunarLander-v2 #deep-reinforcement-learning #reinforcement-learning #model-index #region-us \n", "# PPO Agent playing LunarLander-v2\nThis is a trained model of a PPO agent playing LunarLander-v2\nusing the stable-baselines3 library.", "## Usage (with Stable-baselines3)\nTODO: Add your code" ]
[ 39, 41, 17 ]
[ "passage: TAGS\n#stable-baselines3 #LunarLander-v2 #deep-reinforcement-learning #reinforcement-learning #model-index #region-us \n# PPO Agent playing LunarLander-v2\nThis is a trained model of a PPO agent playing LunarLander-v2\nusing the stable-baselines3 library.## Usage (with Stable-baselines3)\nTODO: Add your code" ]
[ 0.03942384943366051, 0.04900386184453964, -0.005304091144353151, 0.026427261531352997, 0.107408307492733, -0.026511888951063156, 0.11188238859176636, 0.0814051404595375, 0.10722193866968155, 0.04762078449130058, 0.08338645845651627, 0.06030960753560066, 0.05080918222665787, 0.2571701407432556, 0.04754156619310379, -0.22987541556358337, 0.036159250885248184, -0.04869936779141426, 0.12395193427801132, 0.07178173214197159, -0.0038484656251966953, -0.06485428661108017, 0.020415637642145157, -0.013290755450725555, 0.05367108806967735, 0.04282612353563309, -0.01716216839849949, -0.08207534998655319, 0.07169748842716217, -0.06345846503973007, 0.06986866891384125, 0.07677983492612839, 0.13218913972377777, -0.17832116782665253, 0.029566360637545586, 0.02571309357881546, -0.07189024239778519, 0.01342033501714468, 0.008019951172173023, 0.05120139941573143, 0.17303818464279175, 0.019879888743162155, 0.07844575494527817, -0.0025605305563658476, -0.15412317216396332, -0.018950799480080605, 0.0436202734708786, 0.12546207010746002, 0.08808347582817078, 0.04605821147561073, 0.01970590092241764, 0.17503218352794647, -0.054352790117263794, -0.028833400458097458, 0.21759237349033356, -0.2881564497947693, -0.031460098922252655, 0.321048766374588, 0.06997483223676682, 0.09725230932235718, -0.07540661096572876, -0.03619609400629997, 0.007783263456076384, -0.013137873262166977, -0.028666524216532707, -0.07447073608636856, 0.17313385009765625, 0.05152064561843872, -0.05057951435446739, -0.09541505575180054, 0.16948209702968597, 0.006921638268977404, 0.0018855923553928733, -0.019282981753349304, 0.009060598909854889, 0.07402525842189789, -0.016097044572234154, -0.07255112379789352, 0.057438433170318604, 0.05330665782094002, 0.019649166613817215, -0.1435653269290924, -0.10762494057416916, -0.022740179672837257, -0.008012006990611553, 0.17786912620067596, -0.009255532175302505, 0.042902372777462006, 0.003065188182517886, 0.10384012013673782, -0.12480384111404419, -0.03354184702038765, -0.0454259067773819, -0.07565800100564957, -0.0223417766392231, -0.02058211714029312, -0.03580251708626747, 0.07184842973947525, 0.11971849203109741, 0.027368178591132164, 0.09350208193063736, 0.047715865075588226, -0.03206788748502731, 0.06343851238489151, 0.05555703118443489, 0.14222665131092072, 0.05807621404528618, 0.012854371219873428, 0.13179877400398254, 0.055213116109371185, 0.033023182302713394, -0.0613492950797081, -0.18252409994602203, 0.07489913702011108, -0.07031869143247604, 0.007941240444779396, 0.12051256000995636, -0.04480670019984245, -0.1183447614312172, -0.037500523030757904, -0.017392054200172424, -0.06224250793457031, -0.025395862758159637, 0.0547584593296051, -0.02883218228816986, -0.03973718360066414, 0.0011496668448671699, 0.09384800493717194, 0.00953749567270279, -0.1752052903175354, 0.03303423151373863, -0.025042934343218803, -0.10782608389854431, 0.009975161403417587, 0.0022444494534283876, 0.03394931182265282, 0.04408763721585274, -0.11822668462991714, -0.30899152159690857, -0.07652641832828522, 0.05490870401263237, -0.06516939401626587, -0.18425025045871735, -0.13193942606449127, 0.02454492449760437, -0.09037084132432938, -0.044885024428367615, -0.12759265303611755, -0.028549788519740105, 0.01743689924478531, 0.011519349180161953, 0.10758619755506516, -0.0106219332665205, -0.012188062071800232, -0.1571401208639145, 0.008273907005786896, -0.20951123535633087, 0.0890483483672142, -0.019150104373693466, 0.037884220480918884, -0.032381169497966766, -0.07404014468193054, 0.030707746744155884, 
0.052499737590551376, -0.01474119070917368, 0.13510210812091827, -0.15592676401138306, -0.03691192343831062, -0.007996266707777977, -0.13611900806427002, -0.04786273464560509, -0.10358831286430359, -0.04357128217816353, 0.13354332745075226, 0.018664736300706863, 0.15356586873531342, -0.08709818124771118, -0.0722038671374321, 0.20489206910133362, -0.010411538183689117, -0.12820468842983246, -0.076752208173275, 0.10165707021951675, 0.021510310471057892, -0.056606587022542953, -0.02523270808160305, -0.1839766949415207, -0.0152357779443264, -0.04550420492887497, -0.047039128839969635, 0.01796751655638218, -0.010888241231441498, 0.13837894797325134, 0.08494598418474197, 0.05018039792776108, -0.06086122244596481, -0.006730288732796907, 0.10779471695423126, 0.08823856711387634, 0.008680110797286034, 0.023406028747558594, -0.05774238705635071, 0.09552932530641556, -0.04003755748271942, -0.0142367510125041, -0.08283266425132751, -0.036246106028556824, -0.026256313547492027, 0.17507147789001465, 0.09440762549638748, 0.2257927656173706, 0.09567736834287643, 0.039160262793302536, 0.031270865350961685, -0.13181598484516144, -0.1425403207540512, -0.0017254541162401438, 0.09020978957414627, -0.14270411431789398, -0.04119925573468208, -0.08974775671958923, -0.17768175899982452, -0.12202505767345428, 0.0006432619411498308, -0.17960017919540405, 0.06390921026468277, 0.05408334732055664, -0.035177867859601974, 0.03272094577550888, 0.13032332062721252, -0.011533179320394993, -0.03967514634132385, 0.0831870287656784, 0.0379033200442791, -0.041234664618968964, -0.021742934361100197, 0.11885567009449005, 0.15673065185546875, 0.13124459981918335, -0.03511447086930275, 0.004914294462651014, 0.07076404243707657, -0.02309088408946991, 0.06539414077997208, 0.0558244064450264, 0.20973342657089233, 0.188301220536232, 0.038996949791908264, 0.008822928182780743, -0.07048165798187256, 0.0855446457862854, -0.0742373839020729, -0.14302679896354675, -0.05579735338687897, 0.08729292452335358, 0.016605578362941742, 0.023469142615795135, 0.08711627870798111, 0.024545932188630104, 0.09132762253284454, 0.15968108177185059, 0.01990218088030815, -0.09659269452095032, -0.050218869000673294, 0.01175848301500082, 0.027713103219866753, 0.04794301092624664, -0.04514073207974434, -0.00937939714640379, 0.017020760104060173, -0.10303554683923721, 0.031789086759090424, -0.1413339376449585, -0.1358717679977417, 0.044326696544885635, 0.003906996920704842, 0.010907664895057678, 0.02786896750330925, -0.0038291432429105043, 0.019039705395698547, 0.04351753741502762, -0.06975466758012772, 0.047416772693395615, -0.024745507165789604, -0.020031947642564774, 0.03340689837932587, -0.057257164269685745, -0.205775648355484, -0.17696654796600342, 0.00013708483311347663, -0.09910997003316879, 0.10194740444421768, 0.018308809027075768, -0.12373185902833939, 0.047737859189510345, -0.05822649225592613, 0.027574289590120316, -0.01875593699514866, -0.049130141735076904, 0.10507171601057053, 0.1525275856256485, -0.016146350651979446, 0.018018173053860664, -0.04865182936191559, -0.10157987475395203, -0.19632206857204437, 0.0691583976149559, 0.04680244252085686, 0.014610917307436466, 0.10669491440057755, 0.018072687089443207, 0.02367905154824257, -0.007674071006476879, -0.016521066427230835, -0.011659215204417706, -0.08781040459871292, 0.31909599900245667, 0.04510033503174782, -0.025173069909214973, 0.02041010931134224, -0.0043001663871109486, -0.028083480894565582, 0.03263787180185318, -0.0985708013176918, -0.07548979669809341, -0.08774089068174362, 
-0.04367410019040108, -0.09784720093011856, 0.053299110382795334, 0.05916472524404526, 0.003188040340319276, -0.07727594673633575, 0.04221395403146744, 0.11369874328374863, -0.0923808291554451, -0.07137343287467957, 0.07477962225675583, 0.0972946360707283, -0.07331304252147675, 0.00012658814375754446, 0.00874367356300354, 0.023951783776283264, 0.037102166563272476, 0.06778035312891006, -0.03966575115919113, 0.08589404821395874, -0.19917890429496765, 0.0372927263379097, 0.106058269739151, 0.023754918947815895, 0.0638108178973198, 0.07643651217222214, -0.1058402881026268, -0.008500572293996811, -0.032518330961465836, -0.21341575682163239, 0.1668180525302887, 0.1355515867471695, 0.06788124144077301, -0.025637222453951836, -0.00461410591378808, -0.0649740919470787, 0.05773647129535675, 0.02723747305572033, -0.14758841693401337, 0.004883295856416225, 0.06064270809292793, 0.026899009943008423, 0.01614922471344471, 0.07971042394638062, 0.014697225764393806, -0.1801026314496994, -0.014406266622245312, 0.10730406641960144, 0.002390873385593295, 0.0053148469887673855, -0.03175045922398567, -0.1755964607000351, 0.0751047357916832, 0.004285442177206278, 0.07233936339616776, -0.1676585078239441, 0.14297930896282196, -0.10089799761772156, 0.07726949453353882, -0.004285062663257122, -0.021311495453119278, 0.02507244050502777, -0.0541163794696331, 0.15163759887218475, 0.01058570109307766, -0.021810131147503853, -0.1200498715043068, -0.1717042326927185, -0.019227758049964905, -0.11788936704397202, -0.11679866164922714, 0.050424277782440186, 0.062185097485780716, 0.04923136904835701, -0.061147067695856094, 0.1518532931804657, -0.047422297298908234, 0.060713399201631546, -0.06893875449895859, -0.06755045056343079, 0.03764858841896057, -0.12588608264923096, -0.08176055550575256, 0.05573027580976486, 0.19166934490203857, 0.15833087265491486, -0.02816431224346161, -0.03472423925995827, -0.047419581562280655, -0.006212298292666674, -0.007802055217325687, 0.0275666993111372, 0.023223137483000755, 0.07315318286418915, -0.07681374251842499, -0.11649256944656372, 0.033787861466407776, -0.06713802367448807, -0.055589709430933, -0.015439179725944996, 0.1513158082962036, 0.04671623185276985, 0.07720734924077988, -0.018946662545204163, 0.03887668624520302, -0.001724981120787561, -0.056474871933460236, 0.16197094321250916, 0.03885216265916824, -0.05193585529923439, 0.06837689876556396, 0.053174007683992386, 0.043745119124650955, 0.03011113777756691, -0.026783017441630363, 0.206032395362854, 0.1980147808790207, 0.014206883497536182, 0.2175983190536499, 0.03177616000175476, -0.03772832080721855, -0.1300560086965561, -0.065880686044693, -0.006372632458806038, 0.03559038043022156, 0.08070417493581772, -0.18207235634326935, -0.015011128038167953, -0.05689644813537598, -0.034518610686063766, -0.15059494972229004, -0.28553900122642517, -0.05957856774330139, 0.20075850188732147, 0.14706264436244965, 0.27519428730010986, -0.10432573407888412, 0.035197313874959946, 0.02663275972008705, -0.04912831634283066, -0.006501141935586929, 0.00018665487004909664, 0.10268618166446686, -0.15421873331069946, 0.1176437959074974, 0.08486983180046082, -0.019002694636583328, 0.01058861706405878, -0.1619086116552353, 0.00936629343777895, -0.12191236019134521, 0.05354422330856323, 0.1400289237499237, -0.048128653317689896, -0.054873593151569366, 0.14033560454845428, -0.024562934413552284, -0.22685599327087402, -0.04648222774267197, -0.043600670993328094, -0.010640020482242107, 0.026607351377606392, -0.1013401448726654, 0.04101909324526787, 
0.1330099105834961, 0.009380043484270573, 0.1147187277674675, 0.11749245226383209, -0.052566803991794586, 0.10792597383260727, 0.2257719188928604, -0.018785694614052773, 0.04689010605216026, -0.12743118405342102, -0.0012336712097749114, -0.028270328417420387, 0.013657891191542149, -0.09504974633455276, -0.09938385337591171, 0.02366873063147068, 0.02872389927506447, 0.009118586778640747, 0.0921793207526207, -0.029922157526016235, 0.0759170651435852, 0.06817561388015747, -0.13014446198940277, -0.16288450360298157, 0.015828335657715797, -0.007344507612287998, 0.08354310691356659, 0.00027861111448146403, 0.08878035843372345, -0.11932205408811569, -0.018093237653374672, -0.03153328225016594, -0.03319635987281799, -0.130486860871315, -0.07138993591070175, 0.06156524643301964, 0.028095467016100883, -0.06602972000837326, 0.1398407518863678, 0.026440169662237167, 0.15942534804344177, 0.049197953194379807, 0.012499804608523846, 0.07227300107479095, -0.05345509201288223, 0.1283530443906784, 0.13818155229091644, -0.00868943240493536, -0.05460423603653908, -0.1013643890619278, -0.10236792266368866, 0.08925779908895493, -0.05773641914129257, 0.07476430386304855, -0.14885357022285461, -0.06675903499126434, 0.015772046521306038, 0.016141414642333984, -0.09562095999717712, 0.02571965754032135, -0.01625603251159191, -0.18119946122169495, 0.056570518761873245, -0.048285093158483505, 0.0440407395362854, -0.06347788125276566, -0.1110161691904068, -0.17226378619670868, 0.06091433763504028, 0.08593481779098511, -0.053876690566539764, -0.12229149043560028, 0.011023230850696564, -0.00012518465518951416, -0.06341652572154999, -0.05023367330431938, 0.09722746908664703, -0.11020902544260025, 0.031452205032110214, -0.012567701749503613, 0.08853451162576675, -0.03510405123233795, -0.011538895778357983, 0.044220831245183945, -0.08039166033267975, -0.009481523185968399, 0.03534642979502678, -0.026372017338871956, -0.04127239063382149, -0.2689029574394226, 0.0036654395516961813, 0.0341104120016098, 0.02497158572077751, 0.07856601476669312, 0.011906822212040424, 0.021174922585487366, 0.03993808850646019, -0.15396519005298615, -0.013395369984209538, 0.14574195444583893, -0.07689505815505981, -0.022186370566487312, 0.05703273415565491, -0.09054436534643173, 0.013882770203053951, -0.030287226662039757, 0.1345842480659485, 0.023923413828015327, 0.06404478847980499, -0.0851147472858429, 0.10106813907623291, -0.1451139897108078, -0.04998219385743141, -0.01244612317532301, 0.09761348366737366, 0.07019034773111343, -0.10272270441055298, 0.014697125181555748, 0.04210108891129494, 0.19416837394237518, 0.016384804621338844, -0.0356343574821949, -0.03396720811724663, 0.004015897400677204, 0.22076453268527985, 0.03044266067445278, 0.10457023978233337, 0.07281364500522614, -0.026583973318338394, 0.12624378502368927, 0.09929762035608292, 0.11280370503664017, -0.055645186454057693, 0.13904185593128204, 0.04667386785149574, 0.038641396909952164, 0.0614289753139019, 0.06836545467376709, 0.09098632633686066, -0.0008288522367365658, 0.1138714924454689, 0.013811973854899406, -0.02422109805047512, -0.021335409954190254, 0.17759373784065247, 0.10501719266176224, -0.14769648015499115, 0.029047364369034767, -0.01258957851678133, 0.039933037012815475, -0.014194529503583908, -0.15634691715240479, -0.07240267097949982, -0.3315149247646332, 0.1226184144616127, -0.07119352370500565, 0.019930170848965645, 0.007913772016763687, -0.037425633519887924, -0.03296699747443199, -0.04477746784687042, 0.13151589035987854, -0.013641550205647945, 
-0.006079165264964104, -0.04815853759646416, -0.015360191464424133, -0.11607866734266281, -0.11200575530529022, -0.013207737356424332, -0.13671602308750153, -0.010119039565324783, 0.05595948174595833, 0.003977729007601738, 0.01821410097181797, -0.03142618387937546, 0.0024383175186812878, 0.06541839241981506, -0.05751744285225868, 0.056182678788900375, 0.12097269296646118, 0.08766137808561325, -0.1058853268623352, 0.031048951670527458, 0.2011747509241104, 0.04359564557671547, -0.12483977526426315, 0.01449228823184967, 0.1819491684436798, 0.004885740112513304, 0.017068125307559967, -0.006097703706473112, -0.0540788508951664, -0.07554277032613754, 0.1251034289598465, 0.08296554535627365, -0.09985227137804031, 0.015833314508199692, -0.0726347416639328, -0.01594804972410202, -0.06374675035476685, 0.10130585730075836, 0.09538925439119339, 0.04440245032310486, -0.10621760785579681, -0.08487539738416672, -0.10891728103160858, 0.040588874369859695, -0.08629853278398514, -0.07311757653951645, 0.09629398584365845, -0.07057105004787445, -0.07029950618743896, 0.025521177798509598, -0.17978744208812714, -0.009467960335314274, 0.1711762249469757, -0.24654000997543335, -0.0916430801153183, -0.10857923328876495, 0.14477859437465668, 0.016497576609253883, 0.1013975441455841, -0.006207061931490898, -0.007889035157859325, -0.20577777922153473, 0.024890204891562462, -0.05293011665344238, -0.02073732763528824, 0.07814782857894897, -0.09476397186517715, 0.22629831731319427, -0.08276885002851486, 0.020940175279974937, 0.012659613974392414, 0.0870661810040474, -0.030675338581204414, 0.09283176809549332, -0.03660329803824425, -0.12576518952846527, -0.03620953485369682, 0.03001813031733036, 0.013904244638979435, 0.10071761906147003, 0.09772487729787827, -0.03414725139737129, 0.03389119729399681, 0.09747414290904999, 0.04172342270612717, -0.023843804374337196, 0.0360250361263752, -0.17077107727527618, 0.02182629331946373, -0.018498148769140244, -0.06935930997133255, 0.03687669709324837, -0.06603235751390457, 0.1639697551727295, 0.04022442549467087, 0.0670473501086235, -0.036152735352516174, 0.0073931049555540085, -0.014454689808189869, -0.013775371946394444, -0.026180334389209747, -0.17259705066680908, -0.10422050207853317, -0.1347656100988388, -0.012701659463346004, -0.034971047192811966, 0.04591470584273338, 0.023234914988279343, -0.0003200018545612693, -0.014577031135559082, -0.12090865522623062, 0.04360328987240791, 0.11146783083677292, -0.04631396010518074, -0.026193076744675636 ]
null
null
peft
## Training procedure The following `bitsandbytes` quantization config was used during training: - load_in_8bit: False - load_in_4bit: True - llm_int8_threshold: 6.0 - llm_int8_skip_modules: None - llm_int8_enable_fp32_cpu_offload: False - llm_int8_has_fp16_weight: False - bnb_4bit_quant_type: nf4 - bnb_4bit_use_double_quant: True - bnb_4bit_compute_dtype: bfloat16 ### Framework versions - PEFT 0.4.0
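For illustration, a minimal sketch (not part of the original card) of how the quantization settings above map onto `transformers.BitsAndBytesConfig` when loading a base model for this PEFT adapter. The base checkpoint name is an assumption inferred from the repository name, and `device_map="auto"` is an added convenience; only the adapter id is taken from this repository.

```python
# Hedged sketch: the bitsandbytes config listed above, expressed in code.
# The base checkpoint is an assumption; the card does not name it.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_8bit=False,
    load_in_4bit=True,
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

base = AutoModelForCausalLM.from_pretrained(
    "beomi/KoAlpaca-Polyglot-12.8B",  # assumed base model, not stated in the card
    quantization_config=bnb_config,
    device_map="auto",
)
# Attach the LoRA/PEFT adapter from this repository on top of the 4-bit base.
model = PeftModel.from_pretrained(base, "runse/OPS-koalpaca-polyglot-12.8b")
```

The nf4 quant type with double quantization and a bfloat16 compute dtype is the standard QLoRA loading recipe; the int8 flags simply restate the defaults recorded in the config above.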
{"library_name": "peft"}
null
runse/OPS-koalpaca-polyglot-12.8b
[ "peft", "tensorboard", "region:us" ]
2023-11-12T17:49:35+00:00
[]
[]
TAGS #peft #tensorboard #region-us
## Training procedure The following 'bitsandbytes' quantization config was used during training: - load_in_8bit: False - load_in_4bit: True - llm_int8_threshold: 6.0 - llm_int8_skip_modules: None - llm_int8_enable_fp32_cpu_offload: False - llm_int8_has_fp16_weight: False - bnb_4bit_quant_type: nf4 - bnb_4bit_use_double_quant: True - bnb_4bit_compute_dtype: bfloat16 ### Framework versions - PEFT 0.4.0
[ "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: bfloat16", "### Framework versions\n\n\n- PEFT 0.4.0" ]
[ "TAGS\n#peft #tensorboard #region-us \n", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: bfloat16", "### Framework versions\n\n\n- PEFT 0.4.0" ]
[ 13, 154, 11 ]
[ "passage: TAGS\n#peft #tensorboard #region-us \n## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: bfloat16### Framework versions\n\n\n- PEFT 0.4.0" ]
[ -0.08857559412717819, 0.05881764739751816, -0.0026079299859702587, 0.13755552470684052, 0.08889607340097427, 0.059008270502090454, 0.13547134399414062, 0.1311831772327423, 0.044567741453647614, 0.0979868620634079, 0.08869192004203796, 0.04804614931344986, 0.07064280658960342, 0.11643702536821365, -0.027466533705592155, -0.007760725449770689, 0.022867213934659958, 0.03728068992495537, -0.018920758739113808, 0.07775410264730453, 0.04168020561337471, -0.051059573888778687, 0.02436150051653385, -0.0731697753071785, -0.2033328413963318, 0.016233695670962334, 0.02250838652253151, 0.015323947183787823, 0.047530729323625565, 0.05831850320100784, 0.08502998948097229, -0.0021510100923478603, -0.03142643719911575, -0.17826899886131287, -0.005617802496999502, 0.12006574869155884, -0.020953098312020302, 0.08850351721048355, -0.058290597051382065, 0.12280753999948502, -0.10240237414836884, -0.03421471640467644, 0.000020409363060025498, 0.015055901370942593, -0.06344237923622131, -0.12384055554866791, -0.06603390723466873, 0.08165279030799866, 0.020725825801491737, 0.04981930926442146, -0.011181176640093327, 0.17272906005382538, -0.1010303720831871, 0.08554153889417648, 0.06364434957504272, -0.23867321014404297, -0.0431022010743618, 0.1273168921470642, -0.02726932056248188, 0.15285944938659668, -0.06857684999704361, -0.0758073702454567, 0.08651497960090637, 0.041329432278871536, -0.027111109346151352, -0.0078104715794324875, -0.09380494058132172, 0.01772884838283062, -0.1297898292541504, -0.02162010408937931, 0.13263367116451263, 0.02260524593293667, -0.025584060698747635, -0.04241345822811127, -0.09581507742404938, -0.3568532466888428, 0.027106616646051407, -0.036586351692676544, -0.06398586928844452, 0.03467245399951935, -0.014996837824583054, -0.042818181216716766, -0.029559709131717682, -0.08063934743404388, -0.0472135916352272, 0.10051370412111282, 0.041169650852680206, 0.038005929440259933, -0.020512312650680542, 0.1116136685013771, -0.0943245142698288, -0.04985314980149269, -0.013400684110820293, -0.023822229355573654, -0.053918275982141495, -0.009122515097260475, -0.06154436618089676, 0.11400038003921509, 0.07255126535892487, 0.07183817774057388, -0.16573062539100647, 0.12426125258207321, -0.05143781006336212, 0.07810834050178528, -0.04293813183903694, 0.027318179607391357, -0.12462438642978668, 0.11142608523368835, 0.005720790009945631, 0.1325158178806305, 0.03492449223995209, -0.03704923391342163, -0.07959315180778503, -0.0166986845433712, 0.13663901388645172, -0.003510440932586789, -0.09162122011184692, 0.015629425644874573, -0.12642492353916168, -0.03609265387058258, 0.04168735072016716, -0.09069520980119705, 0.023807577788829803, 0.04295193776488304, -0.06160855293273926, -0.009916816838085651, 0.09698886424303055, -0.06161128729581833, -0.04498487710952759, -0.03604581207036972, -0.09810836613178253, -0.0009207769762724638, -0.09947430342435837, -0.13412149250507355, 0.07276854664087296, -0.1965678334236145, -0.007912084460258484, -0.048870749771595, -0.050752464681863785, 0.027991795912384987, 0.01644265279173851, -0.0932701900601387, 0.07657476514577866, -0.07616331428289413, -0.12007217109203339, -0.029220636934041977, 0.005535260774195194, 0.016955554485321045, -0.017468711361289024, 0.08419115841388702, 0.042518217116594315, 0.10463716834783554, -0.16259139776229858, -0.0008707018569111824, 0.007267249748110771, 0.06490467488765717, 0.003482565050944686, 0.12284403294324875, -0.10877601057291031, -0.03783849999308586, -0.06306658685207367, -0.04662378877401352, -0.1388189047574997, 
0.006197606213390827, 0.14954803884029388, 0.08673550933599472, -0.1805488020181656, -0.02157335914671421, 0.0914347767829895, -0.024447960779070854, -0.09204104542732239, 0.16153952479362488, -0.05300998315215111, 0.11454536765813828, -0.024645570665597916, 0.10425994545221329, 0.2413722723722458, -0.10726157575845718, -0.020372290164232254, 0.11163581162691116, 0.06254274398088455, -0.031452640891075134, -0.010331274941563606, 0.06126291677355766, -0.09380857646465302, 0.03511371836066246, 0.08600933104753494, 0.03369022160768509, -0.03993801772594452, -0.06295906752347946, -0.035640232264995575, -0.05071563646197319, 0.11197087913751602, 0.03789214789867401, 0.032866694033145905, -0.07034682482481003, -0.08603149652481079, 0.15205569565296173, 0.1142747700214386, -0.04540444537997246, -0.0015762010589241982, -0.10895416140556335, 0.026677867397665977, -0.10631950199604034, 0.021152909845113754, -0.13850779831409454, 0.006305752322077751, 0.0699213445186615, 0.004819297231733799, 0.021809671074151993, 0.08291218429803848, 0.07332752645015717, 0.024261074140667915, -0.05289885029196739, 0.018575703725218773, -0.02536243014037609, -0.007301475387066603, -0.10964083671569824, -0.10186523199081421, 0.006034119985997677, -0.030525704845786095, 0.20584452152252197, -0.16018904745578766, 0.042810264974832535, 0.12097053974866867, 0.009309107437729836, 0.001051859580911696, -0.02630029246211052, -0.05874010920524597, 0.08712669461965561, -0.03121154196560383, -0.036352574825286865, 0.0377131886780262, 0.024687716737389565, -0.07431130856275558, -0.11496629565954208, -0.10756157338619232, 0.05221369490027428, 0.1354491114616394, 0.06979009509086609, -0.0698419138789177, -0.059079207479953766, -0.03497582674026489, -0.04966628924012184, 0.0506071001291275, -0.04224354028701782, 0.04675033688545227, 0.005749944131821394, 0.06916376203298569, -0.10512419790029526, -0.04059508442878723, 0.07083739340305328, -0.018917527049779892, -0.05428758263587952, 0.11054817587137222, 0.04365398734807968, -0.09803837537765503, 0.0802619531750679, 0.07627198100090027, -0.16724276542663574, 0.14160828292369843, -0.00436848308891058, -0.026587029919028282, -0.09122602641582489, 0.17504137754440308, 0.029749935492873192, 0.1451154500246048, -0.14746206998825073, 0.08909809589385986, -0.008787349797785282, -0.00666070356965065, 0.08802465349435806, -0.214625284075737, 0.010325615294277668, -0.03634456917643547, -0.08066371828317642, -0.007841725833714008, -0.010212081484496593, -0.0000819158012745902, 0.04392525181174278, -0.017533639445900917, 0.032559849321842194, 0.14542092382907867, -0.01946389302611351, -0.08632726967334747, 0.1682133972644806, -0.1953413039445877, -0.2120850682258606, -0.2195560783147812, 0.010237992741167545, -0.12701532244682312, -0.02546410821378231, -0.05390758067369461, -0.0634990781545639, 0.04835813120007515, -0.08508995920419693, -0.04765698313713074, -0.008972669020295143, 0.009486616589128971, 0.036137327551841736, 0.011314795352518559, 0.16194264590740204, -0.08474798500537872, 0.038535553961992264, 0.03883276879787445, -0.027649974450469017, 0.08881161361932755, -0.06670596450567245, -0.007832796312868595, 0.14491762220859528, -0.01952093094587326, 0.025391222909092903, 0.0051254634745419025, 0.24765419960021973, -0.01140174176543951, 0.04934753477573395, 0.06264631450176239, -0.011946866288781166, 0.055732786655426025, 0.08893974870443344, 0.026265207678079605, -0.08001066744327545, 0.05175973102450371, 0.051763150840997696, -0.0934494286775589, -0.15635287761688232, 
-0.01790229044854641, -0.0611664317548275, 0.03247804194688797, 0.07832753658294678, 0.08599554002285004, 0.08887789398431778, 0.10704740136861801, 0.048086024820804596, 0.11442136764526367, -0.0005380088114179671, -0.014531860128045082, 0.10065554082393646, -0.01417609490454197, 0.0722677931189537, -0.03470703586935997, 0.018610861152410507, 0.057642899453639984, 0.16579140722751617, 0.06396520137786865, -0.08673252165317535, 0.03208863362669945, 0.05720125883817673, 0.24775172770023346, -0.00226561538875103, 0.08906242996454239, -0.0688658356666565, -0.017541470006108284, 0.004697687923908234, -0.049362942576408386, -0.05092930793762207, 0.02468475140631199, -0.008965671062469482, 0.07350382208824158, 0.008504008874297142, 0.00027810208848677576, 0.08319200575351715, 0.1018294095993042, 0.16033977270126343, -0.28413134813308716, -0.0984739363193512, -0.008599250577390194, 0.12379521131515503, -0.08858294785022736, 0.010772527195513248, 0.23843687772750854, 0.044037170708179474, -0.08783646672964096, -0.0499703586101532, 0.034280385822057724, -0.03230898827314377, 0.008375859819352627, 0.11840213090181351, 0.13691119849681854, -0.00168638676404953, 0.07788778841495514, -0.2804397642612457, 0.02405991405248642, 0.0793352723121643, 0.05068552494049072, -0.040130503475666046, 0.014677352271974087, -0.054131850600242615, -0.06511882692575455, 0.03542979061603546, -0.007645082660019398, 0.17049454152584076, -0.2408132553100586, -0.08234930038452148, -0.009164228104054928, 0.1031625047326088, 0.06085269898176193, 0.03334870561957359, 0.04656508192420006, 0.046702805906534195, 0.07815928757190704, 0.04366886243224144, -0.03814321383833885, -0.10690965503454208, 0.008273222483694553, 0.15751545131206512, -0.20673446357250214, -0.0414155088365078, -0.07489694654941559, -0.057088252156972885, 0.07918870449066162, -0.15903246402740479, -0.04880382865667343, -0.061119552701711655, 0.03814389184117317, 0.11658566445112228, -0.028931371867656708, -0.016219250857830048, -0.014965689741075039, 0.030288061127066612, -0.04842277616262436, -0.10073015093803406, 0.1269586831331253, -0.0392460860311985, -0.10439517349004745, -0.040311429649591446, 0.13776597380638123, 0.06083875149488449, -0.009049157612025738, -0.0754890888929367, -0.06300915032625198, 0.0414666011929512, -0.1308290958404541, 0.021233532577753067, 0.0751592293381691, -0.05475099757313728, 0.06107848882675171, -0.10773513466119766, 0.17287494242191315, -0.04027008265256882, 0.09591282904148102, 0.06621057540178299, 0.27530062198638916, -0.09943555295467377, -0.010561461560428143, 0.09865163266658783, -0.035745665431022644, -0.25540071725845337, 0.09109587967395782, 0.042943648993968964, 0.05418100953102112, -0.04066837206482887, -0.16511894762516022, 0.05529646947979927, 0.09165537357330322, 0.0019447734812274575, 0.23740997910499573, -0.3273702561855316, -0.07880958169698715, 0.046039316803216934, 0.07327010482549667, 0.11221806704998016, -0.05853918939828873, -0.007521896157413721, 0.020281165838241577, -0.0034121640492230654, 0.1392396092414856, -0.16947658360004425, 0.11203836649656296, -0.0007851328700780869, 0.02402743138372898, 0.013245020061731339, -0.04990648850798607, 0.14044511318206787, 0.004857753403484821, 0.0944424420595169, 0.016777485609054565, -0.024910125881433487, 0.06901083886623383, -0.0791291892528534, 0.045499905943870544, -0.07193344086408615, 0.0699193999171257, -0.04803619533777237, 0.004681998863816261, -0.05098540335893631, 0.003089254954829812, -0.07402677834033966, -0.053102947771549225, -0.10987741500139236, 
0.06930546462535858, -0.02291909046471119, -0.03492497280240059, -0.009150068275630474, 0.05447538569569588, 0.06120287999510765, 0.450122594833374, -0.049082428216934204, -0.03740329295396805, 0.04586318880319595, 0.08556415885686874, -0.028910057619214058, 0.11058349907398224, -0.1396348774433136, 0.0395297035574913, 0.12371736764907837, -0.004300600849092007, 0.12093222886323929, 0.08749991655349731, -0.12879297137260437, -0.004798395559191704, 0.04326581582427025, -0.14341138303279877, -0.09564485400915146, -0.0300280824303627, -0.03260510414838791, -0.09429169446229935, 0.04393122345209122, 0.1220446452498436, -0.036646902561187744, 0.05669097602367401, 0.048252757638692856, 0.034725941717624664, -0.12377294152975082, 0.1723400503396988, 0.047656431794166565, 0.06229957193136215, -0.07570845633745193, 0.11101776361465454, 0.024255143478512764, -0.033837880939245224, 0.051774993538856506, -0.016340870410203934, -0.08631407469511032, -0.00148488930426538, -0.05010408163070679, -0.05160639062523842, 0.11398708820343018, -0.05543050169944763, -0.07297147065401077, -0.0917937159538269, 0.0044320072047412395, 0.08702437579631805, 0.046522703021764755, 0.09344752877950668, -0.03081660158932209, 0.01708861067891121, -0.1340371072292328, 0.09904660284519196, -0.020519979298114777, 0.03612791374325752, -0.14884638786315918, 0.08137022703886032, -0.015752002596855164, 0.06684833019971848, -0.02550986036658287, -0.007525433320552111, -0.20371520519256592, 0.022915959358215332, -0.07297757267951965, 0.011627870611846447, 0.041570115834474564, 0.028335710987448692, 0.012323004193603992, 0.06454917788505554, -0.04112742096185684, 0.03563009947538376, -0.04174695536494255, -0.036641985177993774, 0.05131080746650696, -0.016527947038412094, -0.04882132261991501, -0.03743983060121536, 0.030097689479589462, -0.10182017832994461, 0.05682086572051048, 0.020174171775579453, -0.05489595606923103, 0.058231402188539505, 0.03730843961238861, 0.03653288260102272, 0.1038179099559784, 0.04115581884980202, 0.06062908470630646, -0.040202271193265915, 0.03198482468724251, -0.020241733640432358, 0.004385208711028099, 0.04107489809393883, 0.1137855052947998, -0.0437944233417511, -0.04817081242799759, -0.15249793231487274, -0.00038397349999286234, -0.05828358978033066, 0.07397250086069107, 0.14092953503131866, 0.13238728046417236, 0.07416033744812012, -0.08547154068946838, -0.028622645884752274, -0.14298300445079803, -0.07308419048786163, 0.06591270864009857, -0.05408303439617157, 0.012810468673706055, -0.022406190633773804, 0.07040965557098389, -0.024904640391469002, 0.13030773401260376, -0.06880105286836624, -0.11105124652385712, -0.05163251608610153, -0.1972569227218628, -0.13208700716495514, 0.006451729219406843, 0.24565601348876953, -0.006037991959601641, -0.022731496021151543, -0.08512300252914429, 0.024661341682076454, 0.07612370699644089, 0.167771115899086, 0.04087698459625244, 0.08118001371622086, -0.14379467070102692, 0.1325668841600418, 0.07075284421443939, -0.054252929985523224, 0.09969701617956161, 0.30780228972435, -0.09363805502653122, 0.03391190245747566, -0.09407040476799011, 0.10478103160858154, 0.04076147451996803, -0.11170412600040436, 0.0038014736492186785, -0.025606155395507812, -0.16090704500675201, -0.1186407133936882, -0.007087644189596176, -0.06542149186134338, -0.15243546664714813, -0.023781459778547287, -0.09933245927095413, -0.09059391915798187, 0.07868962734937668, 0.06372690200805664, -0.03779353201389313, 0.22938954830169678, -0.03903815150260925, 0.04364216327667236, 0.02072872966527939, 
-0.000022951986466068774, -0.013780904933810234, -0.04936030134558678, -0.09231112897396088, 0.11982320249080658, 0.027062969282269478, 0.09635847061872482, -0.007248339243233204, 0.10877151787281036, 0.04660867899656296, -0.014435621909797192, -0.027075519785284996, -0.016960807144641876, 0.014973939396440983, -0.0355948805809021, 0.10068858414888382, 0.05420166254043579, -0.08349727094173431, -0.08811654895544052, -0.01385699026286602, -0.08286464959383011, 0.0004181711992714554, -0.14644445478916168, 0.2119998335838318, -0.05534656345844269, 0.10173219442367554, -0.016082532703876495, -0.08419321477413177, -0.07668127119541168, 0.1395350992679596, 0.16618496179580688, -0.126475989818573, -0.00031431150273419917, 0.0818956196308136, -0.005550273694097996, -0.11321070045232773, 0.1410473734140396, 0.07249141484498978, 0.0027656343299895525, 0.025106092914938927, -0.02651083469390869, -0.015207814984023571, 0.003547537373378873, -0.006662597414106131, -0.005425718147307634, 0.013027794659137726, 0.04877990856766701, -0.1376865804195404, -0.018954556435346603, -0.05633809417486191, -0.11805430799722672, 0.1616041213274002, -0.12904813885688782, -0.10119124501943588, -0.03065546229481697, -0.061190009117126465, -0.12829677760601044, 0.028983261436223984, -0.1129956841468811, 0.06422551721334457, 0.046320635825395584, -0.05775465816259384, -0.02130419760942459, -0.035476695746183395, -0.016820721328258514, 0.02899520844221115, 0.08625003695487976, -0.02576841227710247, 0.060843680053949356, 0.1225481852889061, -0.010286946780979633, -0.05504870414733887, 0.08430372178554535, 0.0160970501601696, -0.04803958535194397, -0.13490253686904907, 0.033017564564943314, -0.02733662724494934, 0.0948854312300682, 0.043634556233882904, -0.037128645926713943, -0.03507983684539795, -0.22050181031227112, -0.01341930404305458, -0.16797031462192535, -0.08301208168268204, -0.07510613650083542, 0.1046651154756546, 0.17708268761634827, -0.039738066494464874, 0.019882285967469215, -0.048405617475509644, 0.027950821444392204, -0.04688383638858795, 0.06539826840162277, 0.010295945219695568, -0.14620044827461243, 0.06721357256174088, -0.03693856671452522, 0.01009000837802887, -0.33220529556274414, -0.004476768895983696, 0.03710338845849037, -0.0460556261241436, -0.03855951875448227, 0.1254054456949234, 0.04808017611503601, 0.09748374670743942, -0.06477991491556168, -0.25744903087615967, -0.06751903891563416, 0.11282151937484741, -0.02509445883333683, -0.0739818885922432 ]
null
null
null
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # fine-tuned-text-summarization This model is a fine-tuned version of [google/flan-t5-base](https://huggingface.co/google/flan-t5-base) on an unknown dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.35.0 - Pytorch 2.1.0+cu118 - Datasets 2.14.6 - Tokenizers 0.14.1
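As a hedged illustration (not taken from the authors' training code, which is not published in the card), the hyperparameters above translate roughly into the following `Seq2SeqTrainingArguments`. Dataset loading and the `Trainer` call are omitted because the training data is unknown.

```python
# Illustrative only: the listed hyperparameters written out as training args.
from transformers import (
    AutoModelForSeq2SeqLM,
    AutoTokenizer,
    Seq2SeqTrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-base")
model = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-base")

args = Seq2SeqTrainingArguments(
    output_dir="fine-tuned-text-summarization",
    learning_rate=1e-5,                # learning_rate: 1e-05
    per_device_train_batch_size=8,     # train_batch_size: 8
    per_device_eval_batch_size=8,      # eval_batch_size: 8
    seed=42,                           # seed: 42
    adam_beta1=0.9,                    # optimizer: Adam with betas=(0.9,0.999)
    adam_beta2=0.999,
    adam_epsilon=1e-8,                 # epsilon=1e-08
    lr_scheduler_type="linear",        # lr_scheduler_type: linear
    num_train_epochs=5,                # num_epochs: 5
)
# A Seq2SeqTrainer would be constructed with these args plus a tokenized
# summarization dataset, which the card does not document.
```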
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "base_model": "google/flan-t5-base", "model-index": [{"name": "fine-tuned-text-summarization", "results": []}]}
null
AlyGreo/fine-tuned-text-summarization
[ "tensorboard", "safetensors", "generated_from_trainer", "base_model:google/flan-t5-base", "license:apache-2.0", "region:us" ]
2023-11-12T17:50:40+00:00
[]
[]
TAGS #tensorboard #safetensors #generated_from_trainer #base_model-google/flan-t5-base #license-apache-2.0 #region-us
# fine-tuned-text-summarization This model is a fine-tuned version of google/flan-t5-base on an unknown dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.35.0 - Pytorch 2.1.0+cu118 - Datasets 2.14.6 - Tokenizers 0.14.1
[ "# fine-tuned-text-summarization\n\nThis model is a fine-tuned version of google/flan-t5-base on an unknown dataset.", "## Model description\n\nMore information needed", "## Intended uses & limitations\n\nMore information needed", "## Training and evaluation data\n\nMore information needed", "## Training procedure", "### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 1e-05\n- train_batch_size: 8\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- num_epochs: 5", "### Training results", "### Framework versions\n\n- Transformers 4.35.0\n- Pytorch 2.1.0+cu118\n- Datasets 2.14.6\n- Tokenizers 0.14.1" ]
[ "TAGS\n#tensorboard #safetensors #generated_from_trainer #base_model-google/flan-t5-base #license-apache-2.0 #region-us \n", "# fine-tuned-text-summarization\n\nThis model is a fine-tuned version of google/flan-t5-base on an unknown dataset.", "## Model description\n\nMore information needed", "## Intended uses & limitations\n\nMore information needed", "## Training and evaluation data\n\nMore information needed", "## Training procedure", "### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 1e-05\n- train_batch_size: 8\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- num_epochs: 5", "### Training results", "### Framework versions\n\n- Transformers 4.35.0\n- Pytorch 2.1.0+cu118\n- Datasets 2.14.6\n- Tokenizers 0.14.1" ]
[ 42, 36, 6, 12, 8, 3, 90, 4, 33 ]
[ "passage: TAGS\n#tensorboard #safetensors #generated_from_trainer #base_model-google/flan-t5-base #license-apache-2.0 #region-us \n# fine-tuned-text-summarization\n\nThis model is a fine-tuned version of google/flan-t5-base on an unknown dataset.## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 1e-05\n- train_batch_size: 8\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- num_epochs: 5### Training results### Framework versions\n\n- Transformers 4.35.0\n- Pytorch 2.1.0+cu118\n- Datasets 2.14.6\n- Tokenizers 0.14.1" ]
[ -0.1221853494644165, 0.1370992362499237, -0.0010247378377243876, 0.10777068883180618, 0.14110660552978516, 0.006529451813548803, 0.10958582907915115, 0.09449248760938644, -0.08342529833316803, 0.09244300425052643, 0.09333959966897964, -0.001905346056446433, 0.04241875559091568, 0.21256762742996216, -0.01214770320802927, -0.21811208128929138, 0.013644436374306679, -0.03170802444219589, 0.013544632121920586, 0.11231469362974167, 0.09558092802762985, -0.09317771345376968, 0.08478917181491852, -0.0014650773955509067, -0.1448957622051239, 0.009075900539755821, -0.014462949708104134, -0.06550224870443344, 0.1004752591252327, 0.003333061235025525, 0.08229467272758484, 0.02286406233906746, 0.10022653639316559, -0.18017062544822693, 0.013325044885277748, 0.06688420474529266, -0.0012348159216344357, 0.09074485301971436, 0.04823058471083641, 0.00448956061154604, 0.08746835589408875, -0.14705686271190643, 0.05207882821559906, 0.027836931869387627, -0.0726483166217804, -0.09890111535787582, -0.09226017445325851, 0.08911999315023422, 0.056847721338272095, 0.09738698601722717, -0.012739975936710835, 0.13917410373687744, -0.08965953439474106, 0.06596710532903671, 0.15548767149448395, -0.32917848229408264, -0.06408196687698364, 0.07953258603811264, 0.028992220759391785, 0.07260701805353165, -0.08226506412029266, -0.014945484697818756, 0.07615373283624649, 0.03143195062875748, 0.08452364057302475, 0.011184100061655045, -0.122103750705719, -0.015412908978760242, -0.13847242295742035, -0.03150368854403496, 0.20838506519794464, 0.06445420533418655, -0.06230100244283676, -0.08911553025245667, -0.08264331519603729, -0.11459288001060486, -0.005284421611577272, -0.023483620956540108, 0.03630043566226959, -0.02027484029531479, -0.05021034926176071, -0.07411574572324753, -0.08853913843631744, -0.05305217579007149, -0.03556613624095917, 0.03857707232236862, 0.05654187873005867, 0.024641962721943855, -0.02580655924975872, 0.1087389811873436, -0.03277415782213211, -0.102756567299366, 0.009257867000997066, -0.013247674331068993, -0.02457788586616516, -0.04453331232070923, -0.034204255789518356, -0.03592879697680473, 0.04102734103798866, 0.131276473402977, -0.09396819025278091, 0.042406804859638214, -0.0052528390660882, 0.02601151168346405, -0.025611521676182747, 0.119961678981781, -0.08111346513032913, -0.006595108658075333, 0.0746956393122673, 0.12370248138904572, 0.07349623739719391, -0.010546853765845299, -0.09538228064775467, -0.03430359065532684, 0.1133262887597084, 0.06128186732530594, -0.03632134944200516, 0.04285852611064911, -0.045087385922670364, -0.012415428645908833, 0.027048788964748383, -0.12538133561611176, 0.024561310186982155, 0.012620115652680397, -0.08849567919969559, -0.07383348047733307, 0.03362346068024635, 0.0035659237764775753, -0.0563325434923172, 0.03584935888648033, -0.09870858490467072, -0.007145480718463659, -0.06375371664762497, -0.052503932267427444, 0.03561227768659592, -0.09709896892309189, -0.01928190514445305, -0.09940572082996368, -0.20047307014465332, -0.031189443543553352, 0.006182197947055101, -0.052855148911476135, -0.07753729075193405, -0.059038784354925156, -0.06324189156293869, -0.015232518315315247, -0.01687806099653244, 0.09513186663389206, -0.058517660945653915, 0.08624336123466492, 0.014026043005287647, 0.02343577705323696, -0.03651999309659004, 0.027872683480381966, -0.1009468287229538, 0.043716587126255035, -0.12116161733865738, 0.07901068031787872, -0.06690653413534164, 0.043435368686914444, -0.09999299049377441, -0.08271385729312897, -0.030514810234308243, 
-0.028327399864792824, 0.09344084560871124, 0.14867685735225677, -0.17078551650047302, -0.0024054571986198425, 0.16405782103538513, -0.09390158206224442, -0.10378514230251312, 0.13760650157928467, -0.029687123373150826, 0.024718528613448143, 0.06119637191295624, 0.21299846470355988, 0.06474465131759644, -0.10859145224094391, -0.030376890674233437, 0.00990296620875597, 0.02525034174323082, -0.0442592017352581, 0.058810438960790634, -0.021690422669053078, -0.04222932085394859, 0.03597569093108177, -0.023564539849758148, 0.025679193437099457, -0.0878748670220375, -0.08469979465007782, -0.07257843017578125, -0.08950181305408478, 0.04944992810487747, 0.013686536811292171, 0.04734288156032562, -0.08265284448862076, -0.08237536996603012, -0.0006691321032121778, 0.11583659797906876, -0.045216336846351624, 0.005506697576493025, -0.08275391906499863, 0.12946097552776337, -0.03937870264053345, -0.014541626907885075, -0.18104979395866394, -0.14230266213417053, 0.05287560820579529, -0.05053941160440445, 0.025973588228225708, -0.0561533197760582, 0.03637220710515976, 0.07682699710130692, -0.028690814971923828, -0.016027439385652542, -0.0618380531668663, -0.02601667307317257, -0.11688575148582458, -0.1667432337999344, -0.018645744770765305, -0.019560877233743668, 0.19065721333026886, -0.2400890290737152, 0.03164811059832573, 0.009425802156329155, 0.12601058185100555, 0.008993237279355526, -0.042022332549095154, 0.03386596590280533, 0.044377852231264114, -0.015956396237015724, -0.095452219247818, 0.055858369916677475, 0.025895707309246063, -0.08952081948518753, -0.042200684547424316, -0.1153176948428154, 0.07840234041213989, 0.11527512222528458, 0.0772116556763649, -0.07601635903120041, -0.031411994248628616, -0.08558456599712372, -0.037496715784072876, -0.07718674838542938, 0.02177596464753151, 0.14952093362808228, -0.00972932018339634, 0.14608119428157806, -0.08770834654569626, -0.029876945540308952, 0.023359239101409912, 0.0020717920269817114, -0.025947080925107002, 0.08329404890537262, 0.0709657073020935, -0.08262654393911362, 0.1035366803407669, 0.07717372477054596, -0.045770566910505295, 0.12430498003959656, -0.05513159930706024, -0.07415017485618591, -0.011361607350409031, 0.055750273168087006, -0.02265484631061554, 0.1475418210029602, -0.11378628760576248, 0.0021536133717745543, 0.028170393779873848, -0.004027592483907938, 0.06712394952774048, -0.18668490648269653, -0.00949137844145298, -0.006879622116684914, -0.06889783591032028, -0.0488436296582222, -0.0009154572617262602, 0.011881941929459572, 0.09825573861598969, 0.0121194738894701, -0.034818120300769806, 0.04175626114010811, 0.010211199522018433, -0.09478852152824402, 0.17491233348846436, -0.10328357666730881, -0.1649773120880127, -0.12947000563144684, 0.11346492916345596, -0.07854839414358139, -0.01944800466299057, 0.02374274842441082, -0.06172101944684982, -0.04174702614545822, -0.13171584904193878, -0.06417502462863922, -0.0074227903969585896, -0.0022420852910727262, 0.015092389658093452, 0.01618172787129879, 0.10576928406953812, -0.11986739188432693, 0.014137635938823223, -0.021666880697011948, -0.08337344974279404, 0.007363432087004185, 0.036717113107442856, 0.12349289655685425, 0.10282520204782486, -0.03432010859251022, 0.027492672204971313, -0.043368998914957047, 0.23060929775238037, -0.044076353311538696, -0.01605372317135334, 0.11265522986650467, 0.02690780907869339, 0.05332507565617561, 0.11957032978534698, 0.005795092787593603, -0.09555947035551071, 0.04546509310603142, 0.050170838832855225, -0.02293662168085575, 
-0.2658517062664032, -0.05791236087679863, -0.018505427986383438, -0.008185850456357002, 0.08414824306964874, 0.07831640541553497, -0.004791927058249712, 0.0735829770565033, -0.017025185748934746, 0.07076194882392883, -0.03290121257305145, 0.05580612272024155, 0.11439376324415207, 0.017127839848399162, 0.06904451549053192, -0.0435297004878521, -0.023234641179442406, 0.08130618929862976, -0.018503926694393158, 0.2556079924106598, -0.04631602019071579, 0.11374698579311371, 0.028814243152737617, 0.22950299084186554, -0.00877148937433958, 0.038174666464328766, 0.011418306268751621, 0.021750332787632942, 0.0023426534608006477, -0.060586776584386826, -0.04406522586941719, 0.013840360566973686, -0.03214278444647789, 0.05610845983028412, -0.11487148702144623, 0.08879832923412323, 0.039072226732969284, 0.25719624757766724, 0.033639490604400635, -0.32326042652130127, -0.0845421701669693, -0.014055632054805756, 0.005951683968305588, -0.06010934337973595, 0.03819986805319786, 0.17817680537700653, -0.09703678637742996, 0.03234255313873291, -0.05307462066411972, 0.07821358740329742, -0.019942132756114006, -0.0013393039116635919, 0.03604480251669884, 0.13931013643741608, -0.0046418197453022, 0.10157543420791626, -0.1923125684261322, 0.2087758183479309, 0.01788836158812046, 0.09828829020261765, -0.047101911157369614, 0.0041871811263263226, 0.016548044979572296, 0.08885331451892853, 0.12296400964260101, -0.0007237363024614751, -0.03213509917259216, -0.12528982758522034, -0.16013853251934052, 0.050966400653123856, 0.09310541301965714, -0.018141677603125572, 0.0702638104557991, -0.03521319851279259, 0.013182224705815315, 0.02578888088464737, -0.040160294622182846, -0.1952628344297409, -0.10517275333404541, -0.01934344880282879, 0.05890043452382088, -0.016870955005288124, -0.09421733021736145, -0.10799441486597061, -0.041227202862501144, 0.141517773270607, -0.0015624576481059194, -0.05295899137854576, -0.11672307550907135, 0.09789244085550308, 0.11210595816373825, -0.06121903657913208, 0.02680043689906597, 0.0004083841049578041, 0.1031796932220459, 0.030123744159936905, -0.08328527957201004, 0.0683881863951683, -0.05088607221841812, -0.1619366556406021, -0.0556764155626297, 0.14779767394065857, 0.014257842674851418, 0.02938634529709816, -0.0026098075322806835, -0.017395637929439545, 0.005220846273005009, -0.09588317573070526, 0.011941115371882915, 0.058023471385240555, 0.07870352268218994, 0.06005854159593582, -0.08588802814483643, -0.010325966402888298, -0.039293643087148666, -0.029836535453796387, 0.13963602483272552, 0.19647321105003357, -0.08331659436225891, 0.06414840370416641, 0.08037804812192917, -0.05340002477169037, -0.18791766464710236, 0.04295331984758377, 0.06906625628471375, 0.039782896637916565, 0.02291012741625309, -0.16262803971767426, 0.07787840813398361, 0.12446637451648712, -0.026839235797524452, 0.10642855614423752, -0.3198542892932892, -0.13201069831848145, 0.06651481986045837, 0.12065543979406357, 0.08278319984674454, -0.14193874597549438, -0.056816648691892624, -0.0026501337997615337, -0.0671088770031929, 0.08644832670688629, -0.19086572527885437, 0.10099990665912628, -0.005818231962621212, 0.07619985938072205, 0.018699275329709053, -0.04622151702642441, 0.12048070877790451, 0.014095613732933998, 0.08082842081785202, -0.05944862216711044, -0.009368879720568657, 0.10412650555372238, -0.06489668041467667, 0.09785059094429016, -0.03964615240693092, 0.10110510140657425, -0.09627191722393036, -0.02212262712419033, -0.049894507974386215, 0.07655101269483566, -0.04960932210087776, 
-0.031711146235466, -0.05433639511466026, 0.04326620325446129, 0.03828766569495201, -0.013139510527253151, 0.04596126452088356, 0.05783604830503464, 0.06657478958368301, 0.1129104346036911, 0.07111120223999023, -0.041139036417007446, -0.04896796867251396, 0.007478026673197746, -0.035384651273489, 0.055443670600652695, -0.12870031595230103, 0.009210782125592232, 0.11679716408252716, 0.025089921429753304, 0.11230139434337616, 0.038083966821432114, -0.06503408402204514, 0.019433170557022095, 0.05736217647790909, -0.16367821395397186, -0.20012693107128143, -0.0349171981215477, -0.0901816263794899, -0.1241467222571373, 0.031108656898140907, 0.08853982388973236, -0.08052563667297363, -0.011931546032428741, -0.03220808133482933, 0.009288284927606583, 0.0010975077748298645, 0.1486697942018509, 0.061997659504413605, 0.040466297417879105, -0.09010224789381027, 0.137318417429924, 0.09432395547628403, -0.08996537327766418, 0.05316285416483879, 0.05221471190452576, -0.12583614885807037, -0.03200481832027435, 0.03054344281554222, 0.16076336801052094, 0.013367782346904278, -0.07336409389972687, -0.09464309364557266, -0.09002980589866638, 0.03522856906056404, 0.09961594641208649, 0.05422984063625336, 0.017181962728500366, 0.00005616023190668784, 0.0021910083014518023, -0.1296536922454834, 0.10435165464878082, 0.05633067712187767, 0.05338482931256294, -0.17731039226055145, 0.10480640083551407, 0.011854158714413643, 0.05639364942908287, -0.02279180847108364, 0.029590701684355736, -0.08705715090036392, -0.03206239268183708, -0.12153477966785431, 0.035543620586395264, -0.029282575473189354, 0.00939062237739563, -0.004609252791851759, -0.03892305865883827, -0.054428137838840485, 0.05803342163562775, -0.05068458244204521, -0.05288439244031906, 0.015731411054730415, 0.05691274628043175, -0.14261019229888916, 0.0024178503081202507, 0.003319550771266222, -0.07391323149204254, 0.11459128558635712, 0.06929177790880203, 0.022218842059373856, 0.013917825184762478, -0.13757580518722534, -0.0005063985590822995, 0.03271431103348732, 0.022400397807359695, 0.044176239520311356, -0.07262510806322098, 0.0002952151116915047, -0.007935925386846066, 0.01768711768090725, 0.01208445243537426, 0.08026367425918579, -0.1427897959947586, -0.060472652316093445, -0.02425416372716427, -0.01537136361002922, -0.052279919385910034, 0.028371525928378105, 0.07948961853981018, 0.033992938697338104, 0.16168546676635742, -0.10579515248537064, 0.008769223466515541, -0.19332297146320343, -0.01916692964732647, -0.020980559289455414, -0.016830088570713997, -0.10355501621961594, -0.012121806852519512, 0.07261981070041656, -0.05131910368800163, 0.13030321896076202, 0.015251077711582184, 0.06512545049190521, 0.013171547092497349, -0.028661303222179413, -0.010654457844793797, 0.005133489146828651, 0.17995315790176392, 0.033458419144153595, -0.016527950763702393, 0.07798465341329575, -0.014935202896595001, 0.09346441179513931, 0.03360092639923096, 0.15500037372112274, 0.10631576925516129, -0.03070986643433571, 0.0903855487704277, 0.07978112995624542, -0.09282094240188599, -0.11011506617069244, 0.1068737804889679, -0.054460447281599045, 0.10011646896600723, -0.047235000878572464, 0.12045548856258392, 0.11191657930612564, -0.1371396780014038, 0.022770771756768227, -0.041414130479097366, -0.10506103932857513, -0.09898573905229568, -0.07490753382444382, -0.08723820000886917, -0.11889804154634476, 0.021123986691236496, -0.11724092811346054, -0.02031538635492325, 0.08791124075651169, -0.003160999156534672, -0.02531594969332218, 0.1624194085597992, 
-0.007988834753632545, 0.007425614632666111, 0.0666622668504715, 0.025361133739352226, -0.021754376590251923, -0.051211997866630554, -0.09038098901510239, 0.043971557170152664, -0.014473826624453068, 0.06787855923175812, -0.023609120398759842, 0.04497707262635231, 0.05902508273720741, -0.015152927488088608, -0.04268903657793999, 0.004640629515051842, 0.01870802231132984, 0.03550298511981964, 0.019813334569334984, 0.05545760691165924, -0.028572753071784973, -0.023809446021914482, 0.26363080739974976, -0.03679068014025688, -0.0569496676325798, -0.1325514167547226, 0.1556115597486496, 0.025307483971118927, -0.03407725319266319, 0.0596502348780632, -0.13518303632736206, 0.020690852776169777, 0.19257880747318268, 0.18444643914699554, -0.07530011981725693, -0.02597525343298912, -0.02661864459514618, -0.02085014246404171, -0.037835996598005295, 0.11342998594045639, 0.1169145330786705, 0.03438710421323776, -0.04523593932390213, -0.012686322443187237, -0.034558191895484924, -0.003392809769138694, -0.09934467822313309, 0.08547079563140869, 0.000875605212058872, 0.011899219825863838, -0.06355903297662735, 0.055001839995384216, -0.03486640006303787, -0.11814387887716293, 0.042998120188713074, -0.15430451929569244, -0.16554570198059082, -0.029752982780337334, 0.06371040642261505, 0.003846366424113512, 0.049626581370830536, -0.034927695989608765, 0.01897144317626953, 0.09128762036561966, -0.03551569581031799, -0.09635673463344574, -0.0512319840490818, 0.035801805555820465, -0.05911684408783913, 0.2797085642814636, 0.0007491064607165754, 0.07904815673828125, 0.10551346838474274, 0.026254205033183098, -0.15856702625751495, 0.059658072888851166, 0.045836638659238815, 0.00033038543188013136, 0.03357742354273796, 0.07998990267515182, -0.02380838803946972, 0.05437888950109482, 0.03397468850016594, -0.047740768641233444, -0.019844871014356613, -0.011617664247751236, -0.0026078529190272093, -0.09638329595327377, 0.009536395780742168, -0.07875891774892807, 0.14606483280658722, 0.17114998400211334, -0.05920097976922989, 0.015108004212379456, -0.066609226167202, 0.026418132707476616, 0.033117763698101044, 0.07074351608753204, 0.02211172692477703, -0.1760900914669037, 0.022470032796263695, 0.021101906895637512, 0.004901845008134842, -0.27556124329566956, -0.05813998356461525, 0.0015886927722021937, -0.0432819239795208, -0.08736337721347809, 0.1059175580739975, 0.09712115675210953, 0.04259145259857178, -0.03963886946439743, -0.036401793360710144, -0.05470899119973183, 0.13979244232177734, -0.12510767579078674, -0.08101039379835129 ]
null
null
transformers
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # ICU_Returns_BioClinicalBERT This model is a fine-tuned version of [emilyalsentzer/Bio_ClinicalBERT](https://huggingface.co/emilyalsentzer/Bio_ClinicalBERT) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.7775 - F1: 0.7063 - Roc Auc: 0.7198 - Precision with 0: 0.8846 - Precision with 1: 0.6538 - Recall with 0: 0.5055 - Recall with 1: 0.9341 - Accuracy: 0.7198 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 32 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 13 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | Roc Auc | Precision with 0 | Precision with 1 | Recall with 0 | Recall with 1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|:-----------------:|:-----------------:|:--------------:|:--------------:|:---------:| | No log | 1.0 | 46 | 0.6964 | 0.3573 | 0.5110 | 1.0 | 0.5056 | 0.0220 | 1.0 | 0.5110 | | No log | 2.0 | 92 | 0.6611 | 0.5248 | 0.5714 | 0.6912 | 0.5439 | 0.2582 | 0.8846 | 0.5714 | | No log | 3.0 | 138 | 0.6322 | 0.6315 | 0.6374 | 0.6838 | 0.6096 | 0.5110 | 0.7637 | 0.6374 | | No log | 4.0 | 184 | 0.6526 | 0.6396 | 0.6566 | 0.7767 | 0.6092 | 0.4396 | 0.8736 | 0.6566 | | No log | 5.0 | 230 | 0.6826 | 0.6693 | 0.6923 | 0.9070 | 0.6259 | 0.4286 | 0.9560 | 0.6923 | | No log | 6.0 | 276 | 0.7496 | 0.7230 | 0.7335 | 0.8829 | 0.6680 | 0.5385 | 0.9286 | 0.7335 | | No log | 7.0 | 322 | 1.5500 | 0.6398 | 0.6703 | 0.9079 | 0.6076 | 0.3791 | 0.9615 | 0.6703 | | No log | 8.0 | 368 | 0.9037 | 0.7438 | 0.7527 | 0.9035 | 0.684 | 0.5659 | 0.9396 | 0.7527 | | No log | 9.0 | 414 | 1.6723 | 0.6965 | 0.7143 | 0.9149 | 0.6444 | 0.4725 | 0.9560 | 0.7143 | | No log | 10.0 | 460 | 1.4913 | 0.7030 | 0.7170 | 0.8835 | 0.6513 | 0.5 | 0.9341 | 0.7170 | | 0.3158 | 11.0 | 506 | 1.7129 | 0.6990 | 0.7143 | 0.89 | 0.6477 | 0.4890 | 0.9396 | 0.7143 | | 0.3158 | 12.0 | 552 | 1.8420 | 0.6882 | 0.7060 | 0.8947 | 0.6394 | 0.4670 | 0.9451 | 0.7060 | | 0.3158 | 13.0 | 598 | 1.7775 | 0.7063 | 0.7198 | 0.8846 | 0.6538 | 0.5055 | 0.9341 | 0.7198 | ### Framework versions - Transformers 4.34.0 - Pytorch 2.1.0+cu121 - Datasets 2.14.5 - Tokenizers 0.14.1
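A hypothetical inference sketch for this checkpoint (not from the card). The example clinical note is invented, and the semantics of labels 0 and 1 are not documented, so outputs should be interpreted against the per-class metrics above with care.

```python
# Hedged usage sketch: load the classifier and score a single note.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo = "moro01525/ICU_Returns_BioClinicalBERT"
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSequenceClassification.from_pretrained(repo)

note = "Patient discharged after 3 days; stable on room air."  # invented example
inputs = tokenizer(note, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.softmax(dim=-1))  # probabilities for class 0 and class 1
```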
{"license": "mit", "tags": ["generated_from_trainer"], "base_model": "emilyalsentzer/Bio_ClinicalBERT", "model-index": [{"name": "ICU_Returns_BioClinicalBERT", "results": []}]}
text-classification
moro01525/ICU_Returns_BioClinicalBERT
[ "transformers", "pytorch", "bert", "text-classification", "generated_from_trainer", "base_model:emilyalsentzer/Bio_ClinicalBERT", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T17:53:50+00:00
[]
[]
TAGS #transformers #pytorch #bert #text-classification #generated_from_trainer #base_model-emilyalsentzer/Bio_ClinicalBERT #license-mit #autotrain_compatible #endpoints_compatible #region-us
ICU\_Returns\_BioClinicalBERT ============================= This model is a fine-tuned version of emilyalsentzer/Bio\_ClinicalBERT on an unknown dataset. It achieves the following results on the evaluation set: * Loss: 1.7775 * F1: 0.7063 * Roc Auc: 0.7198 * Precision with 0: 0.8846 * Precision with 1: 0.6538 * Recall with 0: 0.5055 * Recall with 1: 0.9341 * Accuracy: 0.7198 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 0.0001 * train\_batch\_size: 32 * eval\_batch\_size: 16 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * num\_epochs: 13 ### Training results ### Framework versions * Transformers 4.34.0 * Pytorch 2.1.0+cu121 * Datasets 2.14.5 * Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13", "### Training results", "### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #pytorch #bert #text-classification #generated_from_trainer #base_model-emilyalsentzer/Bio_ClinicalBERT #license-mit #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13", "### Training results", "### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ 66, 97, 4, 33 ]
[ "passage: TAGS\n#transformers #pytorch #bert #text-classification #generated_from_trainer #base_model-emilyalsentzer/Bio_ClinicalBERT #license-mit #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13### Training results### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ -0.09056839346885681, 0.08475185930728912, -0.0015843362780287862, 0.10697293281555176, 0.16269953548908234, 0.03651510924100876, 0.1327241212129593, 0.1209334135055542, -0.0702114850282669, 0.029579630121588707, 0.12896092236042023, 0.13817715644836426, -0.0012983549386262894, 0.12518924474716187, -0.04163144528865814, -0.2746811509132385, -0.003298318013548851, 0.061213988810777664, -0.06132741644978523, 0.1165081337094307, 0.09804302453994751, -0.14146625995635986, 0.09966473281383514, 0.00428055552765727, -0.20073337852954865, 0.02134735696017742, 0.029470711946487427, -0.03938615322113037, 0.15180310606956482, 0.03136570751667023, 0.12404434382915497, 0.014475726522505283, 0.10232046246528625, -0.19185934960842133, 0.011500273831188679, 0.03017297200858593, -0.0018234612653031945, 0.09613046795129776, 0.030224837362766266, -0.00801762007176876, 0.14172764122486115, -0.07363210618495941, 0.06073182821273804, 0.026096731424331665, -0.12787576019763947, -0.2020096480846405, -0.06999225169420242, 0.0585617832839489, 0.05912434309720993, 0.08040474355220795, -0.007207207381725311, 0.11081831157207489, -0.08773698657751083, 0.09665951132774353, 0.22797146439552307, -0.28431472182273865, -0.06642594933509827, 0.03067079372704029, 0.028446389362215996, 0.06548555195331573, -0.12272785604000092, -0.020784255117177963, 0.06730189919471741, 0.029880154877901077, 0.1181875541806221, -0.032205935567617416, -0.04603040963411331, 0.02410234324634075, -0.13430243730545044, -0.008383791893720627, 0.14683863520622253, 0.04572908952832222, -0.03837518021464348, -0.0241527296602726, -0.04041862115263939, -0.15740247070789337, -0.025322435423731804, -0.019213788211345673, 0.04371917247772217, -0.05142446979880333, -0.07913129031658173, 0.00029435206670314074, -0.09027422219514847, -0.08347701281309128, -0.05780671536922455, 0.17934052646160126, 0.03214903920888901, 0.0043571218848228455, 0.0031268284656107426, 0.10407222807407379, -0.017319712787866592, -0.12358777225017548, 0.019781211391091347, 0.01851145550608635, 0.007264143321663141, -0.06995595991611481, -0.0650014653801918, -0.0007253898656927049, 0.0007210922776721418, 0.145477756857872, -0.046224795281887054, 0.03726327419281006, 0.051309388130903244, 0.03421710431575775, -0.08688760548830032, 0.16959425806999207, -0.04241369292140007, -0.03279838711023331, -0.010103185661137104, 0.05780759081244469, -0.008928345516324043, 0.0037568463012576103, -0.12808890640735626, -0.0067304279655218124, 0.10048707574605942, -0.0011212857207283378, -0.11260738223791122, 0.07032275199890137, -0.05144748464226723, -0.028708001598715782, 0.007608246058225632, -0.07241757214069366, 0.03217649087309837, 0.00649086432531476, -0.07883401215076447, -0.02249293588101864, 0.02172512374818325, 0.008524765260517597, 0.005155746825039387, 0.1261012703180313, -0.11236502230167389, 0.029200684279203415, -0.0951661467552185, -0.12662683427333832, 0.0009872072841972113, -0.07037180662155151, 0.04445550963282585, -0.10185885429382324, -0.14203645288944244, -0.01934104599058628, 0.045339860022068024, -0.03344891965389252, -0.04667329415678978, -0.061387285590171814, -0.05909545719623566, 0.018459215760231018, -0.013696229085326195, 0.0827082097530365, -0.06988808512687683, 0.10431484133005142, 0.03431516885757446, 0.08030594885349274, -0.06745019555091858, 0.05485181137919426, -0.10058198869228363, 0.00046134114381857216, -0.18991009891033173, 0.02333224005997181, -0.06202724575996399, 0.04826344922184944, -0.08242382109165192, -0.11837896704673767, 0.017551904544234276, 
0.010112090036273003, 0.07502260059118271, 0.09432561695575714, -0.1384466141462326, -0.09377698600292206, 0.15194396674633026, -0.05955484136939049, -0.12834332883358002, 0.10758974403142929, -0.07374735921621323, 0.06975437700748444, 0.07920615375041962, 0.18614429235458374, 0.07664989680051804, -0.0947732925415039, 0.01609892211854458, -0.006066307425498962, 0.07498829066753387, -0.04312608391046524, 0.07082675397396088, 0.009733844548463821, -0.017460066825151443, 0.02161969617009163, -0.04936591535806656, 0.05891365557909012, -0.11830530315637589, -0.08068197965621948, -0.009455458261072636, -0.10429604351520538, 0.0856470987200737, 0.058238204568624496, 0.08390632271766663, -0.11778584867715836, -0.06740132719278336, 0.10876501351594925, 0.0824195146560669, -0.06217533349990845, 0.021308990195393562, -0.05271559953689575, 0.04650647193193436, -0.016700461506843567, -0.03550354391336441, -0.16420267522335052, -0.02622467651963234, 0.010537238791584969, 0.028231976553797722, 0.0379616804420948, 0.038013096898794174, 0.066435806453228, 0.08696305751800537, -0.07461684942245483, -0.01918666809797287, -0.05548703297972679, 0.01163331139832735, -0.13494916260242462, -0.2090013325214386, -0.025510460138320923, -0.016098517924547195, 0.12053923308849335, -0.22375375032424927, 0.03659358620643616, -0.022136926651000977, 0.07617170363664627, 0.02023402974009514, -0.0154995396733284, -0.05852764472365379, 0.08755955100059509, -0.040000155568122864, -0.03023238480091095, 0.07219626009464264, -0.010273278690874577, -0.09269966185092926, -0.05280382186174393, -0.0880410447716713, 0.18799878656864166, 0.12175235897302628, -0.13402098417282104, -0.10162420570850372, -0.03669998049736023, -0.049784742295742035, -0.02578127011656761, -0.04654297977685928, 0.0007160363020375371, 0.18572157621383667, -0.02661692537367344, 0.1512354612350464, -0.06488858908414841, -0.032217636704444885, 0.010430662892758846, -0.038759876042604446, 0.038530703634023666, 0.1285025179386139, 0.12702646851539612, -0.08452091366052628, 0.14665216207504272, 0.1364331841468811, -0.08272407203912735, 0.1619071513414383, -0.021008223295211792, -0.0670214518904686, -0.019369354471564293, -0.046148646622896194, -0.019038213416934013, 0.11467786133289337, -0.16321159899234772, -0.01882997527718544, 0.012463262304663658, 0.021756654605269432, 0.011696809902787209, -0.2054317146539688, -0.04112920165061951, 0.04335564374923706, -0.034183401614427567, -0.02774002216756344, -0.018661336973309517, -0.003514857031404972, 0.12037770450115204, 0.014017422683537006, -0.08456578850746155, 0.024522071704268456, 0.00845962855964899, -0.08852510899305344, 0.21666491031646729, -0.07775552570819855, -0.1020534560084343, -0.1402633786201477, -0.08494484424591064, -0.046767301857471466, 0.03215964138507843, 0.058747995644807816, -0.0861176997423172, -0.020047035068273544, -0.053675632923841476, 0.041762251406908035, 0.0063093965873122215, 0.03267776221036911, 0.007326760329306126, 0.004995917435735464, 0.05509909987449646, -0.10973386466503143, -0.021868573501706123, -0.06374064832925797, -0.052244942635297775, 0.03722931072115898, 0.007866251282393932, 0.11323808133602142, 0.14490918815135956, -0.04046265780925751, -0.00401271041482687, -0.045633140951395035, 0.2511744201183319, -0.0622810460627079, -0.027441412210464478, 0.12262304127216339, -0.015175262466073036, 0.037258487194776535, 0.10672067105770111, 0.07829749584197998, -0.08971009403467178, 0.021913044154644012, 0.03659496828913689, -0.025511208921670914, -0.21406741440296173, 
-0.050799526274204254, -0.04394825920462608, -0.013823486864566803, 0.10271269083023071, 0.005096482578665018, 0.03640054166316986, 0.06946871429681778, 0.03485894575715065, 0.08695318549871445, -0.044132690876722336, 0.07406378537416458, 0.11820285767316818, 0.031100928783416748, 0.13510802388191223, -0.024829989299178123, -0.07087095081806183, 0.03711218759417534, -0.021971696987748146, 0.21576975286006927, 0.02595183439552784, 0.12073685973882675, 0.058225471526384354, 0.13372166454792023, -0.0006040922598913312, 0.09427511692047119, 0.0027428516186773777, -0.050848763436079025, -0.017773641273379326, -0.03962313011288643, -0.048905838280916214, 0.0363943949341774, -0.06975317001342773, 0.06330376118421555, -0.1526581346988678, -0.021237831562757492, 0.05000920966267586, 0.21419459581375122, 0.05034121870994568, -0.3061482906341553, -0.10919088125228882, 0.004408295266330242, -0.0355704240500927, -0.02166399173438549, 0.030180208384990692, 0.11706126481294632, -0.08957206457853317, 0.020404338836669922, -0.0473027229309082, 0.09290467947721481, -0.0388767383992672, 0.06065819412469864, 0.053749896585941315, 0.058241259306669235, -0.021959295496344566, 0.08636660873889923, -0.27278003096580505, 0.2871623635292053, -0.0019768693018704653, 0.049598194658756256, -0.06118262559175491, -0.016122816130518913, 0.03556498885154724, 0.102613665163517, 0.05687398090958595, -0.0012638551415875554, -0.041245244443416595, -0.23238085210323334, -0.03098747320473194, 0.02273746021091938, 0.1065889224410057, -0.05687179043889046, 0.10530642420053482, -0.023510821163654327, 0.013362809084355831, 0.07138596475124359, -0.006499096751213074, -0.07572053372859955, -0.0757850855588913, -0.0145269138738513, -0.006378205493092537, 0.011107437312602997, -0.05482879653573036, -0.12052607536315918, -0.09847704321146011, 0.14554144442081451, -0.0480266734957695, -0.025011815130710602, -0.1105976402759552, 0.08928213268518448, 0.06477971374988556, -0.09245562553405762, 0.038264356553554535, 0.0169710423797369, 0.06755825132131577, 0.0282951220870018, -0.06009076535701752, 0.11415644735097885, -0.055323898792266846, -0.17212648689746857, -0.07274220883846283, 0.11129830032587051, 0.04477803409099579, 0.07954307645559311, 0.00158111029304564, 0.002105278894305229, -0.034240804612636566, -0.09097951650619507, 0.033868033438920975, -0.033096782863140106, 0.06829843670129776, 0.030661839991807938, -0.1048693135380745, 0.029897697269916534, -0.05944502353668213, -0.021905027329921722, 0.18363423645496368, 0.2663237452507019, -0.0954250693321228, 0.007701950613409281, 0.04254429042339325, -0.08175671100616455, -0.20037934184074402, 0.06415364146232605, 0.06988278776407242, 0.014140385203063488, 0.03389051556587219, -0.19927723705768585, 0.143082857131958, 0.1104145273566246, -0.0015561901964247227, 0.10110844671726227, -0.2490457147359848, -0.1361473798751831, 0.1135881170630455, 0.14255200326442719, 0.15395644307136536, -0.14386843144893646, -0.013870618306100368, -0.03373539075255394, -0.1363079845905304, 0.11455376446247101, -0.05691603943705559, 0.1286167949438095, -0.034068863838911057, 0.0887799859046936, 0.011533482000231743, -0.04306600242853165, 0.11742082983255386, 0.02923516556620598, 0.10532216727733612, -0.05331612005829811, -0.08061058074235916, 0.029146017506718636, -0.032274603843688965, 0.016623109579086304, -0.06444397568702698, 0.014656558632850647, -0.11859845370054245, -0.028616888448596, -0.08077728003263474, 0.03811139240860939, -0.03866959735751152, -0.08340024203062057, -0.04597852751612663, 
0.044734466820955276, 0.035663384944200516, -0.010056398808956146, 0.1376722753047943, -0.019024865701794624, 0.16926537454128265, 0.07551344484090805, 0.09171665459871292, -0.07112198323011398, -0.030469568446278572, 0.008547944948077202, -0.02491925284266472, 0.0569717101752758, -0.1469254046678543, 0.027674205601215363, 0.14975403249263763, 0.028254656121134758, 0.15121355652809143, 0.08951961994171143, -0.01112401019781828, 0.008557327091693878, 0.06580044329166412, -0.16989533603191376, -0.0472354032099247, -0.028179612010717392, -0.07182000577449799, -0.11744628846645355, 0.057741083204746246, 0.10677991807460785, -0.07586706429719925, -0.014891367405653, -0.026150841265916824, -0.010927418246865273, -0.06986149400472641, 0.19210316240787506, 0.0826510339975357, 0.05704459547996521, -0.08097793161869049, 0.043455030769109726, 0.0330578088760376, -0.044394586235284805, 0.004609253257513046, 0.053851015865802765, -0.07711432129144669, -0.03718095272779465, 0.05096287280321121, 0.20701315999031067, -0.07900159060955048, -0.025850484147667885, -0.15007270872592926, -0.1240924820303917, 0.06870698183774948, 0.20214785635471344, 0.11700496077537537, 0.0017545823939144611, -0.04848455637693405, 0.01882305182516575, -0.1158798411488533, 0.08431396633386612, 0.011186989955604076, 0.07456771284341812, -0.11609096080064774, 0.18827933073043823, -0.019886909052729607, 0.036314237862825394, -0.03248993307352066, 0.03298554569482803, -0.14314004778862, -0.00025151492445729673, -0.12561650574207306, -0.043241728097200394, -0.020151441916823387, 0.005488888826221228, 0.000490513863041997, -0.07006990164518356, -0.05144022777676582, 0.0074383774772286415, -0.12219567596912384, -0.008827680721879005, 0.043944891542196274, 0.07139629870653152, -0.10879413783550262, -0.03263493627309799, 0.028445348143577576, -0.058749813586473465, 0.06273391097784042, 0.03424778953194618, 0.040058329701423645, 0.059073254466056824, -0.14085491001605988, 0.022128500044345856, 0.055880192667245865, 0.01602580025792122, 0.05867193266749382, -0.09761566668748856, -0.009215076453983784, -0.017040232196450233, 0.07766848057508469, 0.030691368505358696, 0.06449545174837112, -0.12091109156608582, -0.005358986556529999, -0.01719670183956623, -0.0942615270614624, -0.056617509573698044, 0.03085043653845787, 0.08459701389074326, 0.002724830759689212, 0.18495215475559235, -0.07731460779905319, 0.03309395909309387, -0.20879851281642914, 0.0006747576990164816, -0.002622063271701336, -0.11462187767028809, -0.12017770111560822, -0.09314145892858505, 0.060378432273864746, -0.03838485851883888, 0.14726929366588593, 0.02241929993033409, 0.0365627259016037, 0.031467001885175705, -0.02673392742872238, 0.03458280488848686, 0.020066678524017334, 0.2190411239862442, 0.03538943827152252, -0.04705096781253815, 0.040750157088041306, 0.06685023754835129, 0.0999249741435051, 0.12232315540313721, 0.20804856717586517, 0.1429135501384735, -0.0436507873237133, 0.07234133034944534, 0.04145696759223938, -0.04584783688187599, -0.14926834404468536, -0.006073400378227234, 0.009886518120765686, 0.07354635000228882, -0.0287385955452919, 0.21747687458992004, 0.057145632803440094, -0.18427380919456482, 0.039903536438941956, -0.054612334817647934, -0.09203760325908661, -0.11162684112787247, 0.0011781632201746106, -0.08286444842815399, -0.15399818122386932, 0.004863607231527567, -0.1381930708885193, -0.00023531301121693105, 0.0974915474653244, 0.005309577099978924, -0.02008218504488468, 0.15513886511325836, -0.016830235719680786, 0.02770085632801056, 
0.0630907192826271, 0.004532445687800646, -0.022850805893540382, -0.12873470783233643, -0.07689023017883301, -0.014542201533913612, -0.03277372941374779, 0.019409259781241417, -0.0628293827176094, -0.06804027408361435, 0.005772767588496208, -0.008547420613467693, -0.1003122329711914, 0.00996240135282278, 0.011908854357898235, 0.06741118431091309, 0.056315552443265915, -0.003643793286755681, 0.012457181699573994, -0.011632561683654785, 0.2182600200176239, -0.07263169437646866, -0.051255982369184494, -0.0994253158569336, 0.2727925479412079, 0.06154564768075943, 0.022110356017947197, 0.01937416009604931, -0.06627090275287628, 0.020130878314375877, 0.24742381274700165, 0.18050517141819, -0.09262196719646454, -0.004959849640727043, 0.008826018311083317, -0.003221573308110237, 0.009200789965689182, 0.11610841006040573, 0.10803398489952087, 0.008209421299397945, -0.08693958818912506, -0.044021572917699814, -0.0494920089840889, -0.009857109747827053, -0.02897975780069828, 0.04821614921092987, 0.07000619173049927, 0.003064119489863515, -0.05850846692919731, 0.048105739057064056, -0.06055482476949692, -0.09663312882184982, 0.07042849808931351, -0.22277720272541046, -0.14896762371063232, -0.01965157315135002, 0.07410099357366562, 0.006394194904714823, 0.07464975863695145, -0.031117623671889305, -0.00012385092850308865, 0.07913162559270859, -0.010419449768960476, -0.09236074239015579, -0.09924400597810745, 0.10298359394073486, -0.08860298991203308, 0.19490988552570343, -0.04660540819168091, 0.07246120274066925, 0.12569734454154968, 0.05634235218167305, -0.04964546486735344, 0.06859336048364639, 0.03872576728463173, -0.08576948195695877, 0.01916179619729519, 0.08779655396938324, -0.04051366075873375, 0.09483348578214645, 0.03578643128275871, -0.1635775864124298, 0.021341370418667793, -0.04750248044729233, -0.08735080808401108, -0.04419080913066864, -0.01936667039990425, -0.0584808848798275, 0.13602755963802338, 0.2344507873058319, -0.03711502254009247, -0.010535995475947857, -0.06023474782705307, 0.019502177834510803, 0.07540713995695114, 0.02558600716292858, -0.07408904284238815, -0.23683789372444153, 0.00789265614002943, 0.06546413898468018, -0.024390369653701782, -0.27579864859580994, -0.08620461076498032, -0.011174407787621021, -0.04458865150809288, -0.09152290970087051, 0.08876829594373703, 0.0681266039609909, 0.043494418263435364, -0.05165110155940056, -0.12228888273239136, -0.06372242420911789, 0.15597201883792877, -0.13843588531017303, -0.10733441263437271 ]
null
null
transformers
ExllamaV2 version of the model created by BlueNipples! Original Model https://huggingface.co/BlueNipples/TimeCrystal-l2-13B Requires ExllamaV2, which is being developed by turboderp https://github.com/turboderp/exllamav2 under an MIT license. Main branch is 8bpw 8h ---- This 13B model, TimeCrystal-l2-13B, is built to maximize logic and instruction following, whilst also increasing the vividness of prose found in Chronos-based models like Mythomax over the more romantic prose, hopefully without losing the elegant narrative prose touch of newer models like Synthia and Xwin. TLDR: Attempt at more clever, better prose. Tentative test results: I'm not certain whether logic/instruct following was improved, but the prose infusion seems to have worked. It is built so: SLERPS: Amethyst + Openchat Super = OpenStone MythoMax + Chronos = ChronoMax ChronoMax + Amethyst = TimeStone Gradient Merge: TimeStone + OpenStone (0.9,0,0) = TimeCrystal Props to all the mergers and fine-tuners! All models in Merge: Many, lol.
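To make the SLERP steps above concrete, here is a minimal numpy sketch of spherical linear interpolation between two weight tensors. It illustrates the operation behind merges like "Amethyst + Openchat Super = OpenStone", not the actual tooling used to build TimeCrystal; the function name, the interpolation factor `t`, and the random example tensors are assumptions for demonstration only.

```python
import numpy as np

def slerp(w_a: np.ndarray, w_b: np.ndarray, t: float, eps: float = 1e-8) -> np.ndarray:
    """Spherical linear interpolation between two weight tensors.

    Flattens both tensors, interpolates along the arc between them on the
    hypersphere, and restores the original shape. Real merge tooling works
    layer by layer and handles edge cases omitted here.
    """
    a, b = w_a.ravel(), w_b.ravel()
    # Angle between the two weight vectors.
    cos_theta = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b) + eps)
    theta = np.arccos(np.clip(cos_theta, -1.0, 1.0))
    if theta < eps:  # nearly parallel vectors: fall back to plain LERP
        return (1.0 - t) * w_a + t * w_b
    s = np.sin(theta)
    mixed = (np.sin((1.0 - t) * theta) / s) * a + (np.sin(t * theta) / s) * b
    return mixed.reshape(w_a.shape)

# Hypothetical usage: blend two layers halfway, as in the SLERP steps above.
merged = slerp(np.random.randn(4, 4), np.random.randn(4, 4), t=0.5)
print(merged.shape)
```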
{"license": "apache-2.0", "tags": ["llama-2", "roleplaying"]}
text-generation
AzureBlack/TimeCrystal-l2-13B-exl2
[ "transformers", "safetensors", "llama", "text-generation", "llama-2", "roleplaying", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "text-generation-inference", "region:us" ]
2023-11-12T17:57:04+00:00
[]
[]
TAGS #transformers #safetensors #llama #text-generation #llama-2 #roleplaying #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
ExllamaV2 version of the model created by BlueNipples! Original Model URL Requires ExllamaV2, which is being developed by turboderp URL under an MIT license. Main branch is 8bpw 8h ---- This 13B model, TimeCrystal-l2-13B, is built to maximize logic and instruction following, whilst also increasing the vividness of prose found in Chronos-based models like Mythomax over the more romantic prose, hopefully without losing the elegant narrative prose touch of newer models like Synthia and Xwin. TLDR: Attempt at more clever, better prose. Tentative test results: I'm not certain whether logic/instruct following was improved, but the prose infusion seems to have worked. It is built so: SLERPS: Amethyst + Openchat Super = OpenStone MythoMax + Chronos = ChronoMax ChronoMax + Amethyst = TimeStone Gradient Merge: TimeStone + OpenStone (0.9,0,0) = TimeCrystal Props to all the mergers and fine-tuners! All models in Merge: Many, lol.
[]
[ "TAGS\n#transformers #safetensors #llama #text-generation #llama-2 #roleplaying #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n" ]
[ 64 ]
[ "passage: TAGS\n#transformers #safetensors #llama #text-generation #llama-2 #roleplaying #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n" ]
[ -0.0013048341497778893, 0.02985749952495098, -0.006491969805210829, 0.02622772380709648, 0.0847533792257309, -0.03205602988600731, 0.18770553171634674, 0.11261523514986038, -0.03938153386116028, -0.029700199142098427, 0.14478813111782074, 0.1644625961780548, -0.011986946687102318, 0.0634050965309143, -0.0757095217704773, -0.15683837234973907, 0.09412338584661484, -0.02994297444820404, 0.02175210416316986, 0.08011490106582642, 0.08758381009101868, -0.027384022250771523, 0.0792393684387207, 0.0019890558905899525, -0.017380626872181892, 0.026303011924028397, 0.07122909277677536, -0.13567371666431427, 0.09848450124263763, 0.06319054216146469, 0.0588749498128891, 0.021954607218503952, -0.0208024550229311, -0.25968632102012634, 0.02207372337579727, 0.01775657944381237, -0.046443164348602295, 0.009763470850884914, 0.05271712318062782, -0.054133091121912, 0.024394579231739044, 0.043104805052280426, -0.009118769317865372, 0.08877067267894745, -0.09195516258478165, -0.06316576898097992, -0.06608197838068008, -0.00621688412502408, 0.12013860046863556, 0.09258633106946945, 0.011354484595358372, 0.12390758097171783, -0.06841540336608887, 0.06633177399635315, 0.17042002081871033, -0.3628743290901184, 0.011059453710913658, 0.08463391661643982, 0.10326914489269257, 0.03148704022169113, -0.031055454164743423, 0.09380549937486649, 0.05528515949845314, -0.028360730037093163, 0.04491906240582466, -0.0674470067024231, -0.06517940759658813, 0.039702143520116806, -0.040283411741256714, -0.02917855605483055, 0.26635462045669556, -0.04351705685257912, 0.018581677228212357, -0.09292083978652954, -0.03107319213449955, 0.03882906585931778, -0.02037774585187435, 0.049351032823324203, 0.03496004268527031, 0.10915546119213104, 0.0864943191409111, -0.01355455070734024, -0.11592917144298553, -0.01646651327610016, -0.13806107640266418, 0.09331677854061127, 0.0009924977784976363, 0.017069950699806213, -0.17318642139434814, 0.04643061384558678, 0.00683935172855854, -0.09700070321559906, -0.052618272602558136, -0.03722556680440903, 0.08348432183265686, 0.019785603508353233, -0.05117030814290047, -0.017854763194918633, 0.1721537560224533, 0.1915426254272461, 0.021296344697475433, 0.049090560525655746, -0.11683376133441925, 0.0946572870016098, -0.016503119841217995, 0.02418566681444645, 0.039218489080667496, -0.03388378024101257, 0.11698446422815323, -0.050862401723861694, 0.11636505275964737, -0.058291055262088776, -0.13846534490585327, 0.0021548389922827482, -0.001263866201043129, 0.16112631559371948, 0.06952495872974396, 0.07837320119142532, -0.05440506339073181, 0.07361314445734024, 0.0814136266708374, -0.10342995822429657, 0.0033363772090524435, 0.005175293888896704, 0.05267613008618355, 0.05655812472105026, 0.06137543544173241, 0.046200837939977646, -0.05742935463786125, 0.024368254467844963, -0.046032484620809555, -0.050076887011528015, -0.05202684551477432, -0.026179015636444092, 0.09793707728385925, -0.018163379281759262, 0.03961748257279396, -0.14458385109901428, -0.19209975004196167, 0.01823016069829464, 0.033666666597127914, -0.020297562703490257, -0.060577601194381714, -0.02536967769265175, -0.06187126040458679, 0.03572992607951164, -0.0826658383011818, -0.02708614617586136, -0.09256201982498169, 0.04654762148857117, -0.03912520781159401, 0.05425175651907921, -0.1168629452586174, 0.0361461415886879, -0.09442809224128723, 0.06230136752128601, -0.06144599989056587, 0.02892462909221649, -0.05526518076658249, 0.1472439020872116, -0.04345271363854408, 0.030599908903241158, -0.005841422826051712, 0.0463891327381134, 
-0.0394475944340229, 0.1520131677389145, -0.12623459100723267, -0.03208300843834877, 0.21295669674873352, -0.13113538920879364, -0.2497009038925171, 0.0349639430642128, 0.002796489279717207, 0.04948301240801811, 0.08241157233715057, 0.18217483162879944, 0.05802662670612335, -0.09874463826417923, 0.04110331833362579, 0.10700098425149918, -0.05555140599608421, -0.126938134431839, 0.05220675468444824, -0.024625031277537346, -0.04813733696937561, 0.028430430218577385, -0.03171507269144058, 0.06657008826732635, 0.03145793080329895, -0.0740916058421135, -0.05894884094595909, -0.07179258018732071, -0.04277116060256958, -0.04721243306994438, 0.017422910779714584, -0.091800756752491, -0.015474037267267704, 0.025533366948366165, 0.040020935237407684, 0.0004060211358591914, 0.07038389146327972, -0.09842027723789215, 0.0695149227976799, 0.027453823015093803, 0.06544965505599976, -0.10226864367723465, -0.09504609555006027, -0.026438070461153984, 0.04387880861759186, 0.0003547032829374075, -0.01168910413980484, 0.05043502151966095, -0.01094254944473505, -0.008145746774971485, -0.004406699910759926, 0.15396767854690552, 0.04116586595773697, -0.014470781199634075, -0.16532178223133087, 0.06279705464839935, -0.05429038405418396, 0.0342898927628994, -0.055597223341464996, 0.014614254236221313, 0.0400506891310215, 0.059219613671302795, -0.009651667438447475, 0.0740169882774353, -0.01293360348790884, -0.030095268040895462, -0.09559691697359085, -0.0033058489207178354, 0.08596154302358627, 0.020683422684669495, -0.09846539795398712, 0.20626917481422424, -0.21559995412826538, 0.2508217394351959, 0.2083105593919754, -0.1382969617843628, 0.06475634127855301, -0.019743040204048157, 0.014952019788324833, -0.015300431288778782, 0.004745954647660255, -0.009653192013502121, -0.015197466127574444, 0.028384149074554443, 0.16015996038913727, -0.0835576057434082, -0.023523999378085136, -0.026459572836756706, -0.09768660366535187, -0.023436829447746277, 0.02142687328159809, 0.10743945837020874, -0.11016388982534409, 0.15398894250392914, 0.28000855445861816, 0.012831035070121288, 0.1283872127532959, -0.06404340267181396, -0.0006227304693311453, 0.03933700919151306, 0.03637869283556938, 0.03045603446662426, -0.04983183741569519, -0.061251673847436905, 0.013527337461709976, 0.058511607348918915, 0.02824617549777031, 0.040925104171037674, -0.1442389190196991, -0.07366863638162613, 0.016488801687955856, -0.05586403235793114, -0.017304304987192154, 0.06408367305994034, -0.023917237296700478, 0.10883545130491257, -0.027048053219914436, -0.05180025473237038, 0.11493132263422012, -0.016593175008893013, -0.10264867544174194, 0.15504074096679688, -0.1639220118522644, -0.23358358442783356, -0.17113326489925385, -0.15323571860790253, -0.05052286386489868, 0.04940371960401535, 0.1168346256017685, -0.06041916832327843, -0.0624675452709198, -0.07117018103599548, -0.023984678089618683, -0.016212619841098785, -0.012983781285583973, 0.024012818932533264, 0.07383717596530914, -0.0022712901700288057, -0.11660207062959671, -0.03392496705055237, 0.07105152308940887, -0.07274240255355835, 0.061576731503009796, -0.058571625500917435, 0.09527840465307236, 0.1501920372247696, 0.04401963949203491, 0.012129317037761211, -0.03365081176161766, 0.08241689205169678, -0.043260928243398666, 0.006828297395259142, 0.2287110835313797, -0.07367298007011414, 0.05610356852412224, 0.11440660804510117, 0.005351613741368055, -0.09951704740524292, 0.041428353637456894, -0.007293272763490677, -0.10438942909240723, -0.24549630284309387, -0.07293237745761871, 
-0.08660831302404404, 0.12181318551301956, 0.025533583015203476, 0.07702022790908813, 0.12573254108428955, 0.07065768539905548, -0.02223394438624382, -0.022472605109214783, 0.08077355474233627, 0.06962413340806961, 0.2421642541885376, -0.035191312432289124, 0.11389726400375366, -0.1299736052751541, -0.052504658699035645, 0.0897417813539505, 0.10601828247308731, 0.09478438645601273, 0.12523874640464783, 0.052698828279972076, 0.08915893733501434, 0.0679144561290741, 0.06031094491481781, 0.1309911161661148, 0.023409487679600716, -0.02098395861685276, -0.029961273074150085, -0.06137565150856972, -0.03720918670296669, 0.07925619930028915, -0.11539658904075623, -0.1166432574391365, -0.03694268316030502, 0.008909621275961399, 0.0673418641090393, 0.14337387681007385, 0.023040400817990303, -0.1750260889530182, 0.0478791706264019, 0.14860093593597412, -0.03135848790407181, -0.06931457668542862, 0.13909876346588135, -0.03537112846970558, -0.06202077120542526, 0.1610237956047058, -0.006015919614583254, 0.11906086653470993, -0.012133733369410038, 0.05876830965280533, -0.050805941224098206, -0.08227253705263138, 0.02646891586482525, 0.12133967131376266, -0.2858532667160034, 0.15900805592536926, -0.0012854200322180986, 0.009135306812822819, -0.08562572300434113, 0.014235913753509521, 0.05562761425971985, 0.1693418025970459, 0.1550663262605667, -0.010624736547470093, -0.09972092509269714, 0.012875990942120552, -0.03446707874536514, 0.02544979564845562, 0.04566187039017677, 0.01549559086561203, 0.00876372866332531, -0.08764608949422836, -0.005336280446499586, 0.029532691463828087, 0.006316466256976128, -0.052534472197294235, -0.15284857153892517, 0.02432847209274769, 0.16195587813854218, 0.06882088631391525, -0.07248025387525558, 0.009606114588677883, -0.12744565308094025, 0.17510263621807098, -0.05741674453020096, -0.05950142443180084, -0.08037291467189789, -0.1628338247537613, 0.0014855796471238136, -0.03214791417121887, 0.06629501283168793, -0.08800063282251358, 0.06627807021141052, -0.0923648327589035, -0.16627733409404755, 0.1072782501578331, -0.11672227084636688, -0.045258134603500366, -0.020044872537255287, 0.1278725564479828, -0.09096217155456543, -0.0141003942117095, 0.04833841696381569, 0.026343099772930145, -0.031069772318005562, -0.11753955483436584, 0.006184241268783808, 0.04663126915693283, -0.032028380781412125, -0.04061836749315262, -0.08313170075416565, -0.03064495325088501, 0.028654873371124268, -0.08015987277030945, 0.22443737089633942, 0.2581051290035248, -0.054294854402542114, 0.18360745906829834, 0.12361860275268555, -0.12150541692972183, -0.3520907461643219, -0.15892016887664795, -0.19492626190185547, -0.07361853867769241, 0.0520462691783905, -0.13638226687908173, 0.1176304742693901, -0.007392758969217539, -0.07740622758865356, 0.09891339391469955, -0.19658774137496948, -0.103497214615345, 0.16724707186222076, 0.01847045123577118, 0.31728431582450867, -0.19290292263031006, -0.08335644751787186, -0.1310219019651413, -0.11268976330757141, 0.1047947108745575, -0.1885710507631302, 0.08004268258810043, 0.0028676684014499187, 0.04414181783795357, 0.007394392509013414, -0.04097127914428711, 0.12401671707630157, -0.04149835556745529, 0.05387594550848007, -0.14348749816417694, 0.06195880472660065, 0.08395522832870483, -0.05849483609199524, 0.06030379980802536, -0.21893317997455597, 0.004122020676732063, -0.04903101921081543, -0.025030750781297684, -0.004330743569880724, 0.06215525418519974, -0.011771511286497116, -0.029149172827601433, -0.05224951356649399, -0.04300396144390106, 
0.057781659066677094, 0.020241787657141685, 0.24345648288726807, -0.02750500850379467, 0.0709025114774704, 0.18396985530853271, 0.1621919870376587, -0.09473971277475357, 0.05840189382433891, -0.03747875243425369, -0.08832331001758575, 0.050323840230703354, -0.1880781203508377, 0.08205711841583252, 0.050596632063388824, -0.04862990230321884, 0.026582136750221252, 0.08669289946556091, -0.007924264296889305, -0.013901611790060997, 0.1552181988954544, -0.15333291888237, -0.06165948510169983, -0.009981723502278328, 0.10923422873020172, 0.05267970636487007, 0.030824871733784676, 0.16527117788791656, -0.00033099378924816847, 0.024746669456362724, 0.018454812467098236, 0.05435996130108833, -0.07265409082174301, 0.028410982340574265, -0.0030234246514737606, 0.01913832314312458, -0.1173047348856926, 0.1450347602367401, -0.0017974410438910127, -0.06212053447961807, 0.05056501924991608, 0.09981440007686615, -0.12434215843677521, -0.10258644819259644, -0.001678277738392353, 0.11585696041584015, -0.14349740743637085, -0.1401955932378769, -0.04807422310113907, -0.20175477862358093, 0.06245271489024162, 0.15779848396778107, 0.05400242283940315, 0.05897065997123718, 0.01953214220702648, -0.053619176149368286, -0.018388455733656883, 0.056276705116033554, -0.04171425476670265, 0.00455040717497468, -0.1411696821451187, -0.0611543245613575, 0.006379914935678244, 0.06228795647621155, -0.05473703518509865, -0.0034226207062602043, -0.09805069118738174, 0.0445922389626503, -0.1913081407546997, 0.0024888277985155582, -0.08109400421380997, -0.011132711544632912, 0.011867919005453587, -0.01577237993478775, -0.05120588093996048, -0.007666877005249262, -0.09017624706029892, -0.014473766088485718, -0.05365043878555298, 0.07900727540254593, -0.14050722122192383, -0.051453012973070145, 0.07735364139080048, -0.05096711218357086, 0.08219260722398758, 0.034179043024778366, -0.08822371810674667, 0.0839303657412529, -0.20760127902030945, -0.06278501451015472, 0.1093071773648262, 0.021602801978588104, -0.010687487199902534, -0.020250648260116577, -0.032841168344020844, 0.11929872632026672, -0.02164323627948761, 0.03795552998781204, 0.000491855782456696, -0.10117703676223755, -0.001961434492841363, -0.027526943013072014, -0.0967293381690979, 0.00858257245272398, -0.07276732474565506, 0.12081442028284073, -0.0377289243042469, 0.16085019707679749, -0.05175641179084778, 0.04143239185214043, -0.05037698522210121, 0.04683002829551697, -0.0027516561094671488, -0.12829959392547607, -0.10411583632230759, -0.06106819957494736, -0.03639479726552963, -0.03830012306571007, 0.26656490564346313, -0.00863250344991684, -0.08460022509098053, 0.0867665633559227, 0.04621561989188194, 0.038572896271944046, 0.03831927105784416, 0.2890394628047943, 0.07175426185131073, -0.010911982506513596, -0.08947287499904633, -0.0012647910043597221, 0.06760633736848831, -0.12615099549293518, 0.10219798237085342, 0.079454205930233, -0.0566348135471344, 0.11496880650520325, 0.04117318242788315, -0.002004389651119709, -0.05873047187924385, -0.04225403815507889, -0.031233087182044983, 0.05780573934316635, -0.018084688112139702, 0.08464422821998596, 0.22598418593406677, -0.030016735196113586, 0.004761712159961462, -0.063124880194664, -0.014269508421421051, -0.19397850334644318, -0.11156556010246277, -0.10215892642736435, -0.12973655760288239, 0.016240203753113747, -0.09029947221279144, 0.06896251440048218, 0.046953462064266205, 0.047069188207387924, -0.035664211958646774, 0.09989159554243088, -0.050176627933979034, -0.029473774135112762, 0.015955951064825058, 
-0.04663475975394249, 0.008174849674105644, 0.009049453772604465, -0.05053992196917534, -0.041069239377975464, -0.04403184354305267, -0.046694234013557434, 0.08054453879594803, 0.017612803727388382, 0.09982935339212418, -0.13117192685604095, -0.05707797408103943, -0.03294886648654938, 0.0465388260781765, 0.0044213575311005116, 0.14141716063022614, 0.027570487931370735, -0.03818569704890251, 0.08484098315238953, 0.14225083589553833, -0.05893876403570175, -0.17239627242088318, -0.022824814543128014, 0.14235004782676697, -0.008342484012246132, 0.07112833112478256, -0.040505312383174896, 0.016008978709578514, -0.05023522302508354, 0.36457526683807373, 0.240193173289299, -0.06288862973451614, 0.023364588618278503, -0.10669384896755219, 0.040239010006189346, 0.038694173097610474, 0.1109357699751854, 0.08876899629831314, 0.15907756984233856, -0.047828562557697296, -0.022912759333848953, -0.04703555256128311, 0.0004921231302432716, -0.15506905317306519, 0.09079214930534363, -0.010530421510338783, -0.05119113624095917, -0.010400574654340744, 0.0771745964884758, -0.1478210836648941, 0.0874616950750351, -0.05467142164707184, -0.06634823232889175, 0.0008774528978392482, -0.013443402014672756, 0.1700761318206787, -0.00815252773463726, 0.030682945623993874, -0.019516894593834877, -0.06130548194050789, 0.05704819783568382, -0.000253026548307389, -0.18228965997695923, 0.021935537457466125, 0.03572466969490051, -0.02465309202671051, 0.1198473647236824, 0.01096899900585413, 0.015440888702869415, 0.07635603845119476, 0.03146837279200554, -0.08705839514732361, 0.12259300798177719, 0.035459965467453, -0.03818256035447121, 0.024175487458705902, -0.0569760724902153, -0.03103850781917572, 0.02229689434170723, 0.04768621549010277, -0.05949154496192932, 0.04576407000422478, 0.03814719244837761, -0.10847533494234085, -0.041863590478897095, 0.009303782135248184, -0.09223558753728867, 0.06353133171796799, -0.004473753739148378, -0.04132523015141487, -0.01222181599587202, -0.029847828671336174, 0.03194095939397812, -0.02299630083143711, -0.17723385989665985, -0.02613982930779457, -0.05691012740135193, -0.04178420081734657, 0.09534625709056854, 0.04879968985915184, -0.2346205711364746, -0.010979905724525452, -0.10601088404655457, 0.025151975452899933, -0.1757896989583969, 0.05614611878991127, 0.17992019653320312, -0.007467930670827627, -0.0302322618663311, -0.18319188058376312, 0.06498275697231293, 0.07983408868312836, -0.03149368613958359, -0.10969732701778412 ]
null
null
transformers
# gpt-neo-125M-couples_therapist_full_renamed This model is a fine-tuned version of [EleutherAI/gpt-neo-125M](https://huggingface.co/EleutherAI/gpt-neo-125M) on an unspecified dataset. It achieves the following results on the evaluation set: - Loss: 3.0778 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | |:-------------:|:-----:|:----:|:---------------:| | No log | 1.0 | 73 | 3.2091 | | No log | 2.0 | 146 | 3.1027 | | No log | 3.0 | 219 | 3.0778 | ### Framework versions - Transformers 4.35.0 - Pytorch 2.1.0+cu118 - Datasets 2.14.6 - Tokenizers 0.14.1
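For readers who want to reproduce this setup, the hyperparameters above map onto the Hugging Face `Trainer` API roughly as in the sketch below. It is a minimal, hedged example: the card does not disclose its dataset, so the training texts, output directory, and per-epoch evaluation setting are placeholders or assumptions; the optimizer, Adam betas, and epsilon shown in the card are the `transformers` defaults.

```python
from datasets import Dataset
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

base = "EleutherAI/gpt-neo-125M"
tokenizer = AutoTokenizer.from_pretrained(base)
tokenizer.pad_token = tokenizer.eos_token  # GPT-Neo ships without a pad token
model = AutoModelForCausalLM.from_pretrained(base)

# Placeholder corpus: the card does not disclose its training data.
texts = ["example couples-therapy transcript line"] * 32

def tokenize(batch):
    out = tokenizer(batch["text"], truncation=True, max_length=64, padding="max_length")
    out["labels"] = out["input_ids"].copy()  # causal LM: labels mirror the inputs
    return out

dataset = Dataset.from_dict({"text": texts}).map(tokenize, batched=True)

# Mirrors the hyperparameters listed in the card.
args = TrainingArguments(
    output_dir="gpt-neo-125M-couples_therapist_full_renamed",  # placeholder path
    learning_rate=2e-05,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=3.0,
    evaluation_strategy="epoch",  # assumption: the card logs one eval loss per epoch
)

trainer = Trainer(model=model, args=args, train_dataset=dataset, eval_dataset=dataset)
trainer.train()
```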
{"license": "mit", "tags": ["generated_from_trainer"], "base_model": "EleutherAI/gpt-neo-125M", "model-index": [{"name": "gpt-neo-125M-couples_therapist_full_renamed", "results": []}]}
text-generation
ailments/gpt-neo-125M-couples_therapist_full_renamed
[ "transformers", "tensorboard", "safetensors", "gpt_neo", "text-generation", "generated_from_trainer", "base_model:EleutherAI/gpt-neo-125M", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T18:00:45+00:00
[]
[]
TAGS #transformers #tensorboard #safetensors #gpt_neo #text-generation #generated_from_trainer #base_model-EleutherAI/gpt-neo-125M #license-mit #autotrain_compatible #endpoints_compatible #region-us
gpt-neo-125M-couples\_therapist\_full\_renamed ============================================== This model is a fine-tuned version of EleutherAI/gpt-neo-125M on an unspecified dataset. It achieves the following results on the evaluation set: * Loss: 3.0778 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 2e-05 * train\_batch\_size: 8 * eval\_batch\_size: 8 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * num\_epochs: 3.0 ### Training results ### Framework versions * Transformers 4.35.0 * Pytorch 2.1.0+cu118 * Datasets 2.14.6 * Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3.0", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #tensorboard #safetensors #gpt_neo #text-generation #generated_from_trainer #base_model-EleutherAI/gpt-neo-125M #license-mit #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3.0", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 73, 98, 4, 33 ]
[ "passage: TAGS\n#transformers #tensorboard #safetensors #gpt_neo #text-generation #generated_from_trainer #base_model-EleutherAI/gpt-neo-125M #license-mit #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3.0### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.09340846538543701, 0.06741952151060104, -0.0018559551099315286, 0.10453764349222183, 0.13651709258556366, 0.016949983313679695, 0.1594475656747818, 0.11260412633419037, -0.07910064607858658, 0.060557398945093155, 0.13735359907150269, 0.11269506812095642, 0.018921131268143654, 0.13898272812366486, -0.05404054373502731, -0.22676658630371094, 0.015852589160203934, 0.039878424257040024, -0.05083145573735237, 0.10743747651576996, 0.09119011461734772, -0.13102523982524872, 0.10498366504907608, -0.004528381861746311, -0.18653756380081177, 0.01274097990244627, 0.02288978174328804, -0.053470537066459656, 0.13368836045265198, 0.050591450184583664, 0.12817592918872833, 0.026488762348890305, 0.07816201448440552, -0.16509567201137543, 0.013843819499015808, 0.059130266308784485, -0.0034194367472082376, 0.08001767098903656, 0.046467285603284836, 0.0032968269661068916, 0.09952326864004135, -0.07144073396921158, 0.05813327431678772, 0.016279153525829315, -0.13425257802009583, -0.20619845390319824, -0.08709648996591568, 0.026502031832933426, 0.09018822759389877, 0.08982151001691818, -0.016340691596269608, 0.1508723944425583, -0.028957754373550415, 0.09080027788877487, 0.21699802577495575, -0.3209638297557831, -0.07030823826789856, 0.05039776861667633, 0.0564708448946476, 0.10388913750648499, -0.09976650774478912, 0.00947798416018486, 0.07137077301740646, 0.02705993689596653, 0.12904521822929382, -0.026136990636587143, -0.022079378366470337, 0.014819343574345112, -0.15040358901023865, -0.007206631824374199, 0.15805460512638092, 0.042646411806344986, -0.04538669437170029, -0.04375893250107765, -0.07112518697977066, -0.14443308115005493, -0.03805292770266533, -0.028159506618976593, 0.03866248205304146, -0.0255644079297781, -0.0781865045428276, -0.051967374980449677, -0.10761233419179916, -0.08368779718875885, -0.06781763583421707, 0.16275060176849365, 0.03216961771249771, -0.0000031286228932003723, -0.010622152127325535, 0.1012996956706047, -0.04781263321638107, -0.12811994552612305, 0.011781255714595318, 0.023629438132047653, 0.01663234271109104, -0.05211063474416733, -0.05462479218840599, -0.09996023774147034, 0.011203157715499401, 0.13395370543003082, -0.04396717995405197, 0.0391448549926281, 0.004313960671424866, 0.04769232124090195, -0.10892993956804276, 0.16989482939243317, -0.04974313825368881, -0.0473102405667305, 0.01519484631717205, 0.08105960488319397, 0.05126696452498436, -0.020533280447125435, -0.13336916267871857, 0.024664228782057762, 0.10118542611598969, 0.00492361793294549, -0.05741296336054802, 0.07821020483970642, -0.040083903819322586, -0.002169807907193899, 0.031586240977048874, -0.08138757199048996, 0.02167546935379505, -0.018036292865872383, -0.06304176151752472, -0.05257497355341911, 0.024433020502328873, 0.025598326697945595, 0.020732631906867027, 0.09700154513120651, -0.10882692784070969, 0.0013187802396714687, -0.08617467433214188, -0.13152378797531128, 0.00845503993332386, -0.07547764480113983, 0.020208533853292465, -0.12361025810241699, -0.18587906658649445, -0.004996732342988253, 0.045420993119478226, -0.03449633717536926, -0.01628039963543415, -0.05108046159148216, -0.07622867822647095, 0.02143900655210018, -0.017577799037098885, 0.07106568664312363, -0.05982520058751106, 0.0950159803032875, 0.06359951943159103, 0.06471728533506393, -0.06557336449623108, 0.03034041076898575, -0.09883438050746918, 0.031212320551276207, -0.1905936300754547, 0.011738793924450874, -0.05181790888309479, 0.06645064800977707, -0.07647807896137238, -0.0707361176609993, -0.011806601658463478, 
0.018269404768943787, 0.07554299384355545, 0.08901361376047134, -0.1610804945230484, -0.07812689989805222, 0.16975097358226776, -0.09425216168165207, -0.15120205283164978, 0.14144089818000793, -0.04655912518501282, 0.038469959050416946, 0.07669690996408463, 0.18893781304359436, 0.05750339478254318, -0.09655655175447464, -0.021409479901194572, -0.022391660138964653, 0.05393349379301071, -0.04626837372779846, 0.07960451394319534, -0.001834406633861363, 0.014187231659889221, 0.005642542615532875, -0.051191285252571106, 0.05479926988482475, -0.08422450721263885, -0.0746871829032898, -0.03635208681225777, -0.10902412235736847, 0.06326941400766373, 0.03998046740889549, 0.0690087303519249, -0.12317600101232529, -0.10530778765678406, 0.06538917124271393, 0.07192122936248779, -0.07127262651920319, 0.01969650574028492, -0.08594193309545517, 0.09296225011348724, -0.09405974298715591, -0.020548345521092415, -0.12938198447227478, -0.07954070717096329, 0.013998794369399548, 0.010140396654605865, 0.032743342220783234, 0.01717161200940609, 0.08348057419061661, 0.09627056866884232, -0.07146988064050674, -0.031029915437102318, -0.01747305877506733, 0.007084723096340895, -0.12753504514694214, -0.18763010203838348, -0.008464720100164413, -0.03160985931754112, 0.12455727159976959, -0.23301421105861664, 0.05018661543726921, 0.007964402437210083, 0.09732412546873093, 0.041550081223249435, -0.014489129185676575, -0.05069836229085922, 0.059105437248945236, -0.049264758825302124, -0.06924644857645035, 0.04680664837360382, 0.007367973681539297, -0.1068475991487503, -0.021459177136421204, -0.19656150043010712, 0.21375223994255066, 0.1451265513896942, -0.07221601903438568, -0.07543722540140152, 0.010497531853616238, -0.03499339520931244, -0.02894485369324684, -0.0280768945813179, -0.020031947642564774, 0.1394992172718048, -0.017328621819615364, 0.15222439169883728, -0.0817989781498909, -0.03855075314640999, 0.02984793484210968, -0.0443076491355896, -0.003082706592977047, 0.09590935707092285, 0.08426044881343842, -0.10046994686126709, 0.1541910022497177, 0.16831561923027039, -0.09468808770179749, 0.15616044402122498, -0.01868322491645813, -0.06057994067668915, -0.02814668044447899, 0.004406376276165247, 0.021690277382731438, 0.12330693751573563, -0.10552427172660828, -0.009125680662691593, 0.004280764609575272, 0.016137246042490005, 0.025889787822961807, -0.2154330462217331, -0.03348574787378311, 0.038347143679857254, -0.044342365115880966, 0.03287487104535103, -0.01340689230710268, -0.0217022392898798, 0.10206874459981918, 0.006488382816314697, -0.069630928337574, 0.03564329445362091, 0.01146662887185812, -0.07677417993545532, 0.2022230625152588, -0.08244609832763672, -0.13729771971702576, -0.1442173272371292, -0.06720718741416931, -0.05637754127383232, 0.041667405515909195, 0.05736604705452919, -0.07967952638864517, -0.041927460581064224, -0.10909425467252731, 0.038776129484176636, 0.010577295906841755, 0.03329331427812576, 0.025357119739055634, -0.023368796333670616, 0.05984845757484436, -0.1046847552061081, -0.010047974064946175, -0.03786487504839897, -0.06554718315601349, 0.03044622763991356, 0.03158445283770561, 0.1250663548707962, 0.1464512050151825, -0.022802455350756645, -0.0020338804461061954, -0.0391121506690979, 0.22151397168636322, -0.0803830474615097, -0.008974621072411537, 0.1356273591518402, -0.012967083603143692, 0.04924776405096054, 0.12537819147109985, 0.05348687246441841, -0.10191402584314346, 0.02714756317436695, 0.03907126560807228, -0.03811272978782654, -0.1986304074525833, -0.03459606319665909, 
-0.03775512054562569, 0.015825433656573296, 0.0799705982208252, 0.0410473607480526, 0.047751445323228836, 0.0739952102303505, 0.013063747435808182, 0.08723106235265732, -0.026063328608870506, 0.08231503516435623, 0.1096150204539299, 0.04007735475897789, 0.14366760849952698, -0.046384744346141815, -0.06784116476774216, 0.041722044348716736, 0.006694893818348646, 0.20351086556911469, 0.02464771270751953, 0.13759581744670868, 0.04618169367313385, 0.13352333009243011, 0.005352546460926533, 0.04910392686724663, -0.008897489868104458, -0.05774754285812378, -0.014617325738072395, -0.05053771659731865, -0.00980229303240776, 0.044764790683984756, -0.07679665088653564, 0.051044438034296036, -0.10045947879552841, 0.017545759677886963, 0.05579815432429314, 0.18073302507400513, 0.05066103860735893, -0.36032554507255554, -0.09265457093715668, 0.0329725481569767, -0.01568187028169632, -0.028392843902111053, 0.020241500809788704, 0.1315590888261795, -0.043362684547901154, 0.04141960293054581, -0.08013132214546204, 0.06857790052890778, -0.031217947602272034, 0.04384729266166687, 0.056724801659584045, 0.10378989577293396, -0.026122156530618668, 0.05543261766433716, -0.2731448709964752, 0.27295929193496704, 0.022436536848545074, 0.08247867226600647, -0.036970868706703186, 0.004425073508173227, 0.021336624398827553, 0.08601050823926926, 0.07176819443702698, -0.02572859637439251, -0.07365477085113525, -0.1883000135421753, -0.05177466943860054, 0.02872440218925476, 0.11380628496408463, -0.04572289437055588, 0.12018269300460815, -0.03356243669986725, 0.006751517299562693, 0.08512526005506516, -0.0024880089331418276, -0.08021244406700134, -0.10090342909097672, 0.006530867423862219, 0.03743591532111168, -0.03016456589102745, -0.08261390030384064, -0.101606085896492, -0.13599997758865356, 0.164457306265831, -0.046492308378219604, -0.030099673196673393, -0.10422491282224655, 0.0587075836956501, 0.059897229075431824, -0.08575237542390823, 0.043306101113557816, 0.014249416068196297, 0.09150288999080658, 0.011601768434047699, -0.052446238696575165, 0.13480351865291595, -0.07492634654045105, -0.1688240021467209, -0.0641443207859993, 0.10439112782478333, 0.014776676893234253, 0.045920826494693756, -0.007243471685796976, 0.019626092165708542, -0.01902625896036625, -0.07756128162145615, 0.037132177501916885, -0.014819519594311714, 0.05136004835367203, -0.0020726819057017565, -0.032307419925928116, 0.012638232670724392, -0.052548859268426895, -0.04532736539840698, 0.1508426070213318, 0.2917105555534363, -0.07425422966480255, -0.012764573097229004, 0.057932645082473755, -0.06557603925466537, -0.1930941641330719, 0.05507703498005867, 0.009159471839666367, 0.0019605448469519615, 0.04071231186389923, -0.14343766868114471, 0.09059084206819534, 0.1084979996085167, -0.025578582659363747, 0.12599404156208038, -0.2924797534942627, -0.13976943492889404, 0.11585987359285355, 0.15357418358325958, 0.1322532296180725, -0.1788998246192932, -0.04285441339015961, -0.0345904566347599, -0.11509257555007935, 0.09881594777107239, -0.12110810726881027, 0.11153460294008255, -0.0091604795306921, 0.06458908319473267, 0.0048758890479803085, -0.0603545717895031, 0.1321655660867691, -0.02762565016746521, 0.10463788360357285, -0.07317300885915756, -0.011749879457056522, 0.06008821353316307, -0.04831390082836151, 0.02142236940562725, -0.10984993726015091, 0.02168295904994011, -0.048149775713682175, -0.03848946467041969, -0.04759328439831734, 0.03930085524916649, -0.023697052150964737, -0.0774875059723854, -0.05508623644709587, 0.027984485030174255, 
0.015051442198455334, -0.017106065526604652, 0.15100157260894775, 0.007581754121929407, 0.17421592772006989, 0.09508845955133438, 0.07367496192455292, -0.08047401905059814, -0.011921401135623455, 0.006499194540083408, -0.034518539905548096, 0.06247299909591675, -0.14004090428352356, 0.02548781782388687, 0.125431627035141, -0.0021535123232752085, 0.1403299868106842, 0.0781966969370842, -0.04844016209244728, 0.031575269997119904, 0.07599583268165588, -0.1684030294418335, -0.12675099074840546, -0.021355295553803444, -0.02933439239859581, -0.10770855098962784, 0.07892489433288574, 0.13741426169872284, -0.07782255113124847, 0.007445192895829678, -0.010431156493723392, 0.003839283250272274, -0.03459110110998154, 0.18192201852798462, 0.0639912337064743, 0.04010182246565819, -0.07108857482671738, 0.0699920728802681, 0.039367448538541794, -0.07404622435569763, 0.021249467507004738, 0.052507005631923676, -0.07281890511512756, -0.04019024595618248, 0.029114702716469765, 0.1925891637802124, -0.07315678894519806, -0.04289703071117401, -0.15693321824073792, -0.11421196162700653, 0.046813398599624634, 0.1841183453798294, 0.082201287150383, 0.007361748721450567, -0.02820504456758499, 0.036829620599746704, -0.12013575434684753, 0.10982444137334824, 0.028777029365301132, 0.10306892544031143, -0.16643285751342773, 0.1549527794122696, -0.00906008668243885, 0.0043395203538239, -0.028265226632356644, 0.052353017032146454, -0.11301163583993912, -0.008731534704566002, -0.11477985233068466, -0.02545410767197609, -0.027561092749238014, -0.009619781747460365, -0.0029954195488244295, -0.0629439428448677, -0.07686851918697357, 0.007254456169903278, -0.09860145300626755, -0.01874585449695587, 0.043538179248571396, 0.03837084397673607, -0.11994215101003647, -0.0306144617497921, 0.0205730888992548, -0.058554138988256454, 0.06579739600419998, 0.013868069276213646, 0.03933541476726532, 0.06185401603579521, -0.15681473910808563, 0.04533541947603226, 0.06000258028507233, 0.006733884569257498, 0.04607157036662102, -0.05946559086441994, -0.017362922430038452, -0.012071390636265278, 0.06558287143707275, 0.02330869995057583, 0.0707218274474144, -0.11557678878307343, 0.007298069540411234, -0.03161466121673584, -0.04715980216860771, -0.05752509459853172, 0.03457074239850044, 0.07785018533468246, 0.00799924973398447, 0.18410679697990417, -0.09931176155805588, 0.005068773403763771, -0.20347140729427338, 0.012146473862230778, 0.006014558952301741, -0.126397505402565, -0.08179935067892075, -0.04838176816701889, 0.055297575891017914, -0.05813963711261749, 0.14317983388900757, 0.00497810821980238, 0.02294103614985943, 0.04032149910926819, -0.024688472971320152, 0.053278204053640366, 0.02024233527481556, 0.22505879402160645, 0.03532817214727402, -0.03996822237968445, 0.007914183661341667, 0.045991454273462296, 0.12241290509700775, 0.05316048115491867, 0.18579700589179993, 0.1387254297733307, -0.04172413796186447, 0.11575312912464142, 0.051794230937957764, -0.06068650260567665, -0.16796915233135223, 0.029009757563471794, -0.049279723316431046, 0.084405317902565, -0.020798994228243828, 0.2037607878446579, 0.13845396041870117, -0.1434938609600067, 0.000916650053113699, -0.04460619390010834, -0.07996907830238342, -0.1096401959657669, -0.06307537108659744, -0.10052996873855591, -0.15423540771007538, 0.004570727236568928, -0.11458595842123032, 0.015522954985499382, 0.09481615573167801, 0.010820102877914906, -0.018529172986745834, 0.20141571760177612, 0.012132640928030014, 0.02797817997634411, 0.03360072895884514, -0.0013110163854435086, 
-0.026949442923069, -0.07010170072317123, -0.0924760103225708, 0.0008309579570777714, -0.018251029774546623, 0.025870561599731445, -0.05353248119354248, -0.038007210940122604, 0.04868901148438454, -0.004736776929348707, -0.10316608846187592, -0.0009608439286239445, 0.031804461032152176, 0.05202686786651611, 0.026270641013979912, 0.001850993256084621, -0.0022691849153488874, -0.0050406730733811855, 0.2196269929409027, -0.0774664580821991, -0.05016356334090233, -0.08559688180685043, 0.1929437518119812, 0.023561011999845505, 0.022242993116378784, -0.0025323175359517336, -0.08952666074037552, 0.02533365599811077, 0.2191399186849594, 0.18828435242176056, -0.08493376523256302, -0.0025263044517487288, -0.003618644317612052, -0.007427150849252939, -0.04494783282279968, 0.09282826632261276, 0.09986437112092972, 0.04060587286949158, -0.0739351287484169, -0.05125167965888977, -0.04396805912256241, 0.006093745119869709, -0.03385583311319351, 0.05531302094459534, 0.03918347880244255, 0.028281649574637413, -0.04228479415178299, 0.054826077073812485, -0.031426724046468735, -0.09001324325799942, 0.01844901219010353, -0.20178304612636566, -0.14339803159236908, -0.006316816434264183, 0.12237155437469482, -0.028107961639761925, 0.06417646259069443, -0.02914784476161003, -0.008420825935900211, 0.03776328265666962, -0.011319831013679504, -0.08166877180337906, -0.07727497816085815, 0.05907245725393295, -0.08372912555932999, 0.2396562546491623, -0.04560381919145584, 0.03857458382844925, 0.1339457631111145, 0.028760787099599838, -0.07757771760225296, 0.09674894064664841, 0.04623831436038017, -0.07357596606016159, 0.039675042033195496, 0.0954752117395401, -0.036856766790151596, 0.12414141744375229, 0.05781061202287674, -0.13224495947360992, 0.019490057602524757, -0.049868665635585785, -0.07577546685934067, -0.04669976979494095, -0.04173165559768677, -0.07404816895723343, 0.13999074697494507, 0.16924120485782623, -0.03157460689544678, 0.006163037847727537, -0.03727591782808304, 0.027786999940872192, 0.07878934592008591, 0.08292633295059204, -0.02406875602900982, -0.2487703412771225, 0.01202327199280262, 0.07483787089586258, -0.011740419082343578, -0.3095781207084656, -0.08348195999860764, -0.016715215519070625, -0.03761017695069313, -0.09398409724235535, 0.08110729604959488, 0.1442752480506897, 0.049353018403053284, -0.06592199951410294, -0.09621013700962067, -0.08011601120233536, 0.15307030081748962, -0.1397988200187683, -0.10149189084768295 ]
null
null
transformers
# ICU_Returns_COReClinicalBioBERT This model is a fine-tuned version of [bvanaken/CORe-clinical-outcome-biobert-v1](https://huggingface.co/bvanaken/CORe-clinical-outcome-biobert-v1) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8391 - F1: 0.7210 - Roc Auc: 0.7335 - Precision with 0: 0.9048 - Precision with 1: 0.6641 - Recall with 0: 0.5220 - Recall with 1: 0.9451 - Accuracy: 0.7335 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 32 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 13 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | Roc Auc | Precision with 0 | Precision with 1 | Recall with 0 | Recall with 1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|:-----------------:|:-----------------:|:--------------:|:--------------:|:---------:| | No log | 1.0 | 46 | 0.6908 | 0.4103 | 0.5330 | 0.875 | 0.5172 | 0.0769 | 0.9890 | 0.5330 | | No log | 2.0 | 92 | 0.6839 | 0.4361 | 0.5357 | 0.7241 | 0.5194 | 0.1154 | 0.9560 | 0.5357 | | No log | 3.0 | 138 | 0.7113 | 0.4827 | 0.5549 | 0.7174 | 0.5314 | 0.1813 | 0.9286 | 0.5549 | | No log | 4.0 | 184 | 0.6089 | 0.6674 | 0.6703 | 0.7095 | 0.6435 | 0.5769 | 0.7637 | 0.6703 | | No log | 5.0 | 230 | 0.6138 | 0.6533 | 0.6731 | 0.8316 | 0.6171 | 0.4341 | 0.9121 | 0.6731 | | No log | 6.0 | 276 | 0.6892 | 0.7153 | 0.7253 | 0.8596 | 0.664 | 0.5385 | 0.9121 | 0.7253 | | No log | 7.0 | 322 | 1.0376 | 0.6385 | 0.6703 | 0.9189 | 0.6069 | 0.3736 | 0.9670 | 0.6703 | | No log | 8.0 | 368 | 1.1796 | 0.7088 | 0.7225 | 0.8932 | 0.6552 | 0.5055 | 0.9396 | 0.7225 | | No log | 9.0 | 414 | 1.0800 | 0.7749 | 0.7802 | 0.9048 | 0.7143 | 0.6264 | 0.9341 | 0.7802 | | No log | 10.0 | 460 | 2.0318 | 0.6717 | 0.6951 | 0.9176 | 0.6272 | 0.4286 | 0.9615 | 0.6951 | | 0.3613 | 11.0 | 506 | 1.9762 | 0.6796 | 0.7005 | 0.9101 | 0.6327 | 0.4451 | 0.9560 | 0.7005 | | 0.3613 | 12.0 | 552 | 1.7367 | 0.7469 | 0.7555 | 0.9043 | 0.6867 | 0.5714 | 0.9396 | 0.7555 | | 0.3613 | 13.0 | 598 | 1.8391 | 0.7210 | 0.7335 | 0.9048 | 0.6641 | 0.5220 | 0.9451 | 0.7335 | ### Framework versions - Transformers 4.34.0 - Pytorch 2.1.0+cu121 - Datasets 2.14.5 - Tokenizers 0.14.1
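The per-class precision and recall figures reported above can be recomputed from raw predictions with scikit-learn. A minimal sketch follows, assuming binary labels where class 1 marks an ICU return; the label arrays are dummies for illustration, and the macro averaging for F1 is an assumption, since the card does not state how its aggregate F1 was computed.

```python
import numpy as np
from sklearn.metrics import (
    accuracy_score,
    f1_score,
    precision_score,
    recall_score,
    roc_auc_score,
)

# Dummy stand-ins for model predictions on the held-out evaluation set;
# class 1 = patient returned to the ICU (an assumption about label meaning).
y_true = np.array([0, 0, 1, 1, 1, 0, 1, 0, 1, 1])
y_pred = np.array([0, 1, 1, 1, 1, 0, 1, 1, 0, 1])

metrics = {
    "F1": f1_score(y_true, y_pred, average="macro"),  # averaging choice is assumed
    "Roc Auc": roc_auc_score(y_true, y_pred),
    "Precision with 0": precision_score(y_true, y_pred, pos_label=0),
    "Precision with 1": precision_score(y_true, y_pred, pos_label=1),
    "Recall with 0": recall_score(y_true, y_pred, pos_label=0),
    "Recall with 1": recall_score(y_true, y_pred, pos_label=1),
    "Accuracy": accuracy_score(y_true, y_pred),
}
for name, value in metrics.items():
    print(f"{name}: {value:.4f}")
```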
{"tags": ["generated_from_trainer"], "base_model": "bvanaken/CORe-clinical-outcome-biobert-v1", "model-index": [{"name": "ICU_Returns_COReClinicalBioBERT", "results": []}]}
text-classification
moro01525/ICU_Returns_COReClinicalBioBERT
[ "transformers", "pytorch", "bert", "text-classification", "generated_from_trainer", "base_model:bvanaken/CORe-clinical-outcome-biobert-v1", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T18:04:18+00:00
[]
[]
TAGS #transformers #pytorch #bert #text-classification #generated_from_trainer #base_model-bvanaken/CORe-clinical-outcome-biobert-v1 #autotrain_compatible #endpoints_compatible #region-us
ICU\_Returns\_COReClinicalBioBERT ================================= This model is a fine-tuned version of bvanaken/CORe-clinical-outcome-biobert-v1 on an unknown dataset. It achieves the following results on the evaluation set: * Loss: 1.8391 * F1: 0.7210 * Roc Auc: 0.7335 * Precision with 0: 0.9048 * Precision with 1: 0.6641 * Recall with 0: 0.5220 * Recall with 1: 0.9451 * Accuracy: 0.7335 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 0.0001 * train\_batch\_size: 32 * eval\_batch\_size: 16 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * num\_epochs: 13 ### Training results ### Framework versions * Transformers 4.34.0 * Pytorch 2.1.0+cu121 * Datasets 2.14.5 * Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13", "### Training results", "### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #pytorch #bert #text-classification #generated_from_trainer #base_model-bvanaken/CORe-clinical-outcome-biobert-v1 #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13", "### Training results", "### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ 66, 97, 4, 33 ]
[ "passage: TAGS\n#transformers #pytorch #bert #text-classification #generated_from_trainer #base_model-bvanaken/CORe-clinical-outcome-biobert-v1 #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 13### Training results### Framework versions\n\n\n* Transformers 4.34.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.5\n* Tokenizers 0.14.1" ]
[ -0.0993773490190506, 0.0661829262971878, -0.00272336695343256, 0.100232794880867, 0.16182821989059448, 0.04326564818620682, 0.10973936319351196, 0.12175281345844269, -0.09269033372402191, 0.03996890038251877, 0.13132980465888977, 0.1315375566482544, -0.0022924623917788267, 0.13698230683803558, -0.04000755026936531, -0.27129051089286804, -0.010316860862076283, 0.07063473761081696, -0.04823487997055054, 0.1293928176164627, 0.08829107135534286, -0.13805875182151794, 0.08103678375482559, -0.0006048455834388733, -0.20302030444145203, 0.02124049700796604, 0.03092792257666588, -0.034102365374565125, 0.14653834700584412, 0.018087105825543404, 0.1427239179611206, 0.01692541316151619, 0.09451068937778473, -0.18787461519241333, 0.009800671599805355, 0.02182619646191597, 0.012436914257705212, 0.0965130552649498, 0.021564239636063576, -0.023340832442045212, 0.14523762464523315, -0.08924371749162674, 0.06349382549524307, 0.01531866006553173, -0.12037770450115204, -0.21907657384872437, -0.05053859204053879, 0.058916907757520676, 0.059253375977277756, 0.08090019226074219, -0.01670779101550579, 0.12457486987113953, -0.09440927952528, 0.10020481050014496, 0.23695018887519836, -0.2721196413040161, -0.0667378380894661, 0.0308715607970953, 0.04203290864825249, 0.07044436037540436, -0.11291365325450897, -0.02207004278898239, 0.05680042505264282, 0.0327371284365654, 0.10957810282707214, -0.032803937792778015, -0.07165620476007462, 0.023587245494127274, -0.13785101473331451, -0.009634779766201973, 0.15364916622638702, 0.026699425652623177, -0.029987134039402008, -0.018286170437932014, -0.046271100640296936, -0.18314585089683533, -0.03015272505581379, -0.0294716265052557, 0.03652523085474968, -0.06324460357427597, -0.07569067180156708, 0.00828513689339161, -0.08806303888559341, -0.09240484982728958, -0.05177832767367363, 0.16837672889232635, 0.03559504821896553, 0.00013910805864725262, -0.014348025433719158, 0.1248074620962143, -0.010807310231029987, -0.11939217895269394, 0.020603107288479805, 0.02055724710226059, -0.008372506126761436, -0.06905768811702728, -0.06782670319080353, -0.010193444788455963, 0.006266293115913868, 0.12410265952348709, -0.043728336691856384, 0.029895635321736336, 0.041043270379304886, 0.021918635815382004, -0.08104949444532394, 0.17606671154499054, -0.04364713653922081, -0.03644150123000145, -0.007934988476336002, 0.07505147159099579, -0.014159644953906536, 0.018286284059286118, -0.11631323397159576, -0.02042602375149727, 0.11324494332075119, 0.011171737685799599, -0.12350187450647354, 0.06867558509111404, -0.05794449523091316, -0.03667502477765083, -0.007132379803806543, -0.07711658626794815, 0.01885540597140789, 0.009722531773149967, -0.07527200132608414, -0.014329895377159119, 0.03156755119562149, -0.005935213528573513, -0.01364364568144083, 0.12650611996650696, -0.10441102832555771, 0.030337033793330193, -0.08857738971710205, -0.11876992881298065, -0.016757989302277565, -0.07986539602279663, 0.039083611220121384, -0.08912817388772964, -0.1288592517375946, -0.01973288133740425, 0.025816660374403, -0.032935451716184616, -0.04132237285375595, -0.06814923137426376, -0.0541379489004612, 0.012974645011126995, -0.010714261792600155, 0.08750569075345993, -0.061780545860528946, 0.11135458946228027, 0.054552048444747925, 0.08334457874298096, -0.048668522387742996, 0.04864978790283203, -0.09794779866933823, 0.013398000970482826, -0.19292938709259033, 0.03831109404563904, -0.0533132404088974, 0.04634680226445198, -0.08029497414827347, -0.11616446822881699, 0.013474497012794018, 
-0.00490679731592536, 0.08228351920843124, 0.09161199629306793, -0.13672491908073425, -0.09887808561325073, 0.17474399507045746, -0.056466709822416306, -0.1325162798166275, 0.10366571694612503, -0.0777323916554451, 0.06788172572851181, 0.07382967323064804, 0.18155527114868164, 0.0768110379576683, -0.0785333439707756, 0.0014427189016714692, -0.02312806062400341, 0.07064191997051239, -0.03325282782316208, 0.07034984976053238, 0.013425477780401707, -0.011473150923848152, 0.022737840190529823, -0.0486714206635952, 0.05686158314347267, -0.12010809779167175, -0.07466961443424225, -0.018326913937926292, -0.10285182297229767, 0.08250567317008972, 0.060632847249507904, 0.08334414660930634, -0.11282792687416077, -0.0639459565281868, 0.1033167615532875, 0.09774166345596313, -0.06671939045190811, 0.01886455900967121, -0.05851197615265846, 0.051220182329416275, -0.048370543867349625, -0.03001103363931179, -0.17304351925849915, -0.03274362534284592, 0.012438482604920864, 0.03871525451540947, 0.017663441598415375, 0.013822481967508793, 0.065301813185215, 0.08148398995399475, -0.06603481620550156, -0.026164891198277473, -0.05583519488573074, 0.0001821590994950384, -0.1365533322095871, -0.20398342609405518, -0.029670540243387222, -0.025176798924803734, 0.12956549227237701, -0.22125491499900818, 0.028952671214938164, -0.023954397067427635, 0.08635146915912628, 0.020949892699718475, -0.020643381401896477, -0.038681935518980026, 0.07767882943153381, -0.03260001167654991, -0.0317344106733799, 0.0716463252902031, -0.030905617401003838, -0.09999458491802216, -0.06183650344610214, -0.09342659264802933, 0.16114592552185059, 0.10908424854278564, -0.1245400458574295, -0.1007976233959198, -0.044236522167921066, -0.06110960990190506, -0.024803442880511284, -0.045260652899742126, -0.009482352063059807, 0.19801519811153412, -0.012799677439033985, 0.151034414768219, -0.06637626141309738, -0.0396999791264534, 0.0022385488264262676, -0.028023308143019676, 0.04823726415634155, 0.13083483278751373, 0.10999242961406708, -0.08127617835998535, 0.11769821494817734, 0.12553125619888306, -0.0852278620004654, 0.17588327825069427, -0.02342732436954975, -0.08274075388908386, -0.025707349181175232, -0.04147130995988846, -0.015515399165451527, 0.10770032554864883, -0.18947352468967438, -0.01982482150197029, 0.016252577304840088, 0.010105804540216923, 0.01689516007900238, -0.19593995809555054, -0.042291417717933655, 0.0467119961977005, -0.033546578139066696, -0.018562352284789085, -0.00963638536632061, 0.010963162407279015, 0.12514913082122803, 0.014606934040784836, -0.07684589177370071, 0.015948457643389702, 0.004048028029501438, -0.09112624078989029, 0.21693596243858337, -0.07018425315618515, -0.11064229905605316, -0.12533357739448547, -0.07329437881708145, -0.03334925323724747, 0.02163054049015045, 0.04707092419266701, -0.09135172516107559, -0.012164844200015068, -0.04950576275587082, 0.07174420356750488, 0.01813916675746441, 0.04440480098128319, -0.004453490953892469, -0.0002927338646259159, 0.060184597969055176, -0.10413281619548798, -0.009191647171974182, -0.08766241371631622, -0.05883856490254402, 0.040340129286050797, -0.0003729999589268118, 0.11100557446479797, 0.1628761738538742, -0.03577354922890663, 0.011991056613624096, -0.03881403058767319, 0.2524552643299103, -0.06658369302749634, -0.03843863308429718, 0.11434690654277802, -0.017734427005052567, 0.023071538656949997, 0.10940144211053848, 0.07320083677768707, -0.09182094037532806, 0.028374558314681053, 0.038483258336782455, -0.01777573674917221, -0.22036616504192352, 
-0.05108914524316788, -0.04078378900885582, -0.036451492458581924, 0.08407023549079895, 0.0003901920572388917, 0.0183104258030653, 0.06834515929222107, 0.04534095898270607, 0.08777833729982376, -0.059207987040281296, 0.06448628008365631, 0.1179756298661232, 0.03213049843907356, 0.1343694031238556, -0.023164959624409676, -0.0809125304222107, 0.02668970450758934, -0.022122904658317566, 0.2308049350976944, -0.0006626078975386918, 0.11714095622301102, 0.06022937595844269, 0.13154107332229614, 0.007562451995909214, 0.0829070433974266, -0.0011458622757345438, -0.05313609912991524, -0.002669837325811386, -0.04263051971793175, -0.052563026547431946, 0.014482975006103516, -0.06470430642366409, 0.08264897018671036, -0.1539979726076126, -0.014428919181227684, 0.050803836435079575, 0.19384397566318512, 0.0638023316860199, -0.3016469478607178, -0.10773104429244995, 0.003893813583999872, -0.030183980241417885, -0.03225719556212425, 0.022760644555091858, 0.116102434694767, -0.09160532802343369, 0.004874006379395723, -0.056271400302648544, 0.0892501100897789, -0.053349241614341736, 0.05991411209106445, 0.04224392771720886, 0.06178241968154907, -0.023886417970061302, 0.08142856508493423, -0.2539684474468231, 0.2829135060310364, 0.005826800130307674, 0.04722587764263153, -0.04660969600081444, -0.015180005691945553, 0.03097853995859623, 0.12190207093954086, 0.061762485653162, -0.004658892285078764, -0.0027939972933381796, -0.22713984549045563, -0.02894686721265316, 0.03367820754647255, 0.11477699875831604, -0.07144826650619507, 0.10931528359651566, -0.017244862392544746, 0.012892529368400574, 0.06023399531841278, -0.024240529164671898, -0.06808679550886154, -0.06332743167877197, -0.007975112646818161, -0.0036963289603590965, 0.0037668016739189625, -0.055965472012758255, -0.1151813492178917, -0.08079895377159119, 0.13565585017204285, -0.043007638305425644, -0.03427336364984512, -0.11525756865739822, 0.11787237226963043, 0.0857214480638504, -0.09177105873823166, 0.031616903841495514, 0.01699506677687168, 0.05599691718816757, 0.03327855095267296, -0.0557103306055069, 0.10927266627550125, -0.0510626956820488, -0.18127016723155975, -0.07529003918170929, 0.1363326907157898, 0.040457118302583694, 0.07950754463672638, -0.010028988122940063, -0.001736804493702948, -0.02563861757516861, -0.082582026720047, 0.03558749333024025, -0.019305836409330368, 0.06284426897764206, 0.05201832205057144, -0.10021515935659409, 0.0251634418964386, -0.06200911104679108, -0.009317558258771896, 0.19158194959163666, 0.2534618675708771, -0.09104007482528687, 0.005029228515923023, 0.023457003757357597, -0.08271536231040955, -0.18090417981147766, 0.06933864951133728, 0.08978072553873062, 0.022900553420186043, 0.02340139076113701, -0.19189397990703583, 0.15034909546375275, 0.09656435251235962, 0.004793497733771801, 0.09522759169340134, -0.25987470149993896, -0.1316421926021576, 0.11411377787590027, 0.148849755525589, 0.13624684512615204, -0.131021648645401, -0.022996479645371437, -0.015237848274409771, -0.1283828169107437, 0.09611974656581879, -0.049856800585985184, 0.1316482126712799, -0.044927798211574554, 0.09444963932037354, 0.02414008602499962, -0.04586878418922424, 0.11371316015720367, 0.03304228559136391, 0.09610959142446518, -0.043502748012542725, -0.07144959270954132, 0.030975766479969025, -0.03321114555001259, 0.01411286648362875, -0.04664585366845131, 0.02493605948984623, -0.13031511008739471, -0.032885730266571045, -0.08867939561605453, 0.023966709151864052, -0.03669847920536995, -0.07562113553285599, -0.045140016824007034, 
0.03568935766816139, 0.03544618934392929, -0.005236791446805, 0.11623767763376236, -0.03136122226715088, 0.1459629088640213, 0.05493331700563431, 0.09830659627914429, -0.05364857614040375, -0.03639910742640495, 0.010968747548758984, -0.009677293710410595, 0.056512195616960526, -0.14688153564929962, 0.036534909158945084, 0.1641109585762024, 0.018677223473787308, 0.1529431790113449, 0.08867277204990387, -0.019330516457557678, 0.0009932070970535278, 0.06614530086517334, -0.16582834720611572, -0.03288361430168152, -0.03567465394735336, -0.09243964403867722, -0.13781973719596863, 0.0680084377527237, 0.11628663539886475, -0.06516405940055847, -0.017597835510969162, -0.03459601104259491, -0.019440317526459694, -0.07233057171106339, 0.21196286380290985, 0.07380548864603043, 0.06299431622028351, -0.08228909969329834, 0.017990652471780777, 0.039084531366825104, -0.045906275510787964, 0.0014161417493596673, 0.06635428220033646, -0.08378942310810089, -0.036602407693862915, 0.05731679126620293, 0.22466951608657837, -0.05884537845849991, -0.014833194203674793, -0.1517561674118042, -0.117722287774086, 0.07131858170032501, 0.2217107117176056, 0.11601372808218002, 0.0055859689600765705, -0.05925469845533371, 0.006582258734852076, -0.12878431379795074, 0.1072002425789833, 0.01628081686794758, 0.07784803956747055, -0.11290226131677628, 0.19811701774597168, -0.016409937292337418, 0.03846973180770874, -0.036685843020677567, 0.026307981461286545, -0.12798650562763214, 0.01124641951173544, -0.12670370936393738, -0.042976412922143936, -0.00014565058518201113, 0.0027289255522191525, 0.002414935501292348, -0.06431644409894943, -0.049240123480558395, 0.002966871252283454, -0.11628101766109467, -0.01232626661658287, 0.04733842611312866, 0.057252511382102966, -0.10453832149505615, -0.03772123157978058, 0.02754591405391693, -0.0611552819609642, 0.08822706341743469, 0.04037848860025406, 0.04276455566287041, 0.056997284293174744, -0.12892699241638184, 0.02501394972205162, 0.05865435674786568, 0.018876271322369576, 0.05762875825166702, -0.08591429889202118, 0.0007970063597895205, -0.023937832564115524, 0.06910831481218338, 0.0365823395550251, 0.07917329668998718, -0.1221446618437767, -0.010117151774466038, -0.02584480121731758, -0.07627972960472107, -0.05991428717970848, 0.026489082723855972, 0.05344194918870926, 0.017062291502952576, 0.1658693104982376, -0.07578413188457489, 0.03446836397051811, -0.2125196009874344, -0.012965229339897633, -0.008883370086550713, -0.09977192431688309, -0.11364705860614777, -0.08210797607898712, 0.06490267813205719, -0.04781612008810043, 0.13376885652542114, 0.019073186442255974, 0.04970892518758774, 0.023926202207803726, -0.01460216287523508, 0.054352883249521255, 0.016923200339078903, 0.2046784907579422, 0.04041706398129463, -0.0515940822660923, 0.043269142508506775, 0.060396723449230194, 0.10360898822546005, 0.12838611006736755, 0.19933225214481354, 0.13311631977558136, -0.02897239662706852, 0.07197771966457367, 0.03525283932685852, -0.04141438007354736, -0.14317573606967926, -0.013770896941423416, -0.001892800210043788, 0.060603659600019455, -0.014693052507936954, 0.2030494660139084, 0.06354399025440216, -0.1636890172958374, 0.03772103786468506, -0.04300554841756821, -0.10584700852632523, -0.10711083561182022, 0.0038207469042390585, -0.07616540789604187, -0.1499759554862976, 0.0027432753704488277, -0.14047089219093323, -0.009580875746905804, 0.09565450996160507, 0.01591259427368641, -0.019479837268590927, 0.18603743612766266, 0.008263522759079933, 0.035481564700603485, 
0.07478751987218857, 0.01190858893096447, -0.010014394298195839, -0.11025474220514297, -0.054457370191812515, -0.0024916629772633314, -0.029731320217251778, 0.018553953617811203, -0.07356548309326172, -0.06965480744838715, 0.0007122912211343646, -0.01006158348172903, -0.1063147559762001, 0.012961029075086117, 0.006789392326027155, 0.057925015687942505, 0.05301261320710182, 0.007594165857881308, 0.008514964021742344, -0.02346949838101864, 0.22106754779815674, -0.06692403554916382, -0.042831312865018845, -0.10755733400583267, 0.24783015251159668, 0.06155780702829361, 0.01880679652094841, 0.014948309399187565, -0.06459405273199081, 0.017489079385995865, 0.24450156092643738, 0.17266593873500824, -0.08804388344287872, -0.00848478078842163, 0.017359992489218712, -0.004225058946758509, 0.01025648508220911, 0.11369843035936356, 0.11004705727100372, 0.018681099638342857, -0.07803359627723694, -0.04941225424408913, -0.05707982927560806, -0.01162442471832037, -0.008011672645807266, 0.04133708402514458, 0.08159817010164261, 0.010444585233926773, -0.05250284820795059, 0.04446680098772049, -0.07133311778306961, -0.0803041011095047, 0.07591838389635086, -0.22913925349712372, -0.15200193226337433, -0.034916963428258896, 0.08061917126178741, 0.005812185350805521, 0.05891920253634453, -0.03104373812675476, -0.012382613494992256, 0.09887263923883438, -0.013251300901174545, -0.09465576708316803, -0.10427764803171158, 0.10089033842086792, -0.11726770550012589, 0.18908919394016266, -0.04199418053030968, 0.06714749336242676, 0.12429750710725784, 0.047968920320272446, -0.04844583943486214, 0.07651950418949127, 0.037122588604688644, -0.07069000601768494, 0.02738623321056366, 0.12740840017795563, -0.026634901762008667, 0.09847172349691391, 0.03274717554450035, -0.17202652990818024, 0.01252055075019598, -0.06512153893709183, -0.07622236013412476, -0.05381224304437637, -0.024615727365016937, -0.054158635437488556, 0.12619490921497345, 0.24321942031383514, -0.0325518473982811, 0.007103492505848408, -0.0700220912694931, 0.019582979381084442, 0.07346193492412567, 0.05900561809539795, -0.06530792266130447, -0.22897055745124817, 0.013162150979042053, 0.07143085449934006, -0.033886123448610306, -0.2748495638370514, -0.09040389955043793, -0.00721239298582077, -0.05629162862896919, -0.0850660651922226, 0.09668954461812973, 0.06710920482873917, 0.06364146620035172, -0.05234686657786369, -0.10009030252695084, -0.06177227199077606, 0.1507387012243271, -0.15715621411800385, -0.10514328628778458 ]
null
null
peft
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed] ## Training procedure The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: False - load_in_4bit: True - llm_int8_threshold: 6.0 - llm_int8_skip_modules: None - llm_int8_enable_fp32_cpu_offload: False - llm_int8_has_fp16_weight: False - bnb_4bit_quant_type: nf4 - bnb_4bit_use_double_quant: True - bnb_4bit_compute_dtype: float16 ### Framework versions - PEFT 0.6.1
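The `bitsandbytes` quantization config printed at the end of this card corresponds to the `transformers` `BitsAndBytesConfig`, with the adapter itself loaded through `peft`. A minimal sketch follows, taking the base model and adapter id from this record's metadata; flags printed as `False` or `None` above (`load_in_8bit`, `llm_int8_skip_modules`, `llm_int8_enable_fp32_cpu_offload`, `llm_int8_has_fp16_weight`) are library defaults and are omitted.

```python
# Minimal sketch reconstructing the printed quantization config.
# Base model and adapter id are taken from this record's metadata.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # load_in_4bit: True
    bnb_4bit_quant_type="nf4",             # bnb_4bit_quant_type: nf4
    bnb_4bit_use_double_quant=True,        # bnb_4bit_use_double_quant: True
    bnb_4bit_compute_dtype=torch.float16,  # bnb_4bit_compute_dtype: float16
    llm_int8_threshold=6.0,                # llm_int8_threshold: 6.0
)

base = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",
    quantization_config=bnb_config,
    device_map="auto",  # assumes accelerate is installed
)
model = PeftModel.from_pretrained(base, "joshswartz/model_d2_llama_wikihow_cc")
```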
{"library_name": "peft", "base_model": "meta-llama/Llama-2-7b-hf"}
null
joshswartz/model_d2_llama_wikihow_cc
[ "peft", "arxiv:1910.09700", "base_model:meta-llama/Llama-2-7b-hf", "region:us" ]
2023-11-12T18:05:53+00:00
[ "1910.09700" ]
[]
TAGS #peft #arxiv-1910.09700 #base_model-meta-llama/Llama-2-7b-hf #region-us
# Model Card for Model ID ## Model Details ### Model Description - Developed by: - Funded by [optional]: - Shared by [optional]: - Model type: - Language(s) (NLP): - License: - Finetuned from model [optional]: ### Model Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Downstream Use [optional] ### Out-of-Scope Use ## Bias, Risks, and Limitations ### Recommendations Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. ## Training Details ### Training Data ### Training Procedure #### Preprocessing [optional] #### Training Hyperparameters - Training regime: #### Speeds, Sizes, Times [optional] ## Evaluation ### Testing Data, Factors & Metrics #### Testing Data #### Factors #### Metrics ### Results #### Summary ## Model Examination [optional] ## Environmental Impact Carbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019). - Hardware Type: - Hours used: - Cloud Provider: - Compute Region: - Carbon Emitted: ## Technical Specifications [optional] ### Model Architecture and Objective ### Compute Infrastructure #### Hardware #### Software [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Model Card Authors [optional] ## Model Card Contact ## Training procedure The following 'bitsandbytes' quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: False - load_in_4bit: True - llm_int8_threshold: 6.0 - llm_int8_skip_modules: None - llm_int8_enable_fp32_cpu_offload: False - llm_int8_has_fp16_weight: False - bnb_4bit_quant_type: nf4 - bnb_4bit_use_double_quant: True - bnb_4bit_compute_dtype: float16 ### Framework versions - PEFT 0.6.1
[ "# Model Card for Model ID", "## Model Details", "### Model Description\n\n\n\n\n\n- Developed by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:", "### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Downstream Use [optional]", "### Out-of-Scope Use", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", "## How to Get Started with the Model\n\nUse the code below to get started with the model.", "## Training Details", "### Training Data", "### Training Procedure", "#### Preprocessing [optional]", "#### Training Hyperparameters\n\n- Training regime:", "#### Speeds, Sizes, Times [optional]", "## Evaluation", "### Testing Data, Factors & Metrics", "#### Testing Data", "#### Factors", "#### Metrics", "### Results", "#### Summary", "## Model Examination [optional]", "## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:", "## Technical Specifications [optional]", "### Model Architecture and Objective", "### Compute Infrastructure", "#### Hardware", "#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Model Card Authors [optional]", "## Model Card Contact", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- quant_method: bitsandbytes\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: float16", "### Framework versions\n\n\n- PEFT 0.6.1" ]
[ "TAGS\n#peft #arxiv-1910.09700 #base_model-meta-llama/Llama-2-7b-hf #region-us \n", "# Model Card for Model ID", "## Model Details", "### Model Description\n\n\n\n\n\n- Developed by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:", "### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Downstream Use [optional]", "### Out-of-Scope Use", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", "## How to Get Started with the Model\n\nUse the code below to get started with the model.", "## Training Details", "### Training Data", "### Training Procedure", "#### Preprocessing [optional]", "#### Training Hyperparameters\n\n- Training regime:", "#### Speeds, Sizes, Times [optional]", "## Evaluation", "### Testing Data, Factors & Metrics", "#### Testing Data", "#### Factors", "#### Metrics", "### Results", "#### Summary", "## Model Examination [optional]", "## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:", "## Technical Specifications [optional]", "### Model Architecture and Objective", "### Compute Infrastructure", "#### Hardware", "#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Model Card Authors [optional]", "## Model Card Contact", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- quant_method: bitsandbytes\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: float16", "### Framework versions\n\n\n- PEFT 0.6.1" ]
[ 36, 6, 3, 54, 28, 3, 4, 9, 9, 10, 42, 20, 3, 4, 5, 9, 11, 13, 3, 12, 5, 4, 5, 3, 4, 9, 53, 9, 8, 6, 3, 14, 8, 7, 9, 4, 163, 11 ]
[ "passage: TAGS\n#peft #arxiv-1910.09700 #base_model-meta-llama/Llama-2-7b-hf #region-us \n# Model Card for Model ID## Model Details### Model Description\n\n\n\n\n\n- Developed by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Downstream Use [optional]### Out-of-Scope Use## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.## How to Get Started with the Model\n\nUse the code below to get started with the model.## Training Details### Training Data### Training Procedure#### Preprocessing [optional]#### Training Hyperparameters\n\n- Training regime:#### Speeds, Sizes, Times [optional]## Evaluation### Testing Data, Factors & Metrics#### Testing Data#### Factors#### Metrics### Results#### Summary## Model Examination [optional]## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:## Technical Specifications [optional]### Model Architecture and Objective### Compute Infrastructure#### Hardware#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Model Card Authors [optional]## Model Card Contact" ]
[ -0.10044248402118683, 0.18992742896080017, -0.0031633442267775536, 0.032848432660102844, 0.0898432508111, 0.020555412396788597, 0.0514112152159214, 0.1319137066602707, -0.028625067323446274, 0.10301047563552856, 0.06944341957569122, 0.10447767376899719, 0.10382714867591858, 0.1985284984111786, 0.007701088674366474, -0.1989043653011322, 0.021161379292607307, -0.09108774363994598, -0.014098851941525936, 0.12019253522157669, 0.15123359858989716, -0.10033918917179108, 0.08129947632551193, -0.011050217784941196, -0.013868252746760845, -0.029359282925724983, -0.0780041441321373, -0.02544492855668068, 0.04896214231848717, 0.05032109469175339, 0.055246517062187195, 0.0029978074599057436, 0.08228053152561188, -0.26885733008384705, 0.017903776839375496, 0.03887239843606949, -0.008777724578976631, 0.0843198150396347, 0.08247148245573044, -0.04058905690908432, 0.13803090155124664, -0.03714612126350403, 0.13607865571975708, 0.08215759694576263, -0.09024880081415176, -0.2134598195552826, -0.06507281213998795, 0.07474285364151001, 0.1743130385875702, 0.07597663998603821, -0.04511013999581337, 0.12941290438175201, -0.10415855795145035, 0.014416622929275036, 0.04713919758796692, -0.08095727115869522, -0.06547366082668304, 0.06468883901834488, 0.10521412640810013, 0.05623435601592064, -0.13173222541809082, -0.025902461260557175, 0.023263070732355118, 0.03365810215473175, 0.0756877213716507, 0.016464419662952423, 0.15273715555667877, 0.04139616712927818, -0.1494637280702591, -0.037655144929885864, 0.14393219351768494, 0.03141562640666962, -0.030503667891025543, -0.2192901223897934, 0.008600619621574879, -0.08095161616802216, -0.027750205248594284, -0.04569259285926819, 0.04270664602518082, -0.0014254552079364657, 0.09788589179515839, -0.0322093665599823, -0.091762974858284, -0.010735442861914635, 0.0997031182050705, 0.041473742574453354, 0.023828163743019104, -0.021124323830008507, 0.0009543480700813234, 0.12530238926410675, 0.04867546632885933, -0.1303141713142395, -0.06065090373158455, -0.06405604630708694, -0.04496660828590393, -0.03860054910182953, 0.02864566631615162, 0.03481686860322952, 0.061307307332754135, 0.23982128500938416, -0.017234910279512405, 0.05822354927659035, 0.062443651258945465, 0.027269193902611732, 0.047748107463121414, 0.09029047191143036, -0.061803244054317474, -0.153838649392128, -0.013872322626411915, 0.09942325949668884, -0.005674438085407019, -0.024520007893443108, -0.0603773407638073, 0.04221651703119278, 0.032170433551073074, 0.10558142513036728, 0.09421803057193756, -0.005683776922523975, -0.07525945454835892, -0.05429021641612053, 0.1942201405763626, -0.15101242065429688, 0.03361814096570015, 0.016388189047574997, -0.024884099140763283, -0.058769337832927704, 0.00935014896094799, 0.021586691960692406, -0.02514699101448059, 0.09575219452381134, -0.07048187404870987, -0.036539897322654724, -0.12146998941898346, -0.02083268202841282, 0.03388221189379692, 0.012313410639762878, -0.02551470696926117, -0.023502644151449203, -0.05979127064347267, -0.0899723693728447, 0.10775519907474518, -0.06711988151073456, -0.05872555822134018, -0.03693901374936104, -0.08637169748544693, 0.02214251086115837, 0.02999192290008068, 0.1114182248711586, -0.024670526385307312, 0.042189761996269226, -0.007259692531079054, 0.07018516957759857, 0.07305102050304413, 0.03786170110106468, -0.06486404687166214, 0.059836920350790024, -0.20003770291805267, 0.08701111376285553, -0.08251814544200897, 0.030514534562826157, -0.1604008823633194, -0.01075591892004013, 0.014319881796836853, 0.02763427421450615, 
0.033716946840286255, 0.15419122576713562, -0.20763204991817474, -0.031920138746500015, 0.1538572609424591, -0.0940161943435669, -0.12170283496379852, 0.03971891105175018, -0.05934518948197365, 0.1717393398284912, 0.01623929664492607, -0.0033652414567768574, 0.0796918123960495, -0.15143276751041412, -0.023516377434134483, -0.019804341718554497, -0.007825165055692196, 0.09675498306751251, 0.08585907518863678, -0.07855241745710373, 0.03345787897706032, 0.015479263849556446, -0.046355172991752625, -0.033680208027362823, -0.04660557955503464, -0.11667574197053909, 0.003190065501257777, -0.08224474638700485, 0.02117563597857952, -0.011961339972913265, -0.0739326924085617, -0.006161029916256666, -0.1644095480442047, -0.024000022560358047, 0.08550204336643219, 0.015760095790028572, -0.01728491485118866, -0.09634038060903549, 0.03927699476480484, -0.024541659280657768, -0.023626741021871567, -0.15302905440330505, -0.011984584853053093, 0.014251348562538624, -0.14027035236358643, 0.02198829874396324, -0.10273617506027222, 0.0648428425192833, 0.0070882029831409454, -0.06591005623340607, -0.028397442772984505, -0.008555792272090912, 0.008104546926915646, -0.05133191868662834, -0.24766644835472107, -0.019276097416877747, -0.050122279673814774, 0.1633339375257492, -0.22488847374916077, 0.03853673115372658, 0.05151868984103203, 0.12592169642448425, -0.003939240705221891, -0.05382701754570007, 0.02608785778284073, -0.07279044389724731, -0.025669842958450317, -0.06616099178791046, 0.000820607237983495, -0.00863348226994276, -0.056482359766960144, 0.012016871012747288, -0.11235277354717255, -0.05235936865210533, 0.1036778911948204, 0.049049459397792816, -0.15663500130176544, -0.02305593714118004, -0.04101930186152458, -0.06858641654253006, -0.07652970403432846, -0.06328991800546646, 0.10950575768947601, 0.04611774906516075, 0.03776420280337334, -0.076755590736866, -0.07332856953144073, 0.007952043786644936, -0.024132754653692245, -0.018902862444519997, 0.11484012752771378, 0.0817960724234581, -0.1223091185092926, 0.0921926349401474, 0.07625507563352585, 0.02147734723985195, 0.09808528423309326, -0.022767210379242897, -0.10519769042730331, -0.03458017855882645, 0.04204082116484642, 0.007610959932208061, 0.16470123827457428, -0.08829954266548157, 0.047669220715761185, 0.04432448372244835, -0.038364771753549576, 0.052726779133081436, -0.1043141782283783, 0.009411533363163471, 0.004796518012881279, -0.010005949065089226, 0.012025139294564724, -0.017812024801969528, 0.0034013038966804743, 0.0851118341088295, 0.057039182633161545, 0.03549480438232422, 0.03228387236595154, -0.035798728466033936, -0.12894557416439056, 0.18558786809444427, -0.0975983515381813, -0.24044886231422424, -0.15509019792079926, 0.048295993357896805, 0.05326466262340546, -0.02198074758052826, 0.02745210938155651, -0.06245077773928642, -0.1009271889925003, -0.07220818847417831, 0.0015414628433063626, 0.015302390791475773, -0.06344247609376907, -0.07494039833545685, 0.05488257110118866, 0.04043089225888252, -0.12155907601118088, 0.03280698508024216, 0.053153570741415024, -0.008113368414342403, 0.003416787600144744, 0.05671697482466698, 0.08542142063379288, 0.18492209911346436, -0.010098925791680813, 0.0008395772310905159, 0.056079212576150894, 0.2789871096611023, -0.16063286364078522, 0.11090124398469925, 0.11408059298992157, -0.06387202441692352, 0.08216164261102676, 0.18873821198940277, 0.03788645565509796, -0.10160696506500244, 0.03102363646030426, 0.03430721163749695, -0.02565835975110531, -0.26741763949394226, -0.050399597734212875, 
-0.014976361766457558, -0.10846755653619766, 0.07354896515607834, 0.08648476004600525, 0.08980714529752731, 0.034548550844192505, -0.058307986706495285, -0.07948266714811325, 0.028328167274594307, 0.0998353585600853, -0.014116302132606506, 0.0010356578277423978, 0.08560281246900558, -0.03257919102907181, 0.005785651504993439, 0.09074921905994415, -0.01330981682986021, 0.16637872159481049, 0.054453980177640915, 0.12052901089191437, 0.09107792377471924, 0.08630561083555222, -0.0035174887161701918, 0.016903694719076157, 0.012796309776604176, 0.018955716863274574, 0.008438740856945515, -0.087465301156044, 0.03567832335829735, 0.11654272675514221, 0.04937770962715149, 0.02373127080500126, 0.014013930223882198, -0.03731725737452507, 0.047802120447158813, 0.1789676994085312, 0.011567137204110622, -0.19375576078891754, -0.06979576498270035, 0.06292837113142014, -0.07249032706022263, -0.13199423253536224, -0.01796707697212696, 0.017447955906391144, -0.16388265788555145, 0.011618269607424736, -0.03963584825396538, 0.09954611957073212, -0.08395779132843018, -0.03426161780953407, 0.0880831629037857, 0.06829404830932617, -0.026553891599178314, 0.067540742456913, -0.20641998946666718, 0.13599270582199097, 0.0321977399289608, 0.06387536227703094, -0.093824602663517, 0.09579966962337494, 0.004468117840588093, -0.007860559038817883, 0.16669459640979767, 0.005145237781107426, -0.06974595785140991, -0.05858046934008598, -0.08404671400785446, -0.013840875588357449, 0.10265224426984787, -0.13122035562992096, 0.06550464034080505, -0.016110112890601158, -0.030252711847424507, 0.003915764857083559, -0.07304736226797104, -0.12210891395807266, -0.17797791957855225, 0.06468422710895538, -0.1003674566745758, 0.02231353335082531, -0.08984930068254471, -0.06326913088560104, 0.020478924736380577, 0.18795396387577057, -0.19400256872177124, -0.09489081799983978, -0.14393247663974762, -0.08190172165632248, 0.1569294035434723, -0.0429266020655632, 0.08132395893335342, 0.0013449483085423708, 0.15893405675888062, 0.011292459443211555, -0.005688081495463848, 0.1058691143989563, -0.08298300951719284, -0.1821753829717636, -0.06078406423330307, 0.1656748205423355, 0.1350201666355133, 0.04010360315442085, -0.01576046831905842, 0.01983097940683365, -0.05620177090167999, -0.11325959116220474, 0.030592946335673332, 0.13356854021549225, 0.07688459008932114, -0.011942954733967781, -0.037711989134550095, -0.08192747086286545, -0.06020204350352287, -0.05551832541823387, 0.006783293094485998, 0.1993602067232132, -0.07120006531476974, 0.1680586040019989, 0.12570977210998535, -0.05972565710544586, -0.20626886188983917, 0.04871811345219612, 0.04841099679470062, 0.01591246761381626, 0.03200730308890343, -0.2013317048549652, 0.08476155996322632, -0.00919792614877224, -0.07434682548046112, 0.16161975264549255, -0.16567467153072357, -0.14396801590919495, 0.10138025879859924, 0.03544601425528526, -0.2073034793138504, -0.13763678073883057, -0.10106102377176285, -0.027115946635603905, -0.11901183426380157, 0.057926785200834274, 0.0027565527707338333, 0.019091350957751274, 0.023980356752872467, 0.027124982327222824, 0.02498139813542366, -0.05055643990635872, 0.2048446238040924, -0.020622026175260544, 0.009273788891732693, -0.052721332758665085, -0.10569614917039871, 0.03886573016643524, -0.052420035004615784, 0.10414378345012665, -0.006502528674900532, 0.022677989676594734, -0.16309688985347748, -0.04226570203900337, -0.05809146165847778, 0.028818225488066673, -0.10148394852876663, -0.0926479697227478, -0.04908192530274391, 0.09685041010379791, 
0.09519395232200623, -0.027106378227472305, 0.004440506920218468, -0.0919228196144104, 0.056688662618398666, 0.20379489660263062, 0.1955365687608719, 0.062420960515737534, -0.0675617977976799, 0.020117446780204773, -0.027193482965230942, 0.04655174911022186, -0.24840767681598663, 0.04238007217645645, 0.058374397456645966, 0.026463521644473076, 0.09237723052501678, -0.006681269034743309, -0.1587531417608261, -0.07440605014562607, 0.08705008029937744, -0.04610403627157211, -0.1571425497531891, -0.03292759135365486, 0.03571044281125069, -0.20511841773986816, -0.04523792862892151, 0.01691841147840023, -0.017359333112835884, -0.03913749009370804, 0.028136592358350754, 0.0776490643620491, -0.02359675243496895, 0.10429829359054565, 0.09128844738006592, 0.09993388503789902, -0.10221196711063385, 0.07552429288625717, 0.07523641735315323, -0.04358464851975441, 0.028502589091658592, 0.10842984169721603, -0.0476534478366375, -0.0364280566573143, 0.08415549993515015, 0.09706524759531021, 0.014858896844089031, -0.05127701163291931, 0.006819105241447687, -0.0512918122112751, 0.06035584956407547, 0.1120617613196373, 0.034527767449617386, -0.0117933489382267, 0.05332980677485466, 0.031522784382104874, -0.09442190080881119, 0.10945162177085876, 0.04829385504126549, 0.016571877524256706, -0.03307706117630005, -0.04221353679895401, -0.004479140043258667, -0.006683522369712591, -0.018728742375969887, -0.01101082842797041, -0.09595657885074615, -0.004596467595547438, -0.10496339201927185, 0.023392152041196823, -0.06368815898895264, 0.00806488562375307, 0.029130123555660248, -0.049426157027482986, 0.0030025437008589506, 0.003943906165659428, -0.08111342787742615, -0.0463692806661129, -0.012896529398858547, 0.08656172454357147, -0.12385637313127518, 0.03547727316617966, 0.07521878927946091, -0.10324519872665405, 0.06899654120206833, -0.0053674220107495785, 0.008654128760099411, 0.016600966453552246, -0.15143415331840515, 0.05747058615088463, -0.028043299913406372, -0.01262114942073822, 0.024689843878149986, -0.20753160119056702, -0.013175307773053646, -0.05257786437869072, -0.044104281812906265, 0.009588141925632954, -0.03352321311831474, -0.12219370156526566, 0.10052043944597244, -0.006234914530068636, -0.0725678950548172, -0.0220775343477726, 0.04363057389855385, 0.09547104686498642, -0.024448877200484276, 0.12744586169719696, -0.01952536031603813, 0.06998538225889206, -0.17183852195739746, -0.0038975346833467484, -0.011288504116237164, 0.03852435201406479, -0.017187224701046944, -0.03888101875782013, 0.05726081505417824, -0.030799131840467453, 0.18979518115520477, -0.01854889653623104, 0.07342257350683212, 0.05471691116690636, 0.02006877027451992, 0.010011863894760609, 0.08027934283018112, 0.062280088663101196, -0.0064839753322303295, 0.0020977959502488375, 0.040415093302726746, -0.0017644116887822747, -0.04041942581534386, -0.14893858134746552, 0.06990225613117218, 0.15122491121292114, 0.055874209851026535, 0.023882616311311722, 0.03351292759180069, -0.11358572542667389, -0.07746727764606476, 0.150340273976326, -0.005242459941655397, -0.031158527359366417, -0.07364263385534286, 0.1794879287481308, 0.13769802451133728, -0.19829629361629486, 0.07881759107112885, -0.06236400455236435, -0.05567285418510437, -0.13105839490890503, -0.16477283835411072, -0.06281837821006775, -0.04647381231188774, -0.021154697984457016, -0.06299059838056564, 0.05545128881931305, 0.05701001361012459, 0.005569384433329105, -0.02002871222794056, 0.10298950970172882, 0.016889085993170738, -0.02215913124382496, 0.04514675587415695, 
0.058769334107637405, 0.026251008734107018, -0.10331796854734421, 0.013996411114931107, -0.003589772153645754, 0.010672002099454403, 0.05782429501414299, 0.01340949535369873, -0.05595279112458229, 0.008748321793973446, -0.016279712319374084, -0.1143040880560875, 0.03918766230344772, -0.017173100262880325, -0.030798835679888725, 0.1427876055240631, 0.027941791340708733, 0.006094928365200758, -0.02193468064069748, 0.2314632087945938, -0.07485973089933395, -0.07531194388866425, -0.1452285796403885, 0.07276340574026108, -0.06750857830047607, 0.0313834547996521, 0.031946852803230286, -0.11672214418649673, 0.01792493648827076, 0.1735544353723526, 0.13617043197155, -0.016971297562122345, 0.010430374182760715, 0.050404686480760574, 0.004769227933138609, -0.03419284150004387, 0.015876198187470436, 0.052125826478004456, 0.13811573386192322, -0.0754384994506836, 0.06343179196119308, -0.015465234406292439, -0.08448497951030731, -0.01257187221199274, 0.11209700256586075, 0.01072657760232687, -0.00022751084179617465, -0.06526169925928116, 0.13449300825595856, -0.08504575490951538, -0.23783501982688904, 0.054112330079078674, -0.07512596994638443, -0.14847709238529205, -0.05084700882434845, 0.0191144160926342, -0.016571911051869392, 0.014183185063302517, 0.06995406746864319, -0.05636376142501831, 0.16951484978199005, 0.04403291270136833, -0.06476660072803497, -0.08452221006155014, 0.06491239368915558, -0.14465785026550293, 0.2719082534313202, 0.01827436126768589, 0.052872978150844574, 0.10590392351150513, -0.013356729410588741, -0.12908883392810822, 0.013263006694614887, 0.10755021870136261, -0.07308419048786163, 0.05594499781727791, 0.18196547031402588, 0.002580154687166214, 0.12793375551700592, 0.056854378432035446, -0.0571434460580349, 0.04368443787097931, -0.08964169770479202, -0.04877006262540817, -0.1078919842839241, 0.07959039509296417, -0.08438344299793243, 0.16074974834918976, 0.13300949335098267, -0.06368637830018997, -0.007650652900338173, -0.024498596787452698, 0.08409105986356735, 0.007341811899095774, 0.10744085907936096, 0.0025576732587069273, -0.18022862076759338, 0.03970180079340935, 0.015342454425990582, 0.09788894653320312, -0.21619246900081635, -0.0639476403594017, 0.05330363288521767, -0.01851370930671692, -0.07330190390348434, 0.12064536660909653, 0.05488927289843559, 0.0369114875793457, -0.04064938426017761, -0.06231514364480972, 0.00356076518073678, 0.14312854409217834, -0.11909060180187225, -0.008164821192622185 ]
null
null
transformers
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # segformer-finetuned-coasts-final This model is a fine-tuned version of [peldrak/segformer-finetuned-coastalDataset](https://huggingface.co/peldrak/segformer-finetuned-coastalDataset) on the peldrak/coastal2 dataset. It achieves the following results on the evaluation set: - Loss: 0.2563 - Mean Iou: 0.5765 - Mean Accuracy: 0.7934 - Overall Accuracy: 0.8942 - Accuracy Water: 0.9204 - Accuracy Whitewater: 0.6450 - Accuracy Sediment: 0.8936 - Accuracy Other Natural Terrain: 0.5526 - Accuracy Vegetation: 0.9077 - Accuracy Development: 0.8415 - Accuracy Unknown: nan - Iou Water: 0.8847 - Iou Whitewater: 0.4614 - Iou Sediment: 0.7695 - Iou Other Natural Terrain: 0.4632 - Iou Vegetation: 0.8233 - Iou Development: 0.6331 - Iou Unknown: 0.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 6e-05 - train_batch_size: 4 - eval_batch_size: 4 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 2 ### Training results | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Accuracy Water | Accuracy Whitewater | Accuracy Sediment | Accuracy Other Natural Terrain | Accuracy Vegetation | Accuracy Development | Accuracy Unknown | Iou Water | Iou Whitewater | Iou Sediment | Iou Other Natural Terrain | Iou Vegetation | Iou Development | Iou Unknown | |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:--------------:|:-------------------:|:-----------------:|:------------------------------:|:-------------------:|:--------------------:|:----------------:|:---------:|:--------------:|:------------:|:-------------------------:|:--------------:|:---------------:|:-----------:| | 0.6692 | 0.01 | 20 | 0.4614 | 0.4642 | 0.6595 | 0.8193 | 0.8313 | 0.4104 | 0.7626 | 0.3112 | 0.9007 | 0.7410 | nan | 0.7682 | 0.3175 | 0.6041 | 0.2766 | 0.7352 | 0.5478 | 0.0 | | 0.6263 | 0.02 | 40 | 0.4235 | 0.4592 | 0.6471 | 0.8295 | 0.9136 | 0.3421 | 0.7720 | 0.2776 | 0.8233 | 0.7542 | nan | 0.8087 | 0.2990 | 0.6199 | 0.2471 | 0.7340 | 0.5055 | 0.0 | | 1.3017 | 0.04 | 60 | 0.4394 | 0.4653 | 0.6590 | 0.8330 | 0.8610 | 0.3290 | 0.9077 | 0.2547 | 0.8425 | 0.7591 | nan | 0.8012 | 0.2886 | 0.6560 | 0.2308 | 0.7409 | 0.5394 | 0.0 | | 0.4226 | 0.05 | 80 | 0.4617 | 0.4769 | 0.6540 | 0.8418 | 0.9305 | 0.4493 | 0.7371 | 0.2628 | 0.8645 | 0.6799 | nan | 0.8198 | 0.3649 | 0.6261 | 0.2443 | 0.7488 | 0.5347 | 0.0 | | 0.8408 | 0.06 | 100 | 0.4295 | 0.4505 | 0.6816 | 0.8087 | 0.8050 | 0.4237 | 0.8763 | 0.2882 | 0.8330 | 0.8634 | nan | 0.7757 | 0.3119 | 0.6338 | 0.2439 | 0.7017 | 0.4861 | 0.0 | | 1.474 | 0.07 | 120 | 0.4297 | 0.4708 | 0.6926 | 0.8290 | 0.8725 | 0.5252 | 0.8807 | 0.2439 | 0.8128 | 0.8206 | nan | 0.8183 | 0.3891 | 0.6635 | 0.2246 | 0.7283 | 0.4714 | 0.0 | | 0.5739 | 0.08 | 140 | 0.3918 | 0.4891 | 0.6964 | 0.8398 | 0.8682 | 0.4291 | 0.8989 | 0.3777 | 0.8434 | 0.7609 | nan | 0.8206 | 0.3611 | 0.6719 | 0.3271 | 0.7499 | 0.4930 | 0.0 | | 1.8433 | 0.1 | 160 | 0.4038 | 0.4801 | 0.7087 | 0.8276 | 0.8590 | 0.4685 | 0.9100 | 0.3518 | 0.7974 | 0.8656 | nan | 0.8192 | 0.3831 | 0.6661 | 0.3156 | 
0.7184 | 0.4585 | 0.0 | | 0.3492 | 0.11 | 180 | 0.3186 | 0.5025 | 0.7143 | 0.8507 | 0.9192 | 0.5610 | 0.8231 | 0.3873 | 0.8404 | 0.7547 | nan | 0.8615 | 0.3949 | 0.7214 | 0.3523 | 0.7304 | 0.4573 | 0.0 | | 1.4639 | 0.12 | 200 | 0.4100 | 0.4907 | 0.7152 | 0.8368 | 0.8950 | 0.5659 | 0.8607 | 0.3800 | 0.8092 | 0.7800 | nan | 0.8302 | 0.4098 | 0.6733 | 0.3275 | 0.7245 | 0.4700 | 0.0 | | 0.4228 | 0.13 | 220 | 0.4499 | 0.4825 | 0.7167 | 0.8290 | 0.8528 | 0.6444 | 0.9074 | 0.3113 | 0.8265 | 0.7581 | nan | 0.8071 | 0.4126 | 0.6496 | 0.2843 | 0.7270 | 0.4967 | 0.0 | | 0.4334 | 0.15 | 240 | 0.3982 | 0.4789 | 0.7042 | 0.8384 | 0.8739 | 0.6612 | 0.8529 | 0.2549 | 0.8633 | 0.7188 | nan | 0.8264 | 0.3878 | 0.6766 | 0.2461 | 0.7383 | 0.4771 | 0.0 | | 0.2844 | 0.16 | 260 | 0.4778 | 0.4594 | 0.6958 | 0.8264 | 0.8630 | 0.6156 | 0.8865 | 0.2021 | 0.8209 | 0.7869 | nan | 0.8210 | 0.3735 | 0.6681 | 0.1918 | 0.7157 | 0.4459 | 0.0 | | 0.4044 | 0.17 | 280 | 0.4410 | 0.4585 | 0.6861 | 0.8289 | 0.8873 | 0.5803 | 0.8906 | 0.1865 | 0.8000 | 0.7719 | nan | 0.8319 | 0.3753 | 0.6804 | 0.1753 | 0.7113 | 0.4354 | 0.0 | | 0.3748 | 0.18 | 300 | 0.4839 | 0.4393 | 0.6829 | 0.7970 | 0.8379 | 0.5714 | 0.9223 | 0.2301 | 0.7465 | 0.7892 | nan | 0.7826 | 0.3613 | 0.5881 | 0.1983 | 0.6774 | 0.4673 | 0.0 | | 0.4755 | 0.19 | 320 | 0.3532 | 0.4889 | 0.7063 | 0.8552 | 0.8869 | 0.5985 | 0.8407 | 0.2399 | 0.8972 | 0.7746 | nan | 0.8519 | 0.3580 | 0.7004 | 0.2270 | 0.7688 | 0.5159 | 0.0 | | 0.1647 | 0.21 | 340 | 0.4263 | 0.4482 | 0.6750 | 0.8275 | 0.8739 | 0.4728 | 0.8689 | 0.1326 | 0.8123 | 0.8897 | nan | 0.8275 | 0.3375 | 0.6691 | 0.1279 | 0.7168 | 0.4589 | 0.0 | | 0.1324 | 0.22 | 360 | 0.3186 | 0.4722 | 0.7102 | 0.8414 | 0.9062 | 0.6599 | 0.8066 | 0.1877 | 0.8335 | 0.8674 | nan | 0.8473 | 0.3452 | 0.6860 | 0.1817 | 0.7490 | 0.4958 | 0.0 | | 1.2721 | 0.23 | 380 | 0.4527 | 0.4562 | 0.6524 | 0.8415 | 0.9224 | 0.5242 | 0.7128 | 0.0855 | 0.8813 | 0.7885 | nan | 0.8233 | 0.3347 | 0.6008 | 0.0845 | 0.7683 | 0.5818 | 0.0 | | 1.5569 | 0.24 | 400 | 0.3610 | 0.4686 | 0.7072 | 0.8468 | 0.8868 | 0.6755 | 0.8329 | 0.1835 | 0.8760 | 0.7884 | nan | 0.8391 | 0.3119 | 0.6641 | 0.1672 | 0.7827 | 0.5152 | 0.0 | | 0.3027 | 0.25 | 420 | 0.3637 | 0.4659 | 0.7499 | 0.8159 | 0.8104 | 0.7363 | 0.8482 | 0.4076 | 0.8415 | 0.8554 | nan | 0.7855 | 0.2747 | 0.7003 | 0.3160 | 0.7191 | 0.4658 | 0.0 | | 0.2337 | 0.27 | 440 | 0.3187 | 0.4821 | 0.7265 | 0.8479 | 0.9054 | 0.5871 | 0.8284 | 0.3716 | 0.8357 | 0.8309 | nan | 0.8486 | 0.2510 | 0.7033 | 0.3185 | 0.7569 | 0.4964 | 0.0 | | 0.9241 | 0.28 | 460 | 0.3566 | 0.4981 | 0.6725 | 0.8624 | 0.9099 | 0.4308 | 0.7761 | 0.3893 | 0.9400 | 0.5890 | nan | 0.8466 | 0.3307 | 0.6713 | 0.3407 | 0.7924 | 0.5048 | 0.0 | | 0.3494 | 0.29 | 480 | 0.3679 | 0.5070 | 0.7207 | 0.8460 | 0.8849 | 0.5082 | 0.7560 | 0.4808 | 0.8855 | 0.8088 | nan | 0.8284 | 0.3782 | 0.6844 | 0.4029 | 0.7387 | 0.5163 | 0.0 | | 0.1691 | 0.3 | 500 | 0.3436 | 0.5198 | 0.7262 | 0.8638 | 0.9274 | 0.5366 | 0.7800 | 0.4651 | 0.8808 | 0.7675 | nan | 0.8564 | 0.3869 | 0.6920 | 0.4037 | 0.7766 | 0.5232 | 0.0 | | 0.5782 | 0.32 | 520 | 0.4702 | 0.5067 | 0.7430 | 0.8398 | 0.8207 | 0.5963 | 0.8942 | 0.4582 | 0.8883 | 0.8005 | nan | 0.7941 | 0.3921 | 0.6395 | 0.4094 | 0.7838 | 0.5280 | 0.0 | | 0.7752 | 0.33 | 540 | 0.6535 | 0.4591 | 0.6686 | 0.7923 | 0.7106 | 0.5371 | 0.8396 | 0.3616 | 0.9534 | 0.6090 | nan | 0.6940 | 0.4164 | 0.5151 | 0.3090 | 0.7622 | 0.5171 | 0.0 | | 0.2089 | 0.34 | 560 | 0.3457 | 0.5121 | 0.7382 | 0.8549 | 0.8789 | 0.6058 | 0.8199 | 0.4140 | 0.8932 | 0.8171 
| nan | 0.8497 | 0.3949 | 0.7054 | 0.3535 | 0.7516 | 0.5294 | 0.0 | | 0.1504 | 0.35 | 580 | 0.3299 | 0.5208 | 0.7438 | 0.8605 | 0.8912 | 0.6374 | 0.8511 | 0.3967 | 0.8831 | 0.8036 | nan | 0.8542 | 0.4207 | 0.7280 | 0.3507 | 0.7619 | 0.5301 | 0.0 | | 0.1052 | 0.36 | 600 | 0.3514 | 0.5118 | 0.7143 | 0.8557 | 0.9408 | 0.5685 | 0.8064 | 0.3304 | 0.8314 | 0.8086 | nan | 0.8346 | 0.4004 | 0.7211 | 0.3147 | 0.7433 | 0.5682 | 0.0 | | 0.3436 | 0.38 | 620 | 0.3174 | 0.5293 | 0.7225 | 0.8702 | 0.9170 | 0.5454 | 0.8581 | 0.3863 | 0.8901 | 0.7380 | nan | 0.8482 | 0.3866 | 0.7412 | 0.3614 | 0.7797 | 0.5882 | 0.0 | | 0.2951 | 0.39 | 640 | 0.3557 | 0.5297 | 0.7490 | 0.8605 | 0.8547 | 0.6049 | 0.8916 | 0.4654 | 0.9130 | 0.7643 | nan | 0.8272 | 0.4138 | 0.7145 | 0.3913 | 0.7777 | 0.5834 | 0.0 | | 0.3383 | 0.4 | 660 | 0.3802 | 0.5108 | 0.7373 | 0.8460 | 0.9167 | 0.5489 | 0.8977 | 0.4706 | 0.7794 | 0.8104 | nan | 0.8249 | 0.3975 | 0.7184 | 0.3596 | 0.7186 | 0.5569 | 0.0 | | 0.4272 | 0.41 | 680 | 0.2963 | 0.5330 | 0.7518 | 0.8669 | 0.9489 | 0.6026 | 0.7575 | 0.5282 | 0.8565 | 0.8174 | nan | 0.8580 | 0.4109 | 0.7019 | 0.4198 | 0.7806 | 0.5601 | 0.0 | | 0.3004 | 0.42 | 700 | 0.3287 | 0.5242 | 0.7068 | 0.8728 | 0.9485 | 0.4648 | 0.7569 | 0.4087 | 0.8993 | 0.7628 | nan | 0.8661 | 0.3948 | 0.6820 | 0.3687 | 0.7975 | 0.5607 | 0.0 | | 0.368 | 0.44 | 720 | 0.3581 | 0.5145 | 0.7082 | 0.8678 | 0.8976 | 0.4916 | 0.9031 | 0.3196 | 0.8945 | 0.7427 | nan | 0.8625 | 0.4132 | 0.6936 | 0.2926 | 0.7928 | 0.5464 | 0.0 | | 0.5273 | 0.45 | 740 | 0.3392 | 0.5172 | 0.7461 | 0.8659 | 0.8987 | 0.6086 | 0.8829 | 0.3754 | 0.8723 | 0.8384 | nan | 0.8710 | 0.3943 | 0.7320 | 0.3510 | 0.7704 | 0.5018 | 0.0 | | 0.8596 | 0.46 | 760 | 0.2994 | 0.5144 | 0.7075 | 0.8733 | 0.9402 | 0.5261 | 0.8305 | 0.2849 | 0.8849 | 0.7781 | nan | 0.8820 | 0.4074 | 0.7324 | 0.2753 | 0.7808 | 0.5227 | 0.0 | | 0.3141 | 0.47 | 780 | 0.2862 | 0.5220 | 0.7148 | 0.8762 | 0.9304 | 0.6100 | 0.8318 | 0.2835 | 0.9128 | 0.7206 | nan | 0.8776 | 0.4229 | 0.7512 | 0.2734 | 0.7838 | 0.5453 | 0.0 | | 0.4501 | 0.49 | 800 | 0.3087 | 0.5179 | 0.7518 | 0.8713 | 0.9077 | 0.7417 | 0.8486 | 0.3256 | 0.9016 | 0.7858 | nan | 0.8720 | 0.3803 | 0.7325 | 0.3019 | 0.7876 | 0.5509 | 0.0 | | 0.5333 | 0.5 | 820 | 0.3242 | 0.5222 | 0.7365 | 0.8736 | 0.9122 | 0.6724 | 0.8373 | 0.2926 | 0.9118 | 0.7930 | nan | 0.8753 | 0.4221 | 0.7275 | 0.2756 | 0.7903 | 0.5645 | 0.0 | | 0.4082 | 0.51 | 840 | 0.3147 | 0.5108 | 0.7301 | 0.8651 | 0.8951 | 0.6418 | 0.8918 | 0.2432 | 0.8836 | 0.8253 | nan | 0.8640 | 0.4205 | 0.7213 | 0.2295 | 0.7843 | 0.5561 | 0.0 | | 0.1804 | 0.52 | 860 | 0.3430 | 0.5104 | 0.7248 | 0.8642 | 0.8920 | 0.6020 | 0.8910 | 0.2571 | 0.8860 | 0.8208 | nan | 0.8544 | 0.4244 | 0.7103 | 0.2378 | 0.7834 | 0.5628 | 0.0 | | 0.2636 | 0.53 | 880 | 0.2953 | 0.5190 | 0.7434 | 0.8647 | 0.9241 | 0.6135 | 0.8433 | 0.3692 | 0.8507 | 0.8594 | nan | 0.8685 | 0.4108 | 0.7447 | 0.3300 | 0.7575 | 0.5215 | 0.0 | | 0.9171 | 0.55 | 900 | 0.2782 | 0.5373 | 0.7328 | 0.8752 | 0.9118 | 0.5616 | 0.8318 | 0.4369 | 0.9192 | 0.7355 | nan | 0.8710 | 0.4169 | 0.7506 | 0.3880 | 0.7798 | 0.5546 | 0.0 | | 0.1809 | 0.56 | 920 | 0.2736 | 0.5392 | 0.7376 | 0.8749 | 0.9108 | 0.5501 | 0.8474 | 0.4279 | 0.9061 | 0.7835 | nan | 0.8691 | 0.4246 | 0.7473 | 0.3769 | 0.7837 | 0.5728 | 0.0 | | 0.3095 | 0.57 | 940 | 0.2762 | 0.5405 | 0.7460 | 0.8771 | 0.9084 | 0.5879 | 0.8590 | 0.4068 | 0.9076 | 0.8066 | nan | 0.8716 | 0.4199 | 0.7538 | 0.3667 | 0.7874 | 0.5842 | 0.0 | | 0.6095 | 0.58 | 960 | 0.2831 | 0.5220 | 0.7405 | 0.8655 | 0.9084 | 
0.6134 | 0.8646 | 0.3468 | 0.8689 | 0.8412 | nan | 0.8645 | 0.4026 | 0.7262 | 0.3220 | 0.7696 | 0.5695 | 0.0 | | 0.6369 | 0.59 | 980 | 0.3259 | 0.5190 | 0.7511 | 0.8612 | 0.8955 | 0.6528 | 0.9060 | 0.3588 | 0.8511 | 0.8425 | nan | 0.8545 | 0.4057 | 0.7171 | 0.3390 | 0.7648 | 0.5522 | 0.0 | | 1.711 | 0.61 | 1000 | 0.3439 | 0.5268 | 0.7580 | 0.8615 | 0.8617 | 0.6757 | 0.8944 | 0.4164 | 0.9026 | 0.7975 | nan | 0.8434 | 0.4338 | 0.7032 | 0.3844 | 0.7818 | 0.5411 | 0.0 | | 0.2443 | 0.62 | 1020 | 0.2789 | 0.5429 | 0.7415 | 0.8809 | 0.8993 | 0.6534 | 0.8807 | 0.4180 | 0.9425 | 0.6553 | nan | 0.8714 | 0.4214 | 0.7501 | 0.3893 | 0.8031 | 0.5651 | 0.0 | | 0.1833 | 0.63 | 1040 | 0.3325 | 0.5356 | 0.7394 | 0.8787 | 0.9275 | 0.6594 | 0.8686 | 0.3051 | 0.8942 | 0.7819 | nan | 0.8675 | 0.4338 | 0.7404 | 0.2979 | 0.7905 | 0.6189 | 0.0 | | 0.0592 | 0.64 | 1060 | 0.3044 | 0.5287 | 0.7271 | 0.8714 | 0.9078 | 0.6267 | 0.8723 | 0.2993 | 0.9020 | 0.7541 | nan | 0.8564 | 0.4381 | 0.7202 | 0.2867 | 0.7899 | 0.6094 | 0.0 | | 0.5377 | 0.65 | 1080 | 0.2896 | 0.5328 | 0.7307 | 0.8780 | 0.9091 | 0.5475 | 0.8544 | 0.3355 | 0.9168 | 0.8209 | nan | 0.8693 | 0.4136 | 0.7217 | 0.3155 | 0.8081 | 0.6012 | 0.0 | | 0.25 | 0.67 | 1100 | 0.3252 | 0.5161 | 0.7578 | 0.8583 | 0.8770 | 0.6339 | 0.9036 | 0.3850 | 0.8583 | 0.8890 | nan | 0.8596 | 0.4195 | 0.7235 | 0.3470 | 0.7676 | 0.4955 | 0.0 | | 0.3291 | 0.68 | 1120 | 0.3295 | 0.5100 | 0.7740 | 0.8550 | 0.8794 | 0.6870 | 0.9169 | 0.4707 | 0.8352 | 0.8547 | nan | 0.8526 | 0.3684 | 0.7032 | 0.3674 | 0.7645 | 0.5141 | 0.0 | | 0.1965 | 0.69 | 1140 | 0.2703 | 0.5336 | 0.7583 | 0.8752 | 0.9154 | 0.6437 | 0.8906 | 0.4315 | 0.8769 | 0.7919 | nan | 0.8785 | 0.4010 | 0.7532 | 0.3803 | 0.7833 | 0.5385 | 0.0 | | 0.129 | 0.7 | 1160 | 0.2486 | 0.5496 | 0.7453 | 0.8864 | 0.9272 | 0.5909 | 0.8937 | 0.4061 | 0.9048 | 0.7493 | nan | 0.8838 | 0.4231 | 0.7696 | 0.3737 | 0.7984 | 0.5990 | 0.0 | | 0.5261 | 0.72 | 1180 | 0.2643 | 0.5498 | 0.7565 | 0.8759 | 0.9306 | 0.6428 | 0.8678 | 0.4760 | 0.8724 | 0.7493 | nan | 0.8654 | 0.4343 | 0.7636 | 0.4222 | 0.7719 | 0.5914 | 0.0 | | 0.1468 | 0.73 | 1200 | 0.2658 | 0.5414 | 0.7664 | 0.8706 | 0.9212 | 0.6083 | 0.9012 | 0.5039 | 0.8395 | 0.8244 | nan | 0.8717 | 0.4338 | 0.7665 | 0.4223 | 0.7572 | 0.5382 | 0.0 | | 1.0941 | 0.74 | 1220 | 0.2715 | 0.5423 | 0.7611 | 0.8718 | 0.9043 | 0.5967 | 0.8793 | 0.4764 | 0.8766 | 0.8333 | nan | 0.8692 | 0.4257 | 0.7705 | 0.4168 | 0.7701 | 0.5438 | 0.0 | | 0.418 | 0.75 | 1240 | 0.2988 | 0.5385 | 0.7575 | 0.8631 | 0.8481 | 0.6181 | 0.8973 | 0.4993 | 0.9246 | 0.7573 | nan | 0.8263 | 0.4090 | 0.6882 | 0.4314 | 0.8081 | 0.6062 | 0.0 | | 0.173 | 0.76 | 1260 | 0.2824 | 0.5602 | 0.7812 | 0.8816 | 0.8983 | 0.6148 | 0.9039 | 0.5562 | 0.8982 | 0.8157 | nan | 0.8658 | 0.4415 | 0.7486 | 0.4542 | 0.8029 | 0.6083 | 0.0 | | 0.2501 | 0.78 | 1280 | 0.2893 | 0.5345 | 0.7801 | 0.8579 | 0.8856 | 0.6174 | 0.8920 | 0.5393 | 0.8337 | 0.9126 | nan | 0.8537 | 0.4336 | 0.7198 | 0.4689 | 0.7618 | 0.5034 | 0.0 | | 0.2213 | 0.79 | 1300 | 0.2579 | 0.5536 | 0.7501 | 0.8840 | 0.9223 | 0.5794 | 0.8305 | 0.4354 | 0.9198 | 0.8130 | nan | 0.8825 | 0.4336 | 0.7614 | 0.4142 | 0.7924 | 0.5915 | 0.0 | | 0.6845 | 0.8 | 1320 | 0.2766 | 0.5574 | 0.7593 | 0.8816 | 0.9103 | 0.5735 | 0.8930 | 0.4722 | 0.8971 | 0.8099 | nan | 0.8684 | 0.4448 | 0.7728 | 0.4200 | 0.7871 | 0.6083 | 0.0 | | 0.3139 | 0.81 | 1340 | 0.2591 | 0.5600 | 0.7617 | 0.8861 | 0.9095 | 0.5474 | 0.9057 | 0.4920 | 0.9052 | 0.8103 | nan | 0.8801 | 0.4356 | 0.7602 | 0.4273 | 0.8075 | 0.6096 | 0.0 | | 0.2207 | 
0.82 | 1360 | 0.3023 | 0.5519 | 0.7398 | 0.8842 | 0.9065 | 0.4921 | 0.8773 | 0.4597 | 0.9261 | 0.7774 | nan | 0.8717 | 0.4113 | 0.7559 | 0.4242 | 0.8022 | 0.5979 | 0.0 | | 0.055 | 0.84 | 1380 | 0.2899 | 0.5583 | 0.7640 | 0.8801 | 0.9062 | 0.6091 | 0.8915 | 0.4858 | 0.8992 | 0.7923 | nan | 0.8653 | 0.4651 | 0.7603 | 0.4424 | 0.7933 | 0.5815 | 0.0 | | 0.1648 | 0.85 | 1400 | 0.3026 | 0.5493 | 0.7529 | 0.8830 | 0.9021 | 0.6004 | 0.9090 | 0.4134 | 0.9147 | 0.7780 | nan | 0.8722 | 0.4553 | 0.7478 | 0.3737 | 0.8087 | 0.5877 | 0.0 | | 0.1717 | 0.86 | 1420 | 0.2719 | 0.5577 | 0.7945 | 0.8802 | 0.9051 | 0.6636 | 0.8980 | 0.5765 | 0.8797 | 0.8441 | nan | 0.8782 | 0.4527 | 0.7591 | 0.4554 | 0.7931 | 0.5656 | 0.0 | | 1.2901 | 0.87 | 1440 | 0.2583 | 0.5666 | 0.7906 | 0.8837 | 0.9008 | 0.6385 | 0.8771 | 0.5761 | 0.9061 | 0.8453 | nan | 0.8801 | 0.4551 | 0.7863 | 0.4752 | 0.7929 | 0.5766 | 0.0 | | 0.3958 | 0.89 | 1460 | 0.2567 | 0.5636 | 0.7722 | 0.8856 | 0.9209 | 0.5674 | 0.8749 | 0.5567 | 0.8950 | 0.8186 | nan | 0.8795 | 0.4382 | 0.7648 | 0.4645 | 0.8065 | 0.5913 | 0.0 | | 0.132 | 0.9 | 1480 | 0.3065 | 0.5529 | 0.7824 | 0.8769 | 0.8965 | 0.6280 | 0.8995 | 0.5739 | 0.8871 | 0.8096 | nan | 0.8637 | 0.4402 | 0.7403 | 0.4606 | 0.8000 | 0.5655 | 0.0 | | 0.1086 | 0.91 | 1500 | 0.3508 | 0.5322 | 0.7658 | 0.8560 | 0.8395 | 0.6086 | 0.9102 | 0.5617 | 0.9003 | 0.7743 | nan | 0.8166 | 0.4316 | 0.6703 | 0.4462 | 0.7977 | 0.5629 | 0.0 | | 0.1127 | 0.92 | 1520 | 0.3589 | 0.5310 | 0.7720 | 0.8529 | 0.8399 | 0.6515 | 0.9186 | 0.5537 | 0.8843 | 0.7840 | nan | 0.8100 | 0.4318 | 0.6527 | 0.4291 | 0.7970 | 0.5963 | 0.0 | | 0.3807 | 0.93 | 1540 | 0.2715 | 0.5554 | 0.7927 | 0.8792 | 0.8975 | 0.6704 | 0.8713 | 0.5857 | 0.8995 | 0.8319 | nan | 0.8742 | 0.4380 | 0.7658 | 0.4537 | 0.7898 | 0.5663 | 0.0 | | 0.3847 | 0.95 | 1560 | 0.2537 | 0.5589 | 0.7988 | 0.8847 | 0.9184 | 0.6811 | 0.8729 | 0.5827 | 0.8849 | 0.8530 | nan | 0.8863 | 0.4253 | 0.7846 | 0.4582 | 0.7933 | 0.5645 | 0.0 | | 0.2076 | 0.96 | 1580 | 0.2669 | 0.5517 | 0.7704 | 0.8865 | 0.9249 | 0.6891 | 0.8583 | 0.4244 | 0.9084 | 0.8171 | nan | 0.8811 | 0.4304 | 0.7516 | 0.3920 | 0.8123 | 0.5943 | 0.0 | | 0.1031 | 0.97 | 1600 | 0.2806 | 0.5474 | 0.7524 | 0.8853 | 0.9163 | 0.6332 | 0.8666 | 0.3833 | 0.9215 | 0.7932 | nan | 0.8782 | 0.4432 | 0.7448 | 0.3527 | 0.8112 | 0.6020 | 0.0 | | 0.2727 | 0.98 | 1620 | 0.3023 | 0.5370 | 0.7711 | 0.8710 | 0.8763 | 0.6717 | 0.9048 | 0.4370 | 0.8998 | 0.8370 | nan | 0.8557 | 0.4296 | 0.7258 | 0.3803 | 0.7962 | 0.5714 | 0.0 | | 0.2601 | 0.99 | 1640 | 0.3792 | 0.5187 | 0.7649 | 0.8554 | 0.8331 | 0.7026 | 0.9221 | 0.4155 | 0.9061 | 0.8097 | nan | 0.8181 | 0.3941 | 0.6737 | 0.3592 | 0.7989 | 0.5868 | 0.0 | | 0.239 | 1.01 | 1660 | 0.3111 | 0.5377 | 0.7876 | 0.8674 | 0.8753 | 0.6883 | 0.9208 | 0.5076 | 0.8718 | 0.8621 | nan | 0.8552 | 0.4240 | 0.7277 | 0.4169 | 0.7868 | 0.5533 | 0.0 | | 0.2939 | 1.02 | 1680 | 0.2581 | 0.5698 | 0.7874 | 0.8910 | 0.9218 | 0.6397 | 0.9075 | 0.5464 | 0.8947 | 0.8144 | nan | 0.8834 | 0.4488 | 0.7785 | 0.4475 | 0.8142 | 0.6161 | 0.0 | | 0.1292 | 1.03 | 1700 | 0.2495 | 0.5694 | 0.7724 | 0.8886 | 0.9291 | 0.6174 | 0.8777 | 0.5010 | 0.8964 | 0.8129 | nan | 0.8768 | 0.4587 | 0.7841 | 0.4434 | 0.8067 | 0.6160 | 0.0 | | 0.4733 | 1.04 | 1720 | 0.2527 | 0.5662 | 0.7743 | 0.8881 | 0.9246 | 0.6255 | 0.8860 | 0.4842 | 0.8958 | 0.8297 | nan | 0.8793 | 0.4605 | 0.7759 | 0.4335 | 0.8041 | 0.6104 | 0.0 | | 0.1835 | 1.06 | 1740 | 0.2607 | 0.5558 | 0.7705 | 0.8842 | 0.8955 | 0.6777 | 0.8876 | 0.4263 | 0.9280 | 0.8080 | nan | 0.8712 | 
0.4401 | 0.7581 | 0.4051 | 0.8096 | 0.6061 | 0.0 | | 0.4408 | 1.07 | 1760 | 0.2765 | 0.5536 | 0.7802 | 0.8793 | 0.8830 | 0.6812 | 0.8959 | 0.4971 | 0.9197 | 0.8046 | nan | 0.8569 | 0.4205 | 0.7288 | 0.4439 | 0.8175 | 0.6073 | 0.0 | | 0.1692 | 1.08 | 1780 | 0.2987 | 0.5517 | 0.7740 | 0.8819 | 0.8844 | 0.6921 | 0.9004 | 0.4412 | 0.9298 | 0.7961 | nan | 0.8635 | 0.4247 | 0.7442 | 0.4119 | 0.8119 | 0.6054 | 0.0 | | 0.0829 | 1.09 | 1800 | 0.2804 | 0.5394 | 0.7624 | 0.8797 | 0.9094 | 0.7067 | 0.8563 | 0.3608 | 0.9121 | 0.8289 | nan | 0.8706 | 0.4162 | 0.7470 | 0.3457 | 0.8019 | 0.5943 | 0.0 | | 0.6807 | 1.1 | 1820 | 0.3080 | 0.5373 | 0.7638 | 0.8729 | 0.8821 | 0.6748 | 0.9134 | 0.4184 | 0.9030 | 0.7915 | nan | 0.8532 | 0.4092 | 0.7250 | 0.3750 | 0.7992 | 0.5993 | 0.0 | | 0.2518 | 1.12 | 1840 | 0.2835 | 0.5332 | 0.7744 | 0.8669 | 0.8880 | 0.6827 | 0.9181 | 0.4495 | 0.8632 | 0.8447 | nan | 0.8575 | 0.4012 | 0.7213 | 0.3993 | 0.7805 | 0.5729 | 0.0 | | 0.7864 | 1.13 | 1860 | 0.2637 | 0.5532 | 0.7737 | 0.8879 | 0.9086 | 0.6829 | 0.9120 | 0.4406 | 0.9137 | 0.7842 | nan | 0.8832 | 0.4071 | 0.7702 | 0.3902 | 0.8153 | 0.6065 | 0.0 | | 0.0924 | 1.14 | 1880 | 0.2864 | 0.5621 | 0.7705 | 0.8949 | 0.9200 | 0.6569 | 0.8994 | 0.4515 | 0.9284 | 0.7665 | nan | 0.8908 | 0.4259 | 0.7806 | 0.4054 | 0.8209 | 0.6114 | 0.0 | | 0.1686 | 1.15 | 1900 | 0.2654 | 0.5637 | 0.7854 | 0.8912 | 0.9187 | 0.6555 | 0.8953 | 0.4942 | 0.9044 | 0.8445 | nan | 0.8892 | 0.4345 | 0.7787 | 0.4371 | 0.8110 | 0.5950 | 0.0 | | 0.1225 | 1.16 | 1920 | 0.2667 | 0.5587 | 0.7855 | 0.8852 | 0.9104 | 0.6479 | 0.9008 | 0.5066 | 0.8918 | 0.8552 | nan | 0.8832 | 0.4434 | 0.7668 | 0.4407 | 0.8031 | 0.5738 | 0.0 | | 0.1435 | 1.18 | 1940 | 0.2704 | 0.5619 | 0.7807 | 0.8860 | 0.8959 | 0.6520 | 0.8953 | 0.5080 | 0.9226 | 0.8107 | nan | 0.8726 | 0.4417 | 0.7566 | 0.4386 | 0.8158 | 0.6082 | 0.0 | | 0.2853 | 1.19 | 1960 | 0.2792 | 0.5523 | 0.7959 | 0.8822 | 0.9074 | 0.7239 | 0.8853 | 0.5060 | 0.8900 | 0.8628 | nan | 0.8801 | 0.4154 | 0.7708 | 0.4392 | 0.7978 | 0.5625 | 0.0 | | 0.1169 | 1.2 | 1980 | 0.2879 | 0.5629 | 0.7787 | 0.8900 | 0.9085 | 0.6766 | 0.8683 | 0.4738 | 0.9324 | 0.8124 | nan | 0.8797 | 0.4266 | 0.7629 | 0.4292 | 0.8164 | 0.6251 | 0.0 | | 0.1685 | 1.21 | 2000 | 0.2960 | 0.5645 | 0.7948 | 0.8899 | 0.9075 | 0.6827 | 0.8744 | 0.5332 | 0.9189 | 0.8520 | nan | 0.8793 | 0.4256 | 0.7577 | 0.4546 | 0.8224 | 0.6117 | 0.0 | | 0.7038 | 1.22 | 2020 | 0.2923 | 0.5641 | 0.7853 | 0.8874 | 0.8949 | 0.6525 | 0.8828 | 0.5251 | 0.9296 | 0.8266 | nan | 0.8714 | 0.4343 | 0.7489 | 0.4541 | 0.8219 | 0.6178 | 0.0 | | 0.6504 | 1.24 | 2040 | 0.2727 | 0.5656 | 0.7799 | 0.8906 | 0.9181 | 0.6237 | 0.9048 | 0.5272 | 0.9039 | 0.8017 | nan | 0.8771 | 0.4327 | 0.7616 | 0.4410 | 0.8245 | 0.6224 | 0.0 | | 0.3215 | 1.25 | 2060 | 0.2697 | 0.5692 | 0.7798 | 0.8921 | 0.9109 | 0.6537 | 0.9027 | 0.4922 | 0.9214 | 0.7980 | nan | 0.8794 | 0.4591 | 0.7621 | 0.4293 | 0.8272 | 0.6271 | 0.0 | | 0.7559 | 1.26 | 2080 | 0.2841 | 0.5703 | 0.7909 | 0.8938 | 0.9121 | 0.6957 | 0.8969 | 0.4812 | 0.9210 | 0.8386 | nan | 0.8861 | 0.4526 | 0.7802 | 0.4258 | 0.8202 | 0.6275 | 0.0 | | 0.0888 | 1.27 | 2100 | 0.2607 | 0.5670 | 0.7677 | 0.8935 | 0.9342 | 0.6483 | 0.8855 | 0.4075 | 0.9070 | 0.8234 | nan | 0.8861 | 0.4785 | 0.7798 | 0.3772 | 0.8168 | 0.6305 | 0.0 | | 0.4145 | 1.29 | 2120 | 0.2646 | 0.5615 | 0.7702 | 0.8867 | 0.9221 | 0.6530 | 0.8885 | 0.4237 | 0.8969 | 0.8367 | nan | 0.8752 | 0.4703 | 0.7690 | 0.3822 | 0.8086 | 0.6253 | 0.0 | | 0.059 | 1.3 | 2140 | 0.2673 | 0.5619 | 0.7662 | 0.8893 | 0.9142 
| 0.6207 | 0.9007 | 0.4401 | 0.9140 | 0.8076 | nan | 0.8776 | 0.4666 | 0.7564 | 0.3822 | 0.8222 | 0.6286 | 0.0 | | 0.2973 | 1.31 | 2160 | 0.2817 | 0.5660 | 0.7736 | 0.8912 | 0.9093 | 0.6434 | 0.8961 | 0.4758 | 0.9269 | 0.7901 | nan | 0.8798 | 0.4608 | 0.7590 | 0.4140 | 0.8216 | 0.6265 | 0.0 | | 0.4088 | 1.32 | 2180 | 0.2859 | 0.5479 | 0.7895 | 0.8790 | 0.9071 | 0.6934 | 0.9058 | 0.4804 | 0.8722 | 0.8782 | nan | 0.8749 | 0.4309 | 0.7583 | 0.4013 | 0.8000 | 0.5703 | 0.0 | | 0.6954 | 1.33 | 2200 | 0.2905 | 0.5541 | 0.7861 | 0.8764 | 0.8848 | 0.6835 | 0.8968 | 0.5118 | 0.9013 | 0.8386 | nan | 0.8572 | 0.4295 | 0.7496 | 0.4346 | 0.8012 | 0.6064 | 0.0 | | 0.5832 | 1.35 | 2220 | 0.3119 | 0.5508 | 0.7683 | 0.8747 | 0.8692 | 0.6771 | 0.8894 | 0.4628 | 0.9345 | 0.7771 | nan | 0.8459 | 0.4351 | 0.7185 | 0.4202 | 0.8111 | 0.6249 | 0.0 | | 0.0921 | 1.36 | 2240 | 0.3298 | 0.5461 | 0.7714 | 0.8718 | 0.8627 | 0.7034 | 0.8801 | 0.4590 | 0.9362 | 0.7869 | nan | 0.8396 | 0.4272 | 0.6938 | 0.4216 | 0.8149 | 0.6257 | 0.0 | | 0.3714 | 1.37 | 2260 | 0.2859 | 0.5629 | 0.7798 | 0.8866 | 0.9126 | 0.6515 | 0.8707 | 0.4921 | 0.9104 | 0.8414 | nan | 0.8783 | 0.4474 | 0.7555 | 0.4428 | 0.8119 | 0.6045 | 0.0 | | 0.1119 | 1.38 | 2280 | 0.2804 | 0.5521 | 0.7694 | 0.8821 | 0.9107 | 0.6123 | 0.8938 | 0.4466 | 0.8905 | 0.8629 | nan | 0.8753 | 0.4630 | 0.7505 | 0.3926 | 0.8054 | 0.5777 | 0.0 | | 0.256 | 1.39 | 2300 | 0.2871 | 0.5641 | 0.7823 | 0.8843 | 0.8895 | 0.6507 | 0.8879 | 0.5237 | 0.9270 | 0.8150 | nan | 0.8622 | 0.4488 | 0.7299 | 0.4582 | 0.8236 | 0.6261 | 0.0 | | 1.856 | 1.41 | 2320 | 0.3132 | 0.5445 | 0.7815 | 0.8742 | 0.8851 | 0.6856 | 0.9064 | 0.4533 | 0.8902 | 0.8684 | nan | 0.8555 | 0.4349 | 0.7115 | 0.4077 | 0.8106 | 0.5913 | 0.0 | | 0.2095 | 1.42 | 2340 | 0.2954 | 0.5624 | 0.7891 | 0.8844 | 0.8890 | 0.6993 | 0.8840 | 0.4935 | 0.9264 | 0.8427 | nan | 0.8638 | 0.4331 | 0.7320 | 0.4606 | 0.8214 | 0.6255 | 0.0 | | 0.4009 | 1.43 | 2360 | 0.2798 | 0.5565 | 0.8003 | 0.8818 | 0.9017 | 0.7110 | 0.9015 | 0.5252 | 0.8853 | 0.8769 | nan | 0.8714 | 0.4175 | 0.7538 | 0.4672 | 0.8064 | 0.5789 | 0.0 | | 0.324 | 1.44 | 2380 | 0.2871 | 0.5622 | 0.7878 | 0.8838 | 0.8877 | 0.6549 | 0.9034 | 0.5290 | 0.9171 | 0.8345 | nan | 0.8595 | 0.4275 | 0.7316 | 0.4622 | 0.8243 | 0.6302 | 0.0 | | 0.1967 | 1.46 | 2400 | 0.2866 | 0.5607 | 0.7792 | 0.8862 | 0.8918 | 0.6889 | 0.9070 | 0.5223 | 0.9336 | 0.7318 | nan | 0.8663 | 0.4122 | 0.7426 | 0.4698 | 0.8234 | 0.6108 | 0.0 | | 0.3549 | 1.47 | 2420 | 0.2846 | 0.5661 | 0.7802 | 0.8907 | 0.9066 | 0.6648 | 0.8892 | 0.4930 | 0.9283 | 0.7996 | nan | 0.8749 | 0.4221 | 0.7503 | 0.4583 | 0.8237 | 0.6336 | 0.0 | | 0.1298 | 1.48 | 2440 | 0.2927 | 0.5576 | 0.7861 | 0.8844 | 0.8865 | 0.7135 | 0.9022 | 0.4713 | 0.9275 | 0.8154 | nan | 0.8638 | 0.4191 | 0.7356 | 0.4254 | 0.8226 | 0.6368 | 0.0 | | 0.1442 | 1.49 | 2460 | 0.2714 | 0.5632 | 0.7953 | 0.8880 | 0.9137 | 0.6747 | 0.8955 | 0.5360 | 0.8946 | 0.8574 | nan | 0.8757 | 0.4123 | 0.7575 | 0.4568 | 0.8186 | 0.6218 | 0.0 | | 0.1179 | 1.5 | 2480 | 0.2873 | 0.5739 | 0.7854 | 0.8936 | 0.9142 | 0.6517 | 0.8819 | 0.5194 | 0.9252 | 0.8201 | nan | 0.8803 | 0.4460 | 0.7597 | 0.4696 | 0.8249 | 0.6369 | 0.0 | | 0.8153 | 1.52 | 2500 | 0.3244 | 0.5628 | 0.7819 | 0.8828 | 0.8855 | 0.6434 | 0.8990 | 0.4903 | 0.9196 | 0.8538 | nan | 0.8601 | 0.4594 | 0.7278 | 0.4458 | 0.8205 | 0.6262 | 0.0 | | 0.1386 | 1.53 | 2520 | 0.3220 | 0.5548 | 0.7648 | 0.8788 | 0.8843 | 0.6161 | 0.9062 | 0.4492 | 0.9163 | 0.8168 | nan | 0.8494 | 0.4596 | 0.7033 | 0.4101 | 0.8261 | 0.6350 | 0.0 | | 
0.3631 | 1.54 | 2540 | 0.2927 | 0.5611 | 0.7734 | 0.8852 | 0.9010 | 0.6518 | 0.8930 | 0.4558 | 0.9176 | 0.8214 | nan | 0.8666 | 0.4574 | 0.7316 | 0.4180 | 0.8232 | 0.6306 | 0.0 | | 0.1358 | 1.55 | 2560 | 0.2741 | 0.5618 | 0.7824 | 0.8875 | 0.9088 | 0.6661 | 0.8972 | 0.4611 | 0.9061 | 0.8551 | nan | 0.8760 | 0.4523 | 0.7497 | 0.4236 | 0.8184 | 0.6127 | 0.0 | | 0.1636 | 1.56 | 2580 | 0.2639 | 0.5716 | 0.7802 | 0.8943 | 0.9340 | 0.6282 | 0.8823 | 0.4880 | 0.9004 | 0.8484 | nan | 0.8878 | 0.4554 | 0.7736 | 0.4482 | 0.8186 | 0.6174 | 0.0 | | 0.218 | 1.58 | 2600 | 0.2771 | 0.5722 | 0.7804 | 0.8914 | 0.9113 | 0.6134 | 0.8914 | 0.4984 | 0.9160 | 0.8517 | nan | 0.8823 | 0.4693 | 0.7641 | 0.4576 | 0.8154 | 0.6165 | 0.0 | | 0.6271 | 1.59 | 2620 | 0.2715 | 0.5713 | 0.7803 | 0.8871 | 0.8985 | 0.6390 | 0.8907 | 0.5256 | 0.9244 | 0.8038 | nan | 0.8682 | 0.4587 | 0.7446 | 0.4759 | 0.8199 | 0.6316 | 0.0 | | 0.307 | 1.6 | 2640 | 0.3499 | 0.5620 | 0.7852 | 0.8789 | 0.8686 | 0.6577 | 0.9099 | 0.5258 | 0.9260 | 0.8234 | nan | 0.8479 | 0.4454 | 0.7133 | 0.4692 | 0.8217 | 0.6363 | 0.0 | | 0.2482 | 1.61 | 2660 | 0.2980 | 0.5654 | 0.7890 | 0.8830 | 0.8834 | 0.6598 | 0.9082 | 0.5263 | 0.9173 | 0.8390 | nan | 0.8598 | 0.4448 | 0.7340 | 0.4643 | 0.8210 | 0.6338 | 0.0 | | 0.1496 | 1.63 | 2680 | 0.2893 | 0.5750 | 0.7881 | 0.8922 | 0.9088 | 0.6366 | 0.8930 | 0.5268 | 0.9191 | 0.8445 | nan | 0.8799 | 0.4588 | 0.7638 | 0.4720 | 0.8201 | 0.6304 | 0.0 | | 0.2405 | 1.64 | 2700 | 0.2712 | 0.5738 | 0.7958 | 0.8884 | 0.8965 | 0.6660 | 0.8927 | 0.5553 | 0.9199 | 0.8441 | nan | 0.8731 | 0.4552 | 0.7597 | 0.4861 | 0.8167 | 0.6260 | 0.0 | | 0.0689 | 1.65 | 2720 | 0.2699 | 0.5773 | 0.8004 | 0.8916 | 0.9093 | 0.6645 | 0.8839 | 0.5675 | 0.9127 | 0.8645 | nan | 0.8818 | 0.4697 | 0.7649 | 0.4878 | 0.8179 | 0.6192 | 0.0 | | 1.3023 | 1.66 | 2740 | 0.2776 | 0.5739 | 0.8043 | 0.8902 | 0.9055 | 0.6846 | 0.8874 | 0.5705 | 0.9107 | 0.8673 | nan | 0.8798 | 0.4549 | 0.7620 | 0.4845 | 0.8171 | 0.6193 | 0.0 | | 0.401 | 1.67 | 2760 | 0.2705 | 0.5721 | 0.7931 | 0.8883 | 0.9074 | 0.6542 | 0.8896 | 0.5540 | 0.9072 | 0.8462 | nan | 0.8721 | 0.4551 | 0.7475 | 0.4782 | 0.8221 | 0.6295 | 0.0 | | 0.2472 | 1.69 | 2780 | 0.3258 | 0.5705 | 0.7939 | 0.8852 | 0.8866 | 0.6562 | 0.9042 | 0.5490 | 0.9180 | 0.8494 | nan | 0.8631 | 0.4681 | 0.7336 | 0.4728 | 0.8223 | 0.6338 | 0.0 | | 0.4908 | 1.7 | 2800 | 0.3074 | 0.5651 | 0.7893 | 0.8807 | 0.8882 | 0.6278 | 0.9025 | 0.5567 | 0.9016 | 0.8588 | nan | 0.8577 | 0.4620 | 0.7193 | 0.4756 | 0.8178 | 0.6234 | 0.0 | | 0.1247 | 1.71 | 2820 | 0.2845 | 0.5739 | 0.7789 | 0.8909 | 0.9063 | 0.6080 | 0.8883 | 0.5422 | 0.9268 | 0.8014 | nan | 0.8726 | 0.4572 | 0.7445 | 0.4782 | 0.8260 | 0.6386 | 0.0 | | 0.9431 | 1.72 | 2840 | 0.2788 | 0.5753 | 0.7823 | 0.8900 | 0.8967 | 0.6380 | 0.8976 | 0.5355 | 0.9333 | 0.7929 | nan | 0.8721 | 0.4694 | 0.7523 | 0.4729 | 0.8222 | 0.6386 | 0.0 | | 0.1346 | 1.73 | 2860 | 0.2942 | 0.5749 | 0.7819 | 0.8907 | 0.8988 | 0.6226 | 0.9048 | 0.5251 | 0.9280 | 0.8121 | nan | 0.8736 | 0.4707 | 0.7525 | 0.4630 | 0.8226 | 0.6417 | 0.0 | | 0.1469 | 1.75 | 2880 | 0.2782 | 0.5762 | 0.7808 | 0.8933 | 0.9093 | 0.6225 | 0.8923 | 0.5319 | 0.9289 | 0.8001 | nan | 0.8793 | 0.4653 | 0.7595 | 0.4657 | 0.8239 | 0.6395 | 0.0 | | 0.1829 | 1.76 | 2900 | 0.2748 | 0.5727 | 0.7899 | 0.8908 | 0.9105 | 0.6307 | 0.9093 | 0.5512 | 0.9050 | 0.8326 | nan | 0.8795 | 0.4647 | 0.7606 | 0.4629 | 0.8195 | 0.6217 | 0.0 | | 0.1476 | 1.77 | 2920 | 0.2591 | 0.5729 | 0.7936 | 0.8911 | 0.9130 | 0.6489 | 0.8955 | 0.5441 | 0.9053 | 0.8549 | nan | 
0.8828 | 0.4653 | 0.7657 | 0.4666 | 0.8169 | 0.6131 | 0.0 |
| 0.09 | 1.78 | 2940 | 0.2802 | 0.5692 | 0.7887 | 0.8892 | 0.9127 | 0.6333 | 0.9030 | 0.5209 | 0.8974 | 0.8651 | nan | 0.8830 | 0.4768 | 0.7639 | 0.4502 | 0.8116 | 0.5989 | 0.0 |
| 0.2123 | 1.8 | 2960 | 0.2605 | 0.5653 | 0.7857 | 0.8868 | 0.9139 | 0.6362 | 0.8964 | 0.5169 | 0.8930 | 0.8581 | nan | 0.8819 | 0.4653 | 0.7632 | 0.4462 | 0.8091 | 0.5913 | 0.0 |
| 0.4904 | 1.81 | 2980 | 0.2484 | 0.5707 | 0.7754 | 0.8939 | 0.9314 | 0.5966 | 0.8876 | 0.5218 | 0.9042 | 0.8109 | nan | 0.8847 | 0.4367 | 0.7715 | 0.4485 | 0.8230 | 0.6308 | 0.0 |
| 0.3414 | 1.82 | 3000 | 0.2569 | 0.5722 | 0.7811 | 0.8929 | 0.9166 | 0.6386 | 0.8934 | 0.5163 | 0.9180 | 0.8038 | nan | 0.8820 | 0.4596 | 0.7572 | 0.4440 | 0.8246 | 0.6380 | 0.0 |
| 0.0498 | 1.83 | 3020 | 0.2547 | 0.5738 | 0.7870 | 0.8923 | 0.9148 | 0.6430 | 0.8918 | 0.5312 | 0.9144 | 0.8268 | nan | 0.8817 | 0.4658 | 0.7594 | 0.4496 | 0.8235 | 0.6365 | 0.0 |
| 0.3362 | 1.84 | 3040 | 0.2595 | 0.5756 | 0.7856 | 0.8937 | 0.9173 | 0.6259 | 0.8873 | 0.5339 | 0.9167 | 0.8325 | nan | 0.8833 | 0.4694 | 0.7628 | 0.4537 | 0.8248 | 0.6352 | 0.0 |
| 0.1219 | 1.86 | 3060 | 0.2537 | 0.5750 | 0.7841 | 0.8947 | 0.9217 | 0.6318 | 0.8915 | 0.5239 | 0.9145 | 0.8211 | nan | 0.8846 | 0.4633 | 0.7649 | 0.4475 | 0.8255 | 0.6395 | 0.0 |
| 0.123 | 1.87 | 3080 | 0.2808 | 0.5758 | 0.7945 | 0.8934 | 0.9069 | 0.6636 | 0.9036 | 0.5391 | 0.9199 | 0.8339 | nan | 0.8822 | 0.4694 | 0.7664 | 0.4539 | 0.8238 | 0.6346 | 0.0 |
| 0.3721 | 1.88 | 3100 | 0.2523 | 0.5777 | 0.7885 | 0.8954 | 0.9207 | 0.6431 | 0.8826 | 0.5526 | 0.9209 | 0.8111 | nan | 0.8858 | 0.4599 | 0.7679 | 0.4677 | 0.8257 | 0.6367 | 0.0 |
| 0.284 | 1.89 | 3120 | 0.2545 | 0.5786 | 0.7927 | 0.8948 | 0.9151 | 0.6464 | 0.8905 | 0.5516 | 0.9188 | 0.8336 | nan | 0.8854 | 0.4687 | 0.7707 | 0.4664 | 0.8241 | 0.6350 | 0.0 |
| 0.5131 | 1.9 | 3140 | 0.2502 | 0.5768 | 0.7875 | 0.8948 | 0.9234 | 0.6250 | 0.8978 | 0.5439 | 0.9072 | 0.8275 | nan | 0.8856 | 0.4619 | 0.7722 | 0.4582 | 0.8243 | 0.6357 | 0.0 |
| 0.1381 | 1.92 | 3160 | 0.2580 | 0.5760 | 0.7772 | 0.8954 | 0.9194 | 0.6006 | 0.9047 | 0.5318 | 0.9194 | 0.7870 | nan | 0.8853 | 0.4661 | 0.7702 | 0.4505 | 0.8260 | 0.6338 | 0.0 |
| 0.3162 | 1.93 | 3180 | 0.2739 | 0.5775 | 0.7918 | 0.8935 | 0.9066 | 0.6439 | 0.9006 | 0.5423 | 0.9222 | 0.8350 | nan | 0.8822 | 0.4750 | 0.7676 | 0.4609 | 0.8230 | 0.6338 | 0.0 |
| 0.1153 | 1.94 | 3200 | 0.2678 | 0.5704 | 0.7631 | 0.8953 | 0.9273 | 0.5727 | 0.8881 | 0.5334 | 0.9260 | 0.7312 | nan | 0.8843 | 0.4380 | 0.7696 | 0.4600 | 0.8251 | 0.6161 | 0.0 |
| 0.0417 | 1.95 | 3220 | 0.2650 | 0.5775 | 0.7906 | 0.8950 | 0.9137 | 0.6581 | 0.8944 | 0.5503 | 0.9244 | 0.8028 | nan | 0.8844 | 0.4627 | 0.7687 | 0.4657 | 0.8250 | 0.6359 | 0.0 |
| 0.1364 | 1.96 | 3240 | 0.2696 | 0.5771 | 0.7861 | 0.8953 | 0.9133 | 0.6531 | 0.8970 | 0.5482 | 0.9295 | 0.7756 | nan | 0.8844 | 0.4652 | 0.7692 | 0.4642 | 0.8254 | 0.6317 | 0.0 |
| 0.0806 | 1.98 | 3260 | 0.2718 | 0.5771 | 0.7957 | 0.8945 | 0.9151 | 0.6654 | 0.9009 | 0.5405 | 0.9130 | 0.8391 | nan | 0.8844 | 0.4683 | 0.7680 | 0.4590 | 0.8247 | 0.6356 | 0.0 |
| 0.1196 | 1.99 | 3280 | 0.2563 | 0.5765 | 0.7934 | 0.8942 | 0.9204 | 0.6450 | 0.8936 | 0.5526 | 0.9077 | 0.8415 | nan | 0.8847 | 0.4614 | 0.7695 | 0.4632 | 0.8233 | 0.6331 | 0.0 |

### Framework versions

- Transformers 4.33.0
- Pytorch 2.0.0
- Datasets 2.1.0
- Tokenizers 0.13.3
{"license": "other", "tags": ["vision", "image-segmentation", "generated_from_trainer"], "base_model": "peldrak/segformer-finetuned-coastalDataset", "model-index": [{"name": "segformer-finetuned-coasts-final", "results": []}]}
image-segmentation
peldrak/segformer-finetuned-coasts-final
[ "transformers", "pytorch", "segformer", "vision", "image-segmentation", "generated_from_trainer", "base_model:peldrak/segformer-finetuned-coastalDataset", "license:other", "endpoints_compatible", "region:us" ]
2023-11-12T18:23:08+00:00
[]
[]
TAGS #transformers #pytorch #segformer #vision #image-segmentation #generated_from_trainer #base_model-peldrak/segformer-finetuned-coastalDataset #license-other #endpoints_compatible #region-us
segformer-finetuned-coasts-final ================================ This model is a fine-tuned version of peldrak/segformer-finetuned-coastalDataset on the peldrak/coastal2 dataset. It achieves the following results on the evaluation set: * Loss: 0.2563 * Mean Iou: 0.5765 * Mean Accuracy: 0.7934 * Overall Accuracy: 0.8942 * Accuracy Water: 0.9204 * Accuracy Whitewater: 0.6450 * Accuracy Sediment: 0.8936 * Accuracy Other Natural Terrain: 0.5526 * Accuracy Vegetation: 0.9077 * Accuracy Development: 0.8415 * Accuracy Unknown: nan * Iou Water: 0.8847 * Iou Whitewater: 0.4614 * Iou Sediment: 0.7695 * Iou Other Natural Terrain: 0.4632 * Iou Vegetation: 0.8233 * Iou Development: 0.6331 * Iou Unknown: 0.0 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 6e-05 * train\_batch\_size: 4 * eval\_batch\_size: 4 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * num\_epochs: 2 ### Training results ### Framework versions * Transformers 4.33.0 * Pytorch 2.0.0 * Datasets 2.1.0 * Tokenizers 0.13.3
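Read as a recipe, the hyperparameters listed above map onto a standard `transformers` Trainer configuration. The following is a minimal sketch under that assumption; the `output_dir` name and the dataset handling are illustrative, not the card's actual training script:

```python
# A hedged sketch of the listed hyperparameters as a transformers setup;
# the original HCP training script is not included in the card.
from transformers import SegformerForSemanticSegmentation, TrainingArguments

model = SegformerForSemanticSegmentation.from_pretrained(
    "peldrak/segformer-finetuned-coastalDataset"  # base model named in the card
)

args = TrainingArguments(
    output_dir="segformer-finetuned-coasts-final",
    learning_rate=6e-5,               # learning_rate: 6e-05
    per_device_train_batch_size=4,    # train_batch_size: 4
    per_device_eval_batch_size=4,     # eval_batch_size: 4
    seed=42,                          # seed: 42
    adam_beta1=0.9,                   # Adam betas=(0.9, 0.999)
    adam_beta2=0.999,
    adam_epsilon=1e-8,                # epsilon: 1e-08
    lr_scheduler_type="linear",       # lr_scheduler_type: linear
    num_train_epochs=2,               # num_epochs: 2
)

# These args would then be passed to a Trainer together with preprocessed
# train/eval splits of the peldrak/coastal2 dataset (preprocessing omitted here).
```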
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2", "### Training results", "### Framework versions\n\n\n* Transformers 4.33.0\n* Pytorch 2.0.0\n* Datasets 2.1.0\n* Tokenizers 0.13.3" ]
[ "TAGS\n#transformers #pytorch #segformer #vision #image-segmentation #generated_from_trainer #base_model-peldrak/segformer-finetuned-coastalDataset #license-other #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2", "### Training results", "### Framework versions\n\n\n* Transformers 4.33.0\n* Pytorch 2.0.0\n* Datasets 2.1.0\n* Tokenizers 0.13.3" ]
[ 65, 98, 4, 30 ]
[ "passage: TAGS\n#transformers #pytorch #segformer #vision #image-segmentation #generated_from_trainer #base_model-peldrak/segformer-finetuned-coastalDataset #license-other #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2### Training results### Framework versions\n\n\n* Transformers 4.33.0\n* Pytorch 2.0.0\n* Datasets 2.1.0\n* Tokenizers 0.13.3" ]
[ -0.10850391536951065, 0.035926368087530136, -0.0028501360211521387, 0.11110061407089233, 0.14993955194950104, 0.038158874958753586, 0.11395425349473953, 0.10601773113012314, -0.11149395257234573, 0.030759459361433983, 0.10913681983947754, 0.12291571497917175, 0.006016973406076431, 0.12473491579294205, -0.005002107936888933, -0.3055165708065033, 0.0023029842413961887, 0.04331742599606514, -0.06718599051237106, 0.11943379044532776, 0.09443128854036331, -0.14599530398845673, 0.09821508079767227, 0.010649154894053936, -0.2250249981880188, 0.028194192796945572, -0.01174291130155325, -0.027309859171509743, 0.15218928456306458, 0.03539392724633217, 0.11058694869279861, 0.000719573930837214, 0.11187814176082611, -0.20365262031555176, 0.014463700354099274, 0.02776920050382614, -0.006540299858897924, 0.05310538411140442, 0.028479771688580513, 0.0037406873889267445, 0.15143175423145294, -0.08137070387601852, 0.0646517276763916, 0.008320271968841553, -0.12732191383838654, -0.20917002856731415, -0.05758461728692055, 0.08294972777366638, 0.10721895843744278, 0.09659594297409058, 0.002245945855975151, 0.06110665202140808, -0.08580881357192993, 0.09635540097951889, 0.26709842681884766, -0.22948473691940308, -0.07619840651750565, 0.033666592091321945, -0.009108345955610275, 0.0527736060321331, -0.11845391988754272, -0.008107641711831093, 0.04410420358181, 0.037817128002643585, 0.10057634115219116, -0.03305961191654205, -0.03443325310945511, 0.021052543073892593, -0.14332452416419983, -0.057842761278152466, 0.1614707112312317, 0.06299645453691483, -0.030161458998918533, -0.03768898919224739, -0.06627878546714783, -0.15740175545215607, -0.0547383613884449, 0.011557604186236858, 0.05444057658314705, -0.04945966228842735, -0.07859962433576584, -0.03362671285867691, -0.12176019698381424, -0.10582638531923294, -0.04772716015577316, 0.16485735774040222, 0.04216697812080383, 0.03256179392337799, -0.028818747028708458, 0.11795257031917572, -0.05141182616353035, -0.10903763771057129, 0.0010535350302234292, 0.015262688510119915, -0.01838258095085621, -0.007332163862884045, -0.06681672483682632, -0.02557050995528698, -0.01397106796503067, 0.16457374393939972, -0.05293131619691849, 0.04186972603201866, 0.11035285890102386, 0.062439896166324615, -0.10428740829229355, 0.1752108484506607, -0.046497952193021774, 0.006215519737452269, -0.028860684484243393, 0.041257500648498535, 0.02122453972697258, -0.009291651658713818, -0.12297166138887405, -0.016419077292084694, 0.07986705005168915, -0.029572024941444397, -0.09447278827428818, 0.0641193687915802, -0.04679086431860924, -0.021118231117725372, -0.0073065985925495625, -0.08027855306863785, 0.022181659936904907, -0.021479513496160507, -0.06461302936077118, -0.03455812856554985, 0.057129159569740295, 0.013757666572928429, 0.017666583880782127, 0.1294388324022293, -0.0872865542769432, 0.04702506586909294, -0.0968475341796875, -0.06922587007284164, -0.009879517368972301, -0.08766280114650726, 0.04938525706529617, -0.09888608753681183, -0.1999790221452713, -0.015519790351390839, 0.06105172261595726, -0.01483090315014124, -0.04489145800471306, -0.051192767918109894, -0.07656998932361603, 0.001399275497533381, -0.005856335628777742, 0.0844058021903038, -0.05644841492176056, 0.10573431104421616, 0.06161559745669365, 0.06553229689598083, -0.08408017456531525, 0.03300617262721062, -0.09885680675506592, 0.02225453592836857, -0.1793733388185501, 0.04370702803134918, -0.04246487095952034, 0.06538116186857224, -0.05607981234788895, -0.11206036806106567, -0.028339402750134468, 
0.018459586426615715, 0.06667561829090118, 0.11905202269554138, -0.20503005385398865, -0.09582361578941345, 0.14479875564575195, -0.11178787052631378, -0.12898696959018707, 0.10561224818229675, -0.05350489541888237, 0.0326332226395607, 0.05519770085811615, 0.19595585763454437, 0.07744117826223373, -0.10768086463212967, 0.005122627597302198, 0.007231234107166529, 0.048816390335559845, -0.04667551815509796, 0.08084999769926071, 0.011914649046957493, 0.06855868548154831, 0.030426349490880966, -0.09179767221212387, 0.05800195783376694, -0.10881490260362625, -0.10366553068161011, -0.03685310110449791, -0.08047116547822952, 0.05227552726864815, 0.078850656747818, 0.06975335627794266, -0.10698188096284866, -0.07706194370985031, 0.10520894080400467, 0.09252676367759705, -0.07028625905513763, 0.0447247214615345, -0.0677017793059349, 0.053432781249284744, -0.025700077414512634, -0.03187491372227669, -0.15297196805477142, -0.052423495799303055, -0.02814931422472, 0.02322409674525261, 0.03057185374200344, 0.01888407953083515, 0.07507641613483429, 0.07021388411521912, -0.061232469975948334, -0.05294211953878403, -0.10574408620595932, 0.002212133491411805, -0.10413520783185959, -0.1827055960893631, -0.05754437297582626, -0.00524927070364356, 0.10796550661325455, -0.1934923380613327, 0.045682523399591446, 0.026860831305384636, 0.09364362806081772, 0.017002703621983528, -0.03559914231300354, -0.06964228302240372, 0.06724565476179123, -0.018374178558588028, -0.04378768429160118, 0.06521198153495789, 0.004841526970267296, -0.08713162690401077, -0.06772248446941376, -0.0918022021651268, 0.18452946841716766, 0.12100920081138611, -0.16808204352855682, -0.08372275531291962, -0.031144728884100914, -0.07295835018157959, -0.03607318922877312, -0.0414501316845417, -0.011111038736999035, 0.15685485303401947, -0.019942959770560265, 0.1259891539812088, -0.05818958953022957, -0.03127386048436165, 0.02464103139936924, -0.04914209619164467, 0.0035461564548313618, 0.10839083045721054, 0.12053194642066956, -0.02633117325603962, 0.13917121291160583, 0.13942469656467438, -0.09971550852060318, 0.13811081647872925, -0.04053269326686859, -0.08049671351909637, -0.012006505392491817, -0.027303028851747513, -0.02872568927705288, 0.17338307201862335, -0.18861672282218933, -0.03185313567519188, 0.004413913004100323, 0.014019221067428589, 0.031055260449647903, -0.2474193125963211, -0.052163176238536835, 0.05130935460329056, -0.039206862449645996, 0.017836090177297592, -0.01100681908428669, -0.02585393190383911, 0.08700203150510788, -0.00471503846347332, -0.08995988965034485, 0.028647659346461296, -0.0031577935442328453, -0.06613083183765411, 0.1953236162662506, -0.058505695313215256, -0.1338813155889511, -0.13422445952892303, -0.030447382479906082, -0.05932622775435448, 0.024016503244638443, 0.07703477144241333, -0.077860988676548, -0.01901133731007576, -0.0680289939045906, 0.030020924285054207, -0.015223708003759384, 0.04530373215675354, 0.030987698584794998, -0.010067948140203953, 0.05390014871954918, -0.08554796874523163, -0.020959600806236267, -0.064821258187294, -0.04662620276212692, 0.03127321973443031, 0.02543957345187664, 0.15804031491279602, 0.1353643387556076, -0.016773387789726257, 0.030250700190663338, -0.03014836646616459, 0.2881605327129364, -0.07833221554756165, -0.0436580553650856, 0.16347552835941315, -0.0032852310687303543, 0.043508682399988174, 0.11864468455314636, 0.07636096328496933, -0.09803284704685211, -0.005842266138643026, 0.05895484238862991, -0.05601280927658081, -0.1346355676651001, -0.041236359626054764, 
-0.05759256333112717, -0.027715131640434265, 0.07532566040754318, 0.04069826379418373, -0.01044518407434225, 0.07566467672586441, 0.027435746043920517, 0.055209171026945114, -0.005259589292109013, 0.07041749358177185, 0.14623624086380005, 0.02125156670808792, 0.10979712009429932, -0.04131246358156204, -0.06009359285235405, 0.0298952367156744, 0.027371827512979507, 0.22984111309051514, 0.005998141132295132, 0.09940160810947418, 0.08126574009656906, 0.1307212859392166, -0.012220904231071472, 0.021234268322587013, -0.01530067902058363, -0.060584843158721924, -0.011629685759544373, -0.03775867074728012, -0.04023215174674988, 0.043566517531871796, -0.07084232568740845, 0.06730787456035614, -0.14661361277103424, 0.02320307306945324, 0.0752883180975914, 0.22363775968551636, 0.028319289907813072, -0.31675246357917786, -0.09460033476352692, 0.016171997413039207, -0.015495898202061653, -0.0033865112345665693, 0.024689409881830215, 0.1189519390463829, -0.07715308666229248, 0.03634952753782272, -0.062225356698036194, 0.0807991623878479, -0.03985650837421417, 0.05519455671310425, 0.07302340120077133, 0.051578719168901443, 0.02360561676323414, 0.0690913200378418, -0.24050557613372803, 0.2835291922092438, -0.010628534480929375, 0.060222696512937546, -0.03401454910635948, -0.0115330396220088, 0.008159266784787178, 0.1351551115512848, 0.12385357171297073, -0.007118493318557739, -0.036952905356884, -0.20036159455776215, 0.00987860094755888, 0.03228490427136421, 0.11816004663705826, -0.06739991158246994, 0.09181427210569382, -0.027048146352171898, 0.0174388587474823, 0.06339016556739807, 0.04286644235253334, -0.030284970998764038, -0.10991909354925156, -0.01006441842764616, -0.023943541571497917, -0.038743581622838974, -0.07931198179721832, -0.10472802817821503, -0.11635930836200714, 0.16804945468902588, -0.026777174323797226, -0.03653198108077049, -0.11558020859956741, 0.06354091316461563, 0.06326431781053543, -0.08567753434181213, 0.06117299199104309, 0.03479578718543053, 0.0827098935842514, 0.020034706220030785, -0.05824151262640953, 0.09703654050827026, -0.07129930704832077, -0.14061453938484192, -0.05109865218400955, 0.11593875288963318, 0.01954696513712406, 0.03886682912707329, -0.013111374340951443, 0.0037698668893426657, -0.029896680265665054, -0.09619233757257462, 0.04563077166676521, 0.02803300879895687, 0.04797425493597984, 0.022229105234146118, -0.03089218959212303, 0.07353106141090393, -0.033870842307806015, -0.015023223124444485, 0.15305785834789276, 0.2869255244731903, -0.09295844286680222, 0.0004229741171002388, 0.00982910580933094, -0.07408209890127182, -0.2001606673002243, 0.034955982118844986, 0.058172162622213364, 0.005137953441590071, 0.04926137998700142, -0.15989834070205688, 0.096367746591568, 0.12396978586912155, -0.012686488218605518, 0.05588982626795769, -0.37960851192474365, -0.126799076795578, 0.10342741757631302, 0.1715429276227951, 0.12235421687364578, -0.15620096027851105, -0.014525679871439934, -0.03106299601495266, -0.16725002229213715, 0.07096437364816666, -0.02101931720972061, 0.12396930158138275, -0.0340137742459774, 0.09268105775117874, 0.011226100847125053, -0.04931305721402168, 0.14436988532543182, 0.02006492391228676, 0.11907245218753815, -0.06317581236362457, -0.023396793752908707, 0.044769536703825, -0.05050819739699364, 0.018853692337870598, -0.02611689269542694, 0.03858635202050209, -0.08234140276908875, -0.024610497057437897, -0.08548519760370255, 0.030806953087449074, -0.02575111947953701, -0.06964490562677383, -0.05754539743065834, 0.02427392080426216, 
0.026887992396950722, -0.0012904079630970955, 0.17634955048561096, -0.002268852200359106, 0.08858326822519302, 0.02717609331011772, 0.04518516734242439, -0.08103911578655243, -0.15716929733753204, -0.032689858227968216, -0.0026323336642235518, 0.06326192617416382, -0.12589138746261597, 0.0254711601883173, 0.14173339307308197, 0.05427620932459831, 0.14487197995185852, 0.09249129146337509, -0.023703845217823982, 0.023625865578651428, 0.07587764412164688, -0.16536566615104675, -0.14112801849842072, -0.04030463844537735, -0.06899195164442062, -0.09937172383069992, 0.09624314308166504, 0.08477648347616196, -0.0822187289595604, 0.004092120099812746, -0.022550223395228386, 0.0011070246109738946, -0.07727744430303574, 0.1784883439540863, 0.06303635984659195, 0.03257312625646591, -0.08665035665035248, 0.0786595493555069, -0.0025059436447918415, -0.06575946509838104, -0.0072514209896326065, 0.0714951753616333, -0.06270817667245865, -0.03682161495089531, 0.03202791139483452, 0.1458403468132019, -0.11509168893098831, -0.034649401903152466, -0.15484924614429474, -0.10663383454084396, 0.06556554138660431, 0.1559063345193863, 0.12840574979782104, 0.008572063408792019, -0.03773299977183342, 0.051537174731492996, -0.09651344269514084, 0.07108031958341599, 0.023391617462038994, 0.09591496735811234, -0.18150964379310608, 0.14274083077907562, 0.001537928357720375, 0.06565775722265244, -0.02250841073691845, 0.02217954583466053, -0.11547326296567917, 0.03736831247806549, -0.08644749224185944, -0.030136123299598694, -0.03399656340479851, 0.01272339653223753, 0.012541417963802814, -0.05010838061571121, -0.056913089007139206, 0.024647606536746025, -0.11670061945915222, -0.022098248824477196, 0.043985866010189056, 0.04928762465715408, -0.11712416261434555, -0.018488502129912376, 0.03602299094200134, -0.06778296083211899, 0.058820925652980804, 0.03814820572733879, 0.034534651786088943, 0.06807845085859299, -0.1670064926147461, -0.0010674407240003347, 0.0944424718618393, 0.0008830325677990913, 0.03465669974684715, -0.0380205437541008, -0.02269049547612667, -0.008767545223236084, 0.05152977257966995, 0.00010186823055846617, 0.04378462955355644, -0.13710656762123108, -0.016005897894501686, -0.02436612918972969, -0.09199099987745285, -0.06342646479606628, 0.03584805503487587, 0.08178574591875076, 0.05354538932442665, 0.17310462892055511, -0.07437688112258911, 0.02838192693889141, -0.19860389828681946, 0.00005309957487042993, 0.011413086205720901, -0.0871848315000534, -0.07489215582609177, -0.08884952962398529, 0.05016256496310234, -0.06323330104351044, 0.1290445625782013, 0.021285057067871094, 0.05818932503461838, 0.03157230094075203, -0.01741061545908451, 0.05532274767756462, 0.03127364069223404, 0.25315478444099426, 0.01818656548857689, -0.028330929577350616, 0.07734901458024979, 0.06842772662639618, 0.10969460755586624, 0.13255032896995544, 0.17388072609901428, 0.13683684170246124, -0.06670544296503067, 0.11415994167327881, 0.06849442422389984, -0.034663520753383636, -0.18071472644805908, 0.002895937766879797, -0.014659974724054337, 0.07019538432359695, -0.04974663257598877, 0.19971488416194916, 0.13412654399871826, -0.18179303407669067, 0.05081643909215927, -0.015590705908834934, -0.09764290601015091, -0.08636023849248886, -0.0863294005393982, -0.07997959107160568, -0.15723487734794617, 0.03621067479252815, -0.11200536787509918, 0.0071076927706599236, 0.13713359832763672, -0.009374172426760197, -0.01448940671980381, 0.19056755304336548, 0.011548206210136414, 0.03063003346323967, 0.03289981186389923, 
0.0016038281610235572, -0.04429076611995697, -0.09255538880825043, -0.06364329904317856, 0.03430388495326042, -0.044092535972595215, 0.013367255218327045, -0.07672446966171265, -0.05699998140335083, 0.006133455783128738, 0.0012746824650093913, -0.07166796922683716, 0.004181234631687403, 0.013868764042854309, 0.07625385373830795, 0.018769871443510056, 0.01185208186507225, 0.0006041050655767322, -0.02336648665368557, 0.24757350981235504, -0.09521733224391937, -0.07244601845741272, -0.09404576569795609, 0.19479753077030182, 0.05316925048828125, 0.02047479897737503, 0.011605641804635525, -0.09216508269309998, 0.010882750153541565, 0.21143989264965057, 0.16492871940135956, -0.09730468690395355, 0.005309950094670057, 0.013579631224274635, 0.004842341411858797, -0.030567949637770653, 0.08983022719621658, 0.10675537586212158, 0.06514804810285568, -0.0790916234254837, -0.038175929337739944, -0.0496138297021389, -0.004884653724730015, -0.03739563003182411, 0.06364656239748001, 0.060184963047504425, 0.019593164324760437, -0.07625347375869751, 0.058551568537950516, -0.057579681277275085, -0.09090150147676468, 0.11426205933094025, -0.1957719326019287, -0.13046492636203766, 0.0035776826553046703, 0.11610617488622665, 0.004813019186258316, 0.06172168254852295, -0.05024360120296478, -0.009958135895431042, 0.07679039984941483, 0.011914287693798542, -0.11270550638437271, -0.11886894702911377, 0.08602982014417648, -0.08438990265130997, 0.23701201379299164, -0.053810086101293564, 0.059075403958559036, 0.09985921531915665, 0.0666944682598114, -0.05796262249350548, 0.0741284042596817, 0.029631441459059715, -0.07942283898591995, -0.013361659832298756, 0.12258840352296829, -0.03525754064321518, 0.09123314172029495, 0.03342950716614723, -0.14867939054965973, 0.007433925289660692, -0.029391659423708916, -0.04259856045246124, -0.03108440525829792, -0.03154029697179794, -0.08523346483707428, 0.11728634685277939, 0.1784176528453827, -0.014979204162955284, -0.010340629145503044, -0.0721476599574089, 0.03672625496983528, 0.0834159255027771, 0.046178147196769714, -0.04458455368876457, -0.18634162843227386, 0.0014926719013601542, 0.06270145624876022, -0.04286644980311394, -0.17222654819488525, -0.13231663405895233, 0.019540974870324135, -0.03790561482310295, -0.05374513566493988, 0.08871320635080338, 0.11430633813142776, 0.041192151606082916, -0.06321034580469131, -0.09707876294851303, -0.055751245468854904, 0.1511128693819046, -0.1358138620853424, -0.10323872417211533 ]
null
null
null
# Lora of akira_makino_onichichi

This model is trained with [HCP-Diffusion](https://github.com/7eu7d7/HCP-Diffusion), and the auto-training framework is maintained by the [DeepGHS Team](https://huggingface.co/deepghs).

The base model used during training is [NAI](https://huggingface.co/deepghs/animefull-latest), and the base model used for generating preview images is [Meina/MeinaMix_V11](https://huggingface.co/Meina/MeinaMix_V11).

After downloading the pt and safetensors files for the specified step, you need to use them together. The pt file is used as an embedding, while the safetensors file is loaded as the LoRA.

For example, if you want to use the model from step 5600, you need to download `5600/akira_makino_onichichi.pt` as the embedding and `5600/akira_makino_onichichi.safetensors` for loading the LoRA. By using both files together, you can generate images of the desired character.

**The best step we recommend is 5600**, with a score of 0.792.

The trigger words are:

1. `akira_makino_onichichi`
2. `blush, short_hair, purple_eyes, blue_hair, breasts, open_mouth, black_hair, large_breasts`

For the following groups, use of this model is not recommended, and we express our regret:

1. Individuals who cannot tolerate any deviations from the original character design, even in the slightest detail.
2. Individuals who are facing application scenarios with high demands for accuracy in recreating character outfits.
3. Individuals who cannot accept the potential randomness in AI-generated images based on the Stable Diffusion algorithm.
4. Individuals who are not comfortable with the fully automated process of training character models using LoRA, or those who believe that training character models must be done purely through manual operations to avoid disrespecting the characters.
5. Individuals who find the generated image content offensive to their values.
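For readers who work with the `diffusers` library rather than a webui, the following is a minimal sketch of how the two files could be combined. The pipeline choice, local file paths, and prompt here are assumptions for illustration, not part of the original training setup:

```python
# A hedged sketch, assuming the step-5600 files have been downloaded locally
# and that a diffusers-style workflow is acceptable for this card.
import torch
from diffusers import StableDiffusionPipeline

# The card uses Meina/MeinaMix_V11 for its preview images.
pipe = StableDiffusionPipeline.from_pretrained(
    "Meina/MeinaMix_V11", torch_dtype=torch.float16
).to("cuda")

# The .pt file acts as a textual-inversion embedding bound to the trigger word.
pipe.load_textual_inversion(
    "5600/akira_makino_onichichi.pt", token="akira_makino_onichichi"
)

# The .safetensors file carries the LoRA weights.
pipe.load_lora_weights("5600/akira_makino_onichichi.safetensors")

# Prompt built from the trigger words listed above; settings are assumptions.
image = pipe(
    "akira_makino_onichichi, blush, short_hair, purple_eyes, blue_hair",
    num_inference_steps=28,
).images[0]
image.save("preview.png")
```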
These are available steps: | Steps | Score | Download | pattern_1 | pattern_2 | pattern_3 | pattern_4 | pattern_5 | pattern_6 | pattern_7 | pattern_8 | pattern_9 | pattern_10 | pattern_11 | pattern_12 | pattern_13 | pattern_14 | pattern_15 | pattern_16 | bikini | bondage | free | maid | miko | nude | nude2 | suit | yukata | |:---------|:----------|:------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:----------------------------------------------------|:-----------------------------------------------------|:-----------------------------------------------------|:-----------------------------------------------------|:-------------------------------------------------|:-------------------------------------------------|:-----------------------------------------------------|:-----------------------------------------------------|:-----------------------------------------|:--------------------------------------------------|:-----------------------------------------------|:-------------------------------------|:-------------------------------------|:-----------------------------------------------|:------------------------------------------------|:-------------------------------------|:-------------------------------------------------| | 6000 | 0.771 | [Download](6000/akira_makino_onichichi.zip) | [<NSFW, click to see>](6000/previews/pattern_1.png) | [<NSFW, click to see>](6000/previews/pattern_2.png) | [<NSFW, click to see>](6000/previews/pattern_3.png) | [<NSFW, click to see>](6000/previews/pattern_4.png) | [<NSFW, click to see>](6000/previews/pattern_5.png) | [<NSFW, click to see>](6000/previews/pattern_6.png) | [<NSFW, click to see>](6000/previews/pattern_7.png) | [<NSFW, click to see>](6000/previews/pattern_8.png) | [<NSFW, click to see>](6000/previews/pattern_9.png) | [<NSFW, click to see>](6000/previews/pattern_10.png) | [<NSFW, click to see>](6000/previews/pattern_11.png) | [<NSFW, click to see>](6000/previews/pattern_12.png) | ![pattern_13-6000](6000/previews/pattern_13.png) | ![pattern_14-6000](6000/previews/pattern_14.png) | [<NSFW, click to see>](6000/previews/pattern_15.png) | [<NSFW, click to see>](6000/previews/pattern_16.png) | ![bikini-6000](6000/previews/bikini.png) | [<NSFW, click to see>](6000/previews/bondage.png) | [<NSFW, click to see>](6000/previews/free.png) | ![maid-6000](6000/previews/maid.png) | ![miko-6000](6000/previews/miko.png) | [<NSFW, click to see>](6000/previews/nude.png) | [<NSFW, click to see>](6000/previews/nude2.png) | ![suit-6000](6000/previews/suit.png) | [<NSFW, click to see>](6000/previews/yukata.png) | | **5600** | **0.792** | [**Download**](5600/akira_makino_onichichi.zip) | [<NSFW, click to see>](5600/previews/pattern_1.png) | [<NSFW, click to see>](5600/previews/pattern_2.png) | [<NSFW, click to see>](5600/previews/pattern_3.png) | [<NSFW, click to see>](5600/previews/pattern_4.png) | [<NSFW, click to see>](5600/previews/pattern_5.png) | [<NSFW, click to see>](5600/previews/pattern_6.png) | [<NSFW, click to see>](5600/previews/pattern_7.png) | [<NSFW, click to see>](5600/previews/pattern_8.png) | [<NSFW, click to 
see>](5600/previews/pattern_9.png) | [<NSFW, click to see>](5600/previews/pattern_10.png) | [<NSFW, click to see>](5600/previews/pattern_11.png) | [<NSFW, click to see>](5600/previews/pattern_12.png) | ![pattern_13-5600](5600/previews/pattern_13.png) | ![pattern_14-5600](5600/previews/pattern_14.png) | [<NSFW, click to see>](5600/previews/pattern_15.png) | [<NSFW, click to see>](5600/previews/pattern_16.png) | ![bikini-5600](5600/previews/bikini.png) | [<NSFW, click to see>](5600/previews/bondage.png) | [<NSFW, click to see>](5600/previews/free.png) | ![maid-5600](5600/previews/maid.png) | ![miko-5600](5600/previews/miko.png) | [<NSFW, click to see>](5600/previews/nude.png) | [<NSFW, click to see>](5600/previews/nude2.png) | ![suit-5600](5600/previews/suit.png) | [<NSFW, click to see>](5600/previews/yukata.png) | | 5200 | 0.778 | [Download](5200/akira_makino_onichichi.zip) | [<NSFW, click to see>](5200/previews/pattern_1.png) | [<NSFW, click to see>](5200/previews/pattern_2.png) | [<NSFW, click to see>](5200/previews/pattern_3.png) | [<NSFW, click to see>](5200/previews/pattern_4.png) | [<NSFW, click to see>](5200/previews/pattern_5.png) | [<NSFW, click to see>](5200/previews/pattern_6.png) | [<NSFW, click to see>](5200/previews/pattern_7.png) | [<NSFW, click to see>](5200/previews/pattern_8.png) | [<NSFW, click to see>](5200/previews/pattern_9.png) | [<NSFW, click to see>](5200/previews/pattern_10.png) | [<NSFW, click to see>](5200/previews/pattern_11.png) | [<NSFW, click to see>](5200/previews/pattern_12.png) | ![pattern_13-5200](5200/previews/pattern_13.png) | ![pattern_14-5200](5200/previews/pattern_14.png) | [<NSFW, click to see>](5200/previews/pattern_15.png) | [<NSFW, click to see>](5200/previews/pattern_16.png) | ![bikini-5200](5200/previews/bikini.png) | [<NSFW, click to see>](5200/previews/bondage.png) | [<NSFW, click to see>](5200/previews/free.png) | ![maid-5200](5200/previews/maid.png) | ![miko-5200](5200/previews/miko.png) | [<NSFW, click to see>](5200/previews/nude.png) | [<NSFW, click to see>](5200/previews/nude2.png) | ![suit-5200](5200/previews/suit.png) | [<NSFW, click to see>](5200/previews/yukata.png) | | 4800 | 0.773 | [Download](4800/akira_makino_onichichi.zip) | [<NSFW, click to see>](4800/previews/pattern_1.png) | [<NSFW, click to see>](4800/previews/pattern_2.png) | [<NSFW, click to see>](4800/previews/pattern_3.png) | [<NSFW, click to see>](4800/previews/pattern_4.png) | [<NSFW, click to see>](4800/previews/pattern_5.png) | [<NSFW, click to see>](4800/previews/pattern_6.png) | [<NSFW, click to see>](4800/previews/pattern_7.png) | [<NSFW, click to see>](4800/previews/pattern_8.png) | [<NSFW, click to see>](4800/previews/pattern_9.png) | [<NSFW, click to see>](4800/previews/pattern_10.png) | [<NSFW, click to see>](4800/previews/pattern_11.png) | [<NSFW, click to see>](4800/previews/pattern_12.png) | ![pattern_13-4800](4800/previews/pattern_13.png) | ![pattern_14-4800](4800/previews/pattern_14.png) | [<NSFW, click to see>](4800/previews/pattern_15.png) | [<NSFW, click to see>](4800/previews/pattern_16.png) | ![bikini-4800](4800/previews/bikini.png) | [<NSFW, click to see>](4800/previews/bondage.png) | [<NSFW, click to see>](4800/previews/free.png) | ![maid-4800](4800/previews/maid.png) | ![miko-4800](4800/previews/miko.png) | [<NSFW, click to see>](4800/previews/nude.png) | [<NSFW, click to see>](4800/previews/nude2.png) | ![suit-4800](4800/previews/suit.png) | [<NSFW, click to see>](4800/previews/yukata.png) | | 4400 | 0.750 | 
[Download](4400/akira_makino_onichichi.zip) | [<NSFW, click to see>](4400/previews/pattern_1.png) | [<NSFW, click to see>](4400/previews/pattern_2.png) | [<NSFW, click to see>](4400/previews/pattern_3.png) | [<NSFW, click to see>](4400/previews/pattern_4.png) | [<NSFW, click to see>](4400/previews/pattern_5.png) | [<NSFW, click to see>](4400/previews/pattern_6.png) | [<NSFW, click to see>](4400/previews/pattern_7.png) | [<NSFW, click to see>](4400/previews/pattern_8.png) | [<NSFW, click to see>](4400/previews/pattern_9.png) | [<NSFW, click to see>](4400/previews/pattern_10.png) | [<NSFW, click to see>](4400/previews/pattern_11.png) | [<NSFW, click to see>](4400/previews/pattern_12.png) | ![pattern_13-4400](4400/previews/pattern_13.png) | ![pattern_14-4400](4400/previews/pattern_14.png) | [<NSFW, click to see>](4400/previews/pattern_15.png) | [<NSFW, click to see>](4400/previews/pattern_16.png) | ![bikini-4400](4400/previews/bikini.png) | [<NSFW, click to see>](4400/previews/bondage.png) | [<NSFW, click to see>](4400/previews/free.png) | ![maid-4400](4400/previews/maid.png) | ![miko-4400](4400/previews/miko.png) | [<NSFW, click to see>](4400/previews/nude.png) | [<NSFW, click to see>](4400/previews/nude2.png) | ![suit-4400](4400/previews/suit.png) | [<NSFW, click to see>](4400/previews/yukata.png) | | 4000 | 0.713 | [Download](4000/akira_makino_onichichi.zip) | [<NSFW, click to see>](4000/previews/pattern_1.png) | [<NSFW, click to see>](4000/previews/pattern_2.png) | [<NSFW, click to see>](4000/previews/pattern_3.png) | [<NSFW, click to see>](4000/previews/pattern_4.png) | [<NSFW, click to see>](4000/previews/pattern_5.png) | [<NSFW, click to see>](4000/previews/pattern_6.png) | [<NSFW, click to see>](4000/previews/pattern_7.png) | [<NSFW, click to see>](4000/previews/pattern_8.png) | [<NSFW, click to see>](4000/previews/pattern_9.png) | [<NSFW, click to see>](4000/previews/pattern_10.png) | [<NSFW, click to see>](4000/previews/pattern_11.png) | [<NSFW, click to see>](4000/previews/pattern_12.png) | ![pattern_13-4000](4000/previews/pattern_13.png) | ![pattern_14-4000](4000/previews/pattern_14.png) | [<NSFW, click to see>](4000/previews/pattern_15.png) | [<NSFW, click to see>](4000/previews/pattern_16.png) | ![bikini-4000](4000/previews/bikini.png) | [<NSFW, click to see>](4000/previews/bondage.png) | [<NSFW, click to see>](4000/previews/free.png) | ![maid-4000](4000/previews/maid.png) | ![miko-4000](4000/previews/miko.png) | [<NSFW, click to see>](4000/previews/nude.png) | [<NSFW, click to see>](4000/previews/nude2.png) | ![suit-4000](4000/previews/suit.png) | [<NSFW, click to see>](4000/previews/yukata.png) | | 3600 | 0.743 | [Download](3600/akira_makino_onichichi.zip) | [<NSFW, click to see>](3600/previews/pattern_1.png) | [<NSFW, click to see>](3600/previews/pattern_2.png) | [<NSFW, click to see>](3600/previews/pattern_3.png) | [<NSFW, click to see>](3600/previews/pattern_4.png) | [<NSFW, click to see>](3600/previews/pattern_5.png) | [<NSFW, click to see>](3600/previews/pattern_6.png) | [<NSFW, click to see>](3600/previews/pattern_7.png) | [<NSFW, click to see>](3600/previews/pattern_8.png) | [<NSFW, click to see>](3600/previews/pattern_9.png) | [<NSFW, click to see>](3600/previews/pattern_10.png) | [<NSFW, click to see>](3600/previews/pattern_11.png) | [<NSFW, click to see>](3600/previews/pattern_12.png) | ![pattern_13-3600](3600/previews/pattern_13.png) | ![pattern_14-3600](3600/previews/pattern_14.png) | [<NSFW, click to see>](3600/previews/pattern_15.png) | [<NSFW, click to 
see>](3600/previews/pattern_16.png) | ![bikini-3600](3600/previews/bikini.png) | [<NSFW, click to see>](3600/previews/bondage.png) | [<NSFW, click to see>](3600/previews/free.png) | ![maid-3600](3600/previews/maid.png) | ![miko-3600](3600/previews/miko.png) | [<NSFW, click to see>](3600/previews/nude.png) | [<NSFW, click to see>](3600/previews/nude2.png) | ![suit-3600](3600/previews/suit.png) | [<NSFW, click to see>](3600/previews/yukata.png) | | 3200 | 0.733 | [Download](3200/akira_makino_onichichi.zip) | [<NSFW, click to see>](3200/previews/pattern_1.png) | [<NSFW, click to see>](3200/previews/pattern_2.png) | [<NSFW, click to see>](3200/previews/pattern_3.png) | [<NSFW, click to see>](3200/previews/pattern_4.png) | [<NSFW, click to see>](3200/previews/pattern_5.png) | [<NSFW, click to see>](3200/previews/pattern_6.png) | [<NSFW, click to see>](3200/previews/pattern_7.png) | [<NSFW, click to see>](3200/previews/pattern_8.png) | [<NSFW, click to see>](3200/previews/pattern_9.png) | [<NSFW, click to see>](3200/previews/pattern_10.png) | [<NSFW, click to see>](3200/previews/pattern_11.png) | [<NSFW, click to see>](3200/previews/pattern_12.png) | ![pattern_13-3200](3200/previews/pattern_13.png) | ![pattern_14-3200](3200/previews/pattern_14.png) | [<NSFW, click to see>](3200/previews/pattern_15.png) | [<NSFW, click to see>](3200/previews/pattern_16.png) | ![bikini-3200](3200/previews/bikini.png) | [<NSFW, click to see>](3200/previews/bondage.png) | [<NSFW, click to see>](3200/previews/free.png) | ![maid-3200](3200/previews/maid.png) | ![miko-3200](3200/previews/miko.png) | [<NSFW, click to see>](3200/previews/nude.png) | [<NSFW, click to see>](3200/previews/nude2.png) | ![suit-3200](3200/previews/suit.png) | [<NSFW, click to see>](3200/previews/yukata.png) | | 2800 | 0.722 | [Download](2800/akira_makino_onichichi.zip) | [<NSFW, click to see>](2800/previews/pattern_1.png) | [<NSFW, click to see>](2800/previews/pattern_2.png) | [<NSFW, click to see>](2800/previews/pattern_3.png) | [<NSFW, click to see>](2800/previews/pattern_4.png) | [<NSFW, click to see>](2800/previews/pattern_5.png) | [<NSFW, click to see>](2800/previews/pattern_6.png) | [<NSFW, click to see>](2800/previews/pattern_7.png) | [<NSFW, click to see>](2800/previews/pattern_8.png) | [<NSFW, click to see>](2800/previews/pattern_9.png) | [<NSFW, click to see>](2800/previews/pattern_10.png) | [<NSFW, click to see>](2800/previews/pattern_11.png) | [<NSFW, click to see>](2800/previews/pattern_12.png) | ![pattern_13-2800](2800/previews/pattern_13.png) | ![pattern_14-2800](2800/previews/pattern_14.png) | [<NSFW, click to see>](2800/previews/pattern_15.png) | [<NSFW, click to see>](2800/previews/pattern_16.png) | ![bikini-2800](2800/previews/bikini.png) | [<NSFW, click to see>](2800/previews/bondage.png) | [<NSFW, click to see>](2800/previews/free.png) | ![maid-2800](2800/previews/maid.png) | ![miko-2800](2800/previews/miko.png) | [<NSFW, click to see>](2800/previews/nude.png) | [<NSFW, click to see>](2800/previews/nude2.png) | ![suit-2800](2800/previews/suit.png) | [<NSFW, click to see>](2800/previews/yukata.png) | | 2400 | 0.712 | [Download](2400/akira_makino_onichichi.zip) | [<NSFW, click to see>](2400/previews/pattern_1.png) | [<NSFW, click to see>](2400/previews/pattern_2.png) | [<NSFW, click to see>](2400/previews/pattern_3.png) | [<NSFW, click to see>](2400/previews/pattern_4.png) | [<NSFW, click to see>](2400/previews/pattern_5.png) | [<NSFW, click to see>](2400/previews/pattern_6.png) | [<NSFW, click to 
see>](2400/previews/pattern_7.png) | [<NSFW, click to see>](2400/previews/pattern_8.png) | [<NSFW, click to see>](2400/previews/pattern_9.png) | [<NSFW, click to see>](2400/previews/pattern_10.png) | [<NSFW, click to see>](2400/previews/pattern_11.png) | [<NSFW, click to see>](2400/previews/pattern_12.png) | ![pattern_13-2400](2400/previews/pattern_13.png) | ![pattern_14-2400](2400/previews/pattern_14.png) | [<NSFW, click to see>](2400/previews/pattern_15.png) | [<NSFW, click to see>](2400/previews/pattern_16.png) | ![bikini-2400](2400/previews/bikini.png) | [<NSFW, click to see>](2400/previews/bondage.png) | [<NSFW, click to see>](2400/previews/free.png) | ![maid-2400](2400/previews/maid.png) | ![miko-2400](2400/previews/miko.png) | [<NSFW, click to see>](2400/previews/nude.png) | [<NSFW, click to see>](2400/previews/nude2.png) | ![suit-2400](2400/previews/suit.png) | [<NSFW, click to see>](2400/previews/yukata.png) |
| 2000 | 0.726 | [Download](2000/akira_makino_onichichi.zip) | [<NSFW, click to see>](2000/previews/pattern_1.png) | [<NSFW, click to see>](2000/previews/pattern_2.png) | [<NSFW, click to see>](2000/previews/pattern_3.png) | [<NSFW, click to see>](2000/previews/pattern_4.png) | [<NSFW, click to see>](2000/previews/pattern_5.png) | [<NSFW, click to see>](2000/previews/pattern_6.png) | [<NSFW, click to see>](2000/previews/pattern_7.png) | [<NSFW, click to see>](2000/previews/pattern_8.png) | [<NSFW, click to see>](2000/previews/pattern_9.png) | [<NSFW, click to see>](2000/previews/pattern_10.png) | [<NSFW, click to see>](2000/previews/pattern_11.png) | [<NSFW, click to see>](2000/previews/pattern_12.png) | ![pattern_13-2000](2000/previews/pattern_13.png) | ![pattern_14-2000](2000/previews/pattern_14.png) | [<NSFW, click to see>](2000/previews/pattern_15.png) | [<NSFW, click to see>](2000/previews/pattern_16.png) | ![bikini-2000](2000/previews/bikini.png) | [<NSFW, click to see>](2000/previews/bondage.png) | [<NSFW, click to see>](2000/previews/free.png) | ![maid-2000](2000/previews/maid.png) | ![miko-2000](2000/previews/miko.png) | [<NSFW, click to see>](2000/previews/nude.png) | [<NSFW, click to see>](2000/previews/nude2.png) | ![suit-2000](2000/previews/suit.png) | [<NSFW, click to see>](2000/previews/yukata.png) |
| 1600 | 0.532 | [Download](1600/akira_makino_onichichi.zip) | [<NSFW, click to see>](1600/previews/pattern_1.png) | [<NSFW, click to see>](1600/previews/pattern_2.png) | [<NSFW, click to see>](1600/previews/pattern_3.png) | [<NSFW, click to see>](1600/previews/pattern_4.png) | [<NSFW, click to see>](1600/previews/pattern_5.png) | [<NSFW, click to see>](1600/previews/pattern_6.png) | [<NSFW, click to see>](1600/previews/pattern_7.png) | [<NSFW, click to see>](1600/previews/pattern_8.png) | [<NSFW, click to see>](1600/previews/pattern_9.png) | [<NSFW, click to see>](1600/previews/pattern_10.png) | [<NSFW, click to see>](1600/previews/pattern_11.png) | [<NSFW, click to see>](1600/previews/pattern_12.png) | ![pattern_13-1600](1600/previews/pattern_13.png) | ![pattern_14-1600](1600/previews/pattern_14.png) | [<NSFW, click to see>](1600/previews/pattern_15.png) | [<NSFW, click to see>](1600/previews/pattern_16.png) | ![bikini-1600](1600/previews/bikini.png) | [<NSFW, click to see>](1600/previews/bondage.png) | [<NSFW, click to see>](1600/previews/free.png) | ![maid-1600](1600/previews/maid.png) | ![miko-1600](1600/previews/miko.png) | [<NSFW, click to see>](1600/previews/nude.png) | [<NSFW, click to see>](1600/previews/nude2.png) | ![suit-1600](1600/previews/suit.png) | [<NSFW, click to see>](1600/previews/yukata.png) |
| 1200 | 0.523 | [Download](1200/akira_makino_onichichi.zip) | [<NSFW, click to see>](1200/previews/pattern_1.png) | [<NSFW, click to see>](1200/previews/pattern_2.png) | [<NSFW, click to see>](1200/previews/pattern_3.png) | [<NSFW, click to see>](1200/previews/pattern_4.png) | [<NSFW, click to see>](1200/previews/pattern_5.png) | [<NSFW, click to see>](1200/previews/pattern_6.png) | [<NSFW, click to see>](1200/previews/pattern_7.png) | [<NSFW, click to see>](1200/previews/pattern_8.png) | [<NSFW, click to see>](1200/previews/pattern_9.png) | [<NSFW, click to see>](1200/previews/pattern_10.png) | [<NSFW, click to see>](1200/previews/pattern_11.png) | [<NSFW, click to see>](1200/previews/pattern_12.png) | ![pattern_13-1200](1200/previews/pattern_13.png) | ![pattern_14-1200](1200/previews/pattern_14.png) | [<NSFW, click to see>](1200/previews/pattern_15.png) | [<NSFW, click to see>](1200/previews/pattern_16.png) | ![bikini-1200](1200/previews/bikini.png) | [<NSFW, click to see>](1200/previews/bondage.png) | [<NSFW, click to see>](1200/previews/free.png) | ![maid-1200](1200/previews/maid.png) | ![miko-1200](1200/previews/miko.png) | [<NSFW, click to see>](1200/previews/nude.png) | [<NSFW, click to see>](1200/previews/nude2.png) | ![suit-1200](1200/previews/suit.png) | [<NSFW, click to see>](1200/previews/yukata.png) |
| 800 | 0.473 | [Download](800/akira_makino_onichichi.zip) | [<NSFW, click to see>](800/previews/pattern_1.png) | [<NSFW, click to see>](800/previews/pattern_2.png) | [<NSFW, click to see>](800/previews/pattern_3.png) | [<NSFW, click to see>](800/previews/pattern_4.png) | [<NSFW, click to see>](800/previews/pattern_5.png) | [<NSFW, click to see>](800/previews/pattern_6.png) | [<NSFW, click to see>](800/previews/pattern_7.png) | [<NSFW, click to see>](800/previews/pattern_8.png) | [<NSFW, click to see>](800/previews/pattern_9.png) | [<NSFW, click to see>](800/previews/pattern_10.png) | [<NSFW, click to see>](800/previews/pattern_11.png) | [<NSFW, click to see>](800/previews/pattern_12.png) | ![pattern_13-800](800/previews/pattern_13.png) | ![pattern_14-800](800/previews/pattern_14.png) | [<NSFW, click to see>](800/previews/pattern_15.png) | [<NSFW, click to see>](800/previews/pattern_16.png) | ![bikini-800](800/previews/bikini.png) | [<NSFW, click to see>](800/previews/bondage.png) | [<NSFW, click to see>](800/previews/free.png) | ![maid-800](800/previews/maid.png) | ![miko-800](800/previews/miko.png) | [<NSFW, click to see>](800/previews/nude.png) | [<NSFW, click to see>](800/previews/nude2.png) | ![suit-800](800/previews/suit.png) | [<NSFW, click to see>](800/previews/yukata.png) |
| 400 | 0.371 | [Download](400/akira_makino_onichichi.zip) | [<NSFW, click to see>](400/previews/pattern_1.png) | [<NSFW, click to see>](400/previews/pattern_2.png) | [<NSFW, click to see>](400/previews/pattern_3.png) | [<NSFW, click to see>](400/previews/pattern_4.png) | [<NSFW, click to see>](400/previews/pattern_5.png) | [<NSFW, click to see>](400/previews/pattern_6.png) | [<NSFW, click to see>](400/previews/pattern_7.png) | [<NSFW, click to see>](400/previews/pattern_8.png) | [<NSFW, click to see>](400/previews/pattern_9.png) | [<NSFW, click to see>](400/previews/pattern_10.png) | [<NSFW, click to see>](400/previews/pattern_11.png) | [<NSFW, click to see>](400/previews/pattern_12.png) | ![pattern_13-400](400/previews/pattern_13.png) | ![pattern_14-400](400/previews/pattern_14.png) | [<NSFW, click to see>](400/previews/pattern_15.png) | [<NSFW, click to see>](400/previews/pattern_16.png) | ![bikini-400](400/previews/bikini.png) | [<NSFW, click to see>](400/previews/bondage.png) | [<NSFW, click to see>](400/previews/free.png) | ![maid-400](400/previews/maid.png) | ![miko-400](400/previews/miko.png) | [<NSFW, click to see>](400/previews/nude.png) | [<NSFW, click to see>](400/previews/nude2.png) | ![suit-400](400/previews/suit.png) | [<NSFW, click to see>](400/previews/yukata.png) |
{"license": "mit", "tags": ["art"], "datasets": ["CyberHarem/akira_makino_onichichi"], "pipeline_tag": "text-to-image"}
text-to-image
CyberHarem/akira_makino_onichichi
[ "art", "text-to-image", "dataset:CyberHarem/akira_makino_onichichi", "license:mit", "region:us" ]
2023-11-12T18:27:36+00:00
[]
[]
TAGS #art #text-to-image #dataset-CyberHarem/akira_makino_onichichi #license-mit #region-us
Lora of akira\_makino\_onichichi ================================ This model is trained with HCP-Diffusion. The auto-training framework is maintained by the DeepGHS Team. The base model used during training is NAI, and the base model used for generating preview images is Meina/MeinaMix\_V11. After downloading the pt and safetensors files for the specified step, you need to use them together. The pt file will be used as an embedding, while the safetensors file will be loaded as a Lora. For example, if you want to use the model from step 5600, you need to download '5600/akira\_makino\_onichichi.pt' as the embedding and '5600/akira\_makino\_onichichi.safetensors' for loading the Lora. By using both files together, you can generate images of the desired characters. The best step we recommend is 5600, with a score of 0.792. The trigger words are: 1. 'akira\_makino\_onichichi' 2. 'blush, short\_hair, purple\_eyes, blue\_hair, breasts, open\_mouth, black\_hair, large\_breasts' We regret that this model is not recommended for the following groups: 1. Individuals who cannot tolerate any deviations from the original character design, even in the slightest detail. 2. Individuals who are facing application scenarios with high demands for accuracy in recreating character outfits. 3. Individuals who cannot accept the potential randomness in AI-generated images based on the Stable Diffusion algorithm. 4. Individuals who are not comfortable with the fully automated process of training character models using LoRA, or those who believe that training character models must be done purely through manual operations to avoid disrespecting the characters. 5. Individuals who find the generated image content offensive to their values. These are the available steps:
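For readers who prefer a scripted workflow over a WebUI, the sketch below shows one way the two files could be combined with the `diffusers` library. This is a minimal, hypothetical sketch: the card targets WebUI-style tooling, so whether this exact HCP-Diffusion output loads cleanly via `load_textual_inversion`/`load_lora_weights`, and whether `Meina/MeinaMix_V11` is available in diffusers format, are assumptions.

```python
# Minimal sketch (assumptions noted in comments): load the step-5600 files
# with diffusers instead of a WebUI. Untested against this exact checkpoint.
import torch
from diffusers import StableDiffusionPipeline

# Assumption: the preview base model is available in diffusers format.
pipe = StableDiffusionPipeline.from_pretrained(
    "Meina/MeinaMix_V11", torch_dtype=torch.float16
).to("cuda")

# The .pt file acts as a textual-inversion embedding bound to the trigger word.
pipe.load_textual_inversion(
    "5600/akira_makino_onichichi.pt", token="akira_makino_onichichi"
)

# The .safetensors file carries the LoRA weights.
pipe.load_lora_weights("5600", weight_name="akira_makino_onichichi.safetensors")

# Prompt uses the trigger word listed on the card.
image = pipe(
    "akira_makino_onichichi, best quality",
    num_inference_steps=30,
).images[0]
image.save("preview.png")
```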
[]
[ "TAGS\n#art #text-to-image #dataset-CyberHarem/akira_makino_onichichi #license-mit #region-us \n" ]
[ 38 ]
[ "passage: TAGS\n#art #text-to-image #dataset-CyberHarem/akira_makino_onichichi #license-mit #region-us \n" ]
[ 0.009592467918992043, 0.09018023312091827, -0.0039832983165979385, 0.11452822387218475, 0.126128688454628, 0.07561339437961578, 0.30467352271080017, 0.09213471412658691, 0.07940223813056946, -0.018436985090374947, 0.14162461459636688, 0.06716171652078629, 0.03528561070561409, 0.03555835038423538, -0.025712739676237106, -0.2749216854572296, 0.010591244325041771, -0.002435222966596484, 0.058600131422281265, 0.03264949843287468, 0.04459686577320099, -0.056327491998672485, 0.12921135127544403, -0.01145980041474104, -0.14572985470294952, -0.037943415343761444, 0.006815738044679165, -0.0615302249789238, 0.04677338898181915, 0.036687664687633514, 0.0075529273599386215, -0.00007574188930448145, 0.01812703348696232, -0.03861190751194954, 0.05854024365544319, -0.05140024051070213, -0.15171943604946136, 0.005631040781736374, 0.1277540922164917, -0.06866969168186188, 0.0622447207570076, 0.03162229433655739, -0.11316198110580444, 0.03275058791041374, -0.16246387362480164, 0.14021548628807068, -0.005706447176635265, 0.07335255295038223, 0.1997314840555191, 0.041100699454545975, 0.028711117804050446, 0.028117945417761803, -0.08094452321529388, 0.05139109492301941, 0.005729908123612404, -0.10097287595272064, -0.09356886148452759, 0.11369691789150238, 0.03156508877873421, 0.15636149048805237, -0.10915147513151169, 0.0971284955739975, -0.00549480551853776, -0.02916756644845009, -0.15456146001815796, -0.07408598810434341, 0.026983195915818214, 0.0637856125831604, 0.03625338897109032, 0.03885789215564728, 0.2875193655490875, 0.12037940323352814, 0.031665824353694916, 0.009187547490000725, -0.049291033297777176, 0.05512746050953865, -0.05023941025137901, 0.1010560542345047, -0.019410548731684685, 0.03596723824739456, -0.057884473353624344, -0.02881828136742115, -0.13839729130268097, -0.032907310873270035, -0.12322269380092621, -0.07479371130466461, -0.05734718590974808, 0.07732409238815308, -0.19140902161598206, -0.06202823668718338, -0.054743628948926926, -0.0782063901424408, 0.02756943553686142, -0.10155825316905975, 0.10994917154312134, 0.0734991803765297, 0.04040759801864624, -0.12736304104328156, 0.10759914666414261, 0.10025890916585922, 0.14963889122009277, 0.014993889257311821, -0.022871866822242737, 0.17923162877559662, 0.12595932185649872, -0.09699595719575882, -0.045692890882492065, 0.0496189258992672, 0.024428715929389, -0.06279769539833069, 0.03314488008618355, -0.10943659394979477, -0.1988101750612259, 0.02024775557219982, -0.09169016033411026, -0.006138755939900875, -0.004306488670408726, 0.023276666179299355, -0.11043056845664978, 0.010727874003350735, 0.17768573760986328, 0.00913786981254816, 0.04659426957368851, -0.01592242904007435, -0.08121981471776962, -0.02969367429614067, -0.021259168162941933, 0.0388835109770298, 0.14268265664577484, 0.0821295827627182, -0.10866542905569077, 0.04008394479751587, 0.015035312622785568, -0.0014254626585170627, 0.11988271027803421, 0.024927647784352303, 0.060281164944171906, -0.15138350427150726, -0.008626480586826801, -0.05320249870419502, 0.061215098947286606, -0.06223776191473007, 0.05061948299407959, 0.02760094776749611, -0.033278029412031174, 0.01113959401845932, 0.0039546675980091095, -0.03592408448457718, -0.1066088080406189, 0.10003341734409332, -0.1183730959892273, 0.12907494604587555, -0.10679735243320465, -0.023725416511297226, -0.083869069814682, -0.05087041109800339, -0.06449701637029648, -0.04100683704018593, -0.03018755093216896, 0.19919706881046295, 0.052038319408893585, 0.05418070778250694, -0.10727943480014801, 0.018527256324887276, 
-0.027572762221097946, 0.294740229845047, -0.1368037313222885, -0.021657248958945274, 0.12270478159189224, -0.045951563864946365, -0.1629226952791214, 0.05862230435013771, -0.06165381893515587, 0.1599491685628891, 0.044243328273296356, 0.2626454830169678, -0.13188274204730988, -0.10724575072526932, -0.02899450995028019, 0.05454230308532715, -0.09013897180557251, -0.11300674080848694, 0.08970409631729126, 0.06521926075220108, 0.039304934442043304, -0.009328423999249935, -0.03455067425966263, 0.07658015936613083, -0.09647838026285172, -0.0605776309967041, 0.046619709581136703, -0.03386669605970383, -0.041124377399683, 0.04893634840846062, 0.08286923170089722, -0.0703764259815216, -0.029593972489237785, -0.06808798760175705, -0.017958467826247215, 0.07234501838684082, 0.015914060175418854, -0.08239063620567322, 0.05898650363087654, 0.027789875864982605, -0.006114079616963863, 0.006398508790880442, 0.032067641615867615, -0.05802479386329651, 0.06682898849248886, 0.11744467169046402, -0.1123017892241478, 0.029470345005393028, -0.023014741018414497, 0.014820510521531105, 0.04139397665858269, 0.02326805889606476, 0.024441512301564217, 0.0010026509407907724, -0.15065105259418488, 0.09284984320402145, -0.013324187137186527, 0.07250914722681046, -0.06557399779558182, -0.04750817269086838, 0.20420601963996887, -0.01856199838221073, -0.027611596509814262, 0.07089420408010483, 0.02634589932858944, -0.03616940975189209, -0.08929591625928879, 0.008145741187036037, 0.11027739942073822, 0.020105263218283653, -0.11067093163728714, 0.1825101226568222, -0.051546234637498856, 0.11797241121530533, 0.1861066073179245, -0.20587587356567383, 0.012251239269971848, -0.048435598611831665, 0.0190656129270792, -0.005076702684164047, -0.017384512349963188, 0.0031998285558074713, -0.14878757297992706, -0.0569504052400589, 0.051163144409656525, -0.0718962773680687, 0.07891403883695602, 0.038172099739313126, -0.06686826050281525, -0.08798763155937195, 0.06438004225492477, 0.20225004851818085, -0.21723447740077972, 0.16674961149692535, 0.23217037320137024, 0.03246251866221428, 0.24048814177513123, 0.026229631155729294, 0.070717453956604, -0.05443739891052246, -0.04949123039841652, -0.00892825610935688, 0.21266764402389526, -0.16876766085624695, -0.018679948523640633, -0.007395059801638126, -0.04727782681584358, 0.003909802995622158, -0.12364985048770905, -0.19141101837158203, -0.07121189683675766, 0.007784583140164614, -0.05630312114953995, 0.05908577889204025, -0.0341838039457798, 0.09880510717630386, -0.05871054157614708, -0.04889466613531113, 0.08516331017017365, -0.014027739875018597, -0.025037847459316254, 0.07053899765014648, -0.10286719352006912, -0.22179442644119263, -0.0748881846666336, -0.15154235064983368, -0.11041394621133804, 0.008878889493644238, 0.07299202680587769, -0.17058967053890228, 0.025350701063871384, -0.05022236704826355, -0.13376165926456451, -0.02519179880619049, -0.08718683570623398, -0.03132890909910202, 0.05026331543922424, -0.1302187591791153, -0.05295321345329285, -0.04203501716256142, -0.022924689576029778, 0.003851755755022168, 0.2481100857257843, -0.10401110351085663, 0.19601185619831085, 0.05234256386756897, 0.028580443933606148, 0.0553646944463253, -0.0007299541612155735, 0.1710740029811859, -0.11092883348464966, 0.0840456485748291, 0.07730010151863098, 0.014550034888088703, 0.09137345850467682, 0.17612557113170624, 0.10580158233642578, -0.06935396790504456, 0.00563399400562048, -0.010588175617158413, -0.1045096144080162, -0.0638139471411705, -0.05352108180522919, -0.058693431317806244, 
0.19166837632656097, 0.07241831719875336, 0.07470757514238358, 0.2113598883152008, 0.0794740691781044, 0.041816629469394684, -0.06464296579360962, 0.11865819245576859, 0.0662732943892479, -0.046083707362413406, -0.008377933874726295, 0.046913761645555496, -0.049839701503515244, -0.027326354756951332, 0.1769176423549652, 0.13681724667549133, 0.03596643730998039, 0.1335715800523758, 0.022457486018538475, 0.07084035873413086, 0.10059549659490585, 0.11273431777954102, -0.010309607721865177, 0.07154978811740875, -0.027142368257045746, -0.08103321492671967, -0.0844060480594635, 0.15456551313400269, 0.11026979982852936, -0.05051423981785774, -0.25909796357154846, 0.040842533111572266, -0.09648565202951431, 0.07499921321868896, -0.07730011641979218, 0.02590392529964447, -0.15929880738258362, 0.08136416971683502, 0.09753783792257309, 0.08098701387643814, -0.03894650191068649, 0.10289503633975983, 0.1139923632144928, -0.10957113653421402, 0.11032119393348694, -0.02527467906475067, 0.15197321772575378, 0.07873795181512833, -0.0023600305430591106, 0.022097233682870865, -0.24855563044548035, -0.00019103004888165742, 0.055601589381694794, -0.15570619702339172, 0.20322860777378082, 0.035660985857248306, -0.05670632794499397, -0.07475220412015915, -0.10806716978549957, 0.09682346880435944, 0.1635870635509491, 0.16450464725494385, 0.03635778650641441, -0.10555991530418396, -0.06716276705265045, -0.060933034867048264, 0.0009315380011685193, 0.10827253013849258, 0.013264479115605354, -0.10021062195301056, 0.05540329962968826, -0.024091675877571106, -0.021023565903306007, 0.21503441035747528, -0.1020732969045639, -0.09752032905817032, -0.0019557401537895203, 0.05087617412209511, 0.03396514058113098, 0.0763290673494339, -0.0032469332218170166, -0.031600672751665115, -0.025004971772432327, 0.02379278838634491, 0.0446261465549469, -0.07686814665794373, -0.021624121814966202, -0.04755320027470589, -0.024217216297984123, -0.033158570528030396, -0.09106297045946121, -0.052794359624385834, -0.1153472512960434, -0.1136886477470398, 0.08491640537977219, -0.029932061210274696, 0.041590768843889236, -0.1380956619977951, -0.04791842773556709, 0.04275989532470703, 0.01341546606272459, -0.019498340785503387, 0.008958380669355392, -0.06678058207035065, -0.08945472538471222, 0.07878481596708298, -0.12626491487026215, 0.05144340544939041, -0.058444149792194366, -0.10975254327058792, -0.12785880267620087, -0.07498916983604431, -0.08881717920303345, 0.03568126633763313, 0.32595518231391907, -0.015696153044700623, 0.0940287783741951, 0.1784166395664215, -0.06690485030412674, -0.27710747718811035, -0.056653738021850586, -0.24003808200359344, -0.0160721093416214, 0.15122410655021667, -0.13684239983558655, 0.05718928948044777, 0.11210475862026215, -0.06178303062915802, 0.18798311054706573, -0.3362351655960083, -0.09367738664150238, -0.0513228178024292, 0.04573655128479004, 0.4190105199813843, -0.25633832812309265, -0.027333486825227737, -0.09899555146694183, -0.09141860902309418, 0.1809530407190323, 0.011631464585661888, 0.04107513278722763, 0.05427557975053787, 0.021992336958646774, -0.041027672588825226, 0.007540090475231409, 0.18534977734088898, 0.012254203669726849, 0.08133675903081894, -0.13147227466106415, -0.2397460639476776, 0.20239189267158508, -0.01558202225714922, -0.10476205497980118, -0.06788953393697739, -0.07167157530784607, -0.11963879317045212, 0.07882770895957947, -0.059541184455156326, 0.02638368122279644, 0.03713051602244377, -0.03928977996110916, -0.13749316334724426, 0.12943123281002045, -0.06326345354318619, 
0.04615725949406624, 0.21671469509601593, -0.02421483024954796, 0.013819174841046333, -0.05400462448596954, -0.06038516014814377, -0.09321627020835876, 0.07772403955459595, -0.1042134165763855, -0.06621630489826202, 0.08984991163015366, -0.1463904231786728, 0.03517185524106026, 0.04702681675553322, 0.018144845962524414, 0.07573489099740982, 0.02580047771334648, 0.0047157760709524155, 0.11291014403104782, 0.2098766565322876, -0.13657128810882568, -0.020638437941670418, -0.018304910510778427, 0.0011670972453430295, 0.2535737454891205, -0.06202135607600212, 0.07669336348772049, 0.03434842452406883, -0.0030304635874927044, 0.0003431638178881258, 0.10495077073574066, -0.06604664772748947, -0.12603330612182617, 0.019456537440419197, -0.08544450998306274, -0.046107321977615356, 0.12499668449163437, 0.1191922053694725, -0.15532171726226807, -0.06058153510093689, 0.12411026656627655, -0.04346552863717079, -0.0652281641960144, -0.040562670677900314, 0.08680941164493561, -0.13250377774238586, -0.02654763124883175, -0.02628503181040287, 0.030590256676077843, -0.06457727402448654, 0.10397501289844513, 0.010114645585417747, 0.0029855016618967056, 0.10256616026163101, -0.013297675177454948, 0.014817751944065094, -0.020165225490927696, -0.007910330779850483, 0.005193508230149746, -0.07129588723182678, -0.15625642240047455, 0.06030812859535217, 0.13065649569034576, -0.05435498058795929, -0.07141365855932236, -0.18934419751167297, 0.012994061224162579, 0.039246123284101486, 0.033834632486104965, -0.13111045956611633, -0.08208125829696655, -0.03323676437139511, -0.009546414948999882, -0.12601590156555176, -0.12021255493164062, -0.09820833802223206, 0.012243373319506645, 0.07203580439090729, 0.05521306395530701, -0.06728112697601318, -0.05579648166894913, 0.13092240691184998, -0.009916139766573906, 0.05476350337266922, 0.08803476393222809, -0.07675129920244217, -0.02554916776716709, -0.2201688140630722, -0.023479221388697624, 0.055913295596838, -0.01950954832136631, 0.0009868090273812413, 0.1212322860956192, -0.008108995854854584, 0.028933020308613777, 0.06018223240971565, 0.017765913158655167, 0.0400519035756588, -0.05702792480587959, -0.01086270622909069, -0.10072658956050873, -0.14728103578090668, -0.10757223516702652, 0.042374562472105026, 0.20225954055786133, -0.0509418323636055, 0.05932886153459549, 0.014948029071092606, 0.08148153871297836, -0.03846514970064163, 0.041947539895772934, 0.043233148753643036, -0.1484304815530777, -0.0813823789358139, -0.11088438332080841, -0.05573008209466934, -0.07487273961305618, 0.2188652604818344, 0.11907059699296951, -0.24421314895153046, 0.034532029181718826, 0.14635717868804932, -0.1693844348192215, 0.0366935096681118, 0.2584995627403259, -0.02796204946935177, -0.018278952687978745, -0.06712280958890915, 0.09430527687072754, -0.015568660572171211, 0.08156275749206543, 0.03532261401414871, 0.1271892935037613, 0.06949231028556824, 0.04194461181759834, 0.08402475714683533, 0.020480895414948463, -0.0071527292020618916, -0.029865052551031113, 0.027495745569467545, 0.06875082105398178, -0.036346834152936935, -0.04171552136540413, 0.18930557370185852, -0.04955246299505234, 0.049143869429826736, -0.06051890179514885, -0.04703911021351814, -0.034891266375780106, -0.21881544589996338, -0.07102004438638687, -0.1442425549030304, 0.09301893413066864, -0.024385804310441017, 0.054718971252441406, 0.14985822141170502, 0.04294731095433235, -0.08169509470462799, -0.011588430032134056, -0.13656707108020782, -0.05903090164065361, 0.09474611282348633, -0.053463295102119446, 
0.00767505681142211, -0.021707376465201378, -0.043692439794540405, -0.02593020536005497, -0.06113532558083534, -0.03780025243759155, 0.057327382266521454, 0.07915786653757095, 0.020966103300452232, -0.16830666363239288, -0.14656396210193634, -0.045351527631282806, -0.017241796478629112, -0.03339827060699463, 0.20402644574642181, 0.01001681573688984, 0.06336787343025208, 0.03689093887805939, 0.09046704322099686, 0.06618159264326096, 0.09851068258285522, -0.04635722190141678, -0.09865467995405197, -0.10765659064054489, -0.00224942434579134, -0.017815731465816498, -0.031520094722509384, -0.008663377724587917, 0.1870291382074356, 0.22507140040397644, -0.19675862789154053, -0.05757227540016174, 0.0070692189037799835, 0.020178642123937607, 0.05073937773704529, 0.10513582825660706, -0.03542112559080124, 0.2159859836101532, -0.055639952421188354, 0.01994357816874981, -0.08527983725070953, -0.05777880549430847, -0.024589061737060547, 0.011118120513856411, 0.12392498552799225, -0.05228368565440178, -0.07973945885896683, 0.205053448677063, -0.1683156043291092, 0.061210013926029205, 0.17710712552070618, -0.13221247494220734, -0.007588420528918505, 0.06147991493344307, 0.0512361116707325, 0.05542080104351044, 0.11478467285633087, -0.11840828508138657, -0.01729429140686989, -0.07293376326560974, 0.0671868696808815, -0.20223119854927063, -0.09041675180196762, -0.019760390743613243, -0.15887148678302765, 0.22500841319561005, -0.03676261380314827, 0.041947294026613235, 0.052843786776065826, -0.009376480244100094, -0.014029421843588352, 0.03422005847096443, 0.005154280457645655, 0.08665050566196442, -0.1283119022846222, 0.005942375864833593, 0.009352711960673332, -0.07558949291706085, 0.0850151926279068, 0.03603357821702957, 0.04229269176721573, 0.07830134779214859, -0.03679800406098366, -0.06381446123123169, 0.13612625002861023, -0.1615278422832489, 0.08876363933086395, -0.005000370554625988, 0.03160158544778824, -0.08280495554208755, -0.009441354312002659, 0.02740909717977047, 0.06401407718658447, -0.16123709082603455, -0.057056114077568054, 0.03399980440735817, -0.07996171712875366, -0.052792008966207504, 0.08818966895341873, -0.14139851927757263, -0.011887256987392902, -0.13632138073444366, 0.029157182201743126, -0.11528283357620239, 0.07174066454172134, 0.1521328240633011, -0.0722339004278183, 0.007058579474687576, -0.0663214772939682, 0.09083384275436401, -0.024106936529278755, 0.031179320067167282, -0.12242471426725388 ]
null
null
stable-baselines3
# **DQN** Agent playing **SpaceInvadersNoFrameskip-v4**

This is a trained model of a **DQN** agent playing **SpaceInvadersNoFrameskip-v4** using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3) and the [RL Zoo](https://github.com/DLR-RM/rl-baselines3-zoo).

The RL Zoo is a training framework for Stable Baselines3 reinforcement learning agents, with hyperparameter optimization and pre-trained agents included.

## Usage (with SB3 RL Zoo)

RL Zoo: https://github.com/DLR-RM/rl-baselines3-zoo<br/>
SB3: https://github.com/DLR-RM/stable-baselines3<br/>
SB3 Contrib: https://github.com/Stable-Baselines-Team/stable-baselines3-contrib

Install the RL Zoo (with SB3 and SB3-Contrib):

```bash
pip install rl_zoo3
```

```
# Download model and save it into the logs/ folder
python -m rl_zoo3.load_from_hub --algo dqn --env SpaceInvadersNoFrameskip-v4 -orga VenomAI -f logs/
python -m rl_zoo3.enjoy --algo dqn --env SpaceInvadersNoFrameskip-v4 -f logs/
```

If you installed the RL Zoo3 via pip (`pip install rl_zoo3`), from anywhere you can do:

```
python -m rl_zoo3.load_from_hub --algo dqn --env SpaceInvadersNoFrameskip-v4 -orga VenomAI -f logs/
python -m rl_zoo3.enjoy --algo dqn --env SpaceInvadersNoFrameskip-v4 -f logs/
```

## Training (with the RL Zoo)

```
python -m rl_zoo3.train --algo dqn --env SpaceInvadersNoFrameskip-v4 -f logs/
# Upload the model and generate video (when possible)
python -m rl_zoo3.push_to_hub --algo dqn --env SpaceInvadersNoFrameskip-v4 -f logs/ -orga VenomAI
```

## Hyperparameters

```python
OrderedDict([('batch_size', 32),
             ('buffer_size', 100000),
             ('env_wrapper', ['stable_baselines3.common.atari_wrappers.AtariWrapper']),
             ('exploration_final_eps', 0.01),
             ('exploration_fraction', 0.1),
             ('frame_stack', 4),
             ('gradient_steps', 1),
             ('learning_rate', 0.0001),
             ('learning_starts', 100000),
             ('n_timesteps', 1000000.0),
             ('optimize_memory_usage', False),
             ('policy', 'CnnPolicy'),
             ('target_update_interval', 1000),
             ('train_freq', 4),
             ('normalize', False)])
```

# Environment Arguments

```python
{'render_mode': 'rgb_array'}
```
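Outside the RL Zoo CLI, the checkpoint can also be loaded programmatically. Below is a minimal sketch assuming the zip inside this repo follows the usual RL Zoo naming (`dqn-SpaceInvadersNoFrameskip-v4.zip` is a guess) and that SB3 version differences are papered over with `custom_objects`:

```python
# Minimal sketch: load the trained agent directly with SB3 + huggingface_sb3.
from huggingface_sb3 import load_from_hub
from stable_baselines3 import DQN
from stable_baselines3.common.env_util import make_atari_env
from stable_baselines3.common.vec_env import VecFrameStack

checkpoint = load_from_hub(
    repo_id="VenomAI/DQN-SpaceInvadersNFS-v4",
    filename="dqn-SpaceInvadersNoFrameskip-v4.zip",  # assumed RL Zoo naming
)

# custom_objects avoids unpickling errors across SB3 versions; the schedules
# are irrelevant at inference time.
model = DQN.load(
    checkpoint,
    custom_objects={
        "learning_rate": 0.0,
        "lr_schedule": lambda _: 0.0,
        "exploration_schedule": lambda _: 0.0,
    },
)

# Recreate the training-time preprocessing: Atari wrappers + 4-frame stack,
# matching the 'env_wrapper' and 'frame_stack' hyperparameters above.
env = VecFrameStack(make_atari_env("SpaceInvadersNoFrameskip-v4", n_envs=1), n_stack=4)
obs = env.reset()
for _ in range(1000):
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
```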
{"library_name": "stable-baselines3", "tags": ["SpaceInvadersNoFrameskip-v4", "deep-reinforcement-learning", "reinforcement-learning", "stable-baselines3"], "model-index": [{"name": "DQN", "results": [{"task": {"type": "reinforcement-learning", "name": "reinforcement-learning"}, "dataset": {"name": "SpaceInvadersNoFrameskip-v4", "type": "SpaceInvadersNoFrameskip-v4"}, "metrics": [{"type": "mean_reward", "value": "511.00 +/- 187.89", "name": "mean_reward", "verified": false}]}]}]}
reinforcement-learning
VenomAI/DQN-SpaceInvadersNFS-v4
[ "stable-baselines3", "SpaceInvadersNoFrameskip-v4", "deep-reinforcement-learning", "reinforcement-learning", "model-index", "region:us" ]
2023-11-12T18:30:13+00:00
[]
[]
TAGS #stable-baselines3 #SpaceInvadersNoFrameskip-v4 #deep-reinforcement-learning #reinforcement-learning #model-index #region-us
# DQN Agent playing SpaceInvadersNoFrameskip-v4 This is a trained model of a DQN agent playing SpaceInvadersNoFrameskip-v4 using the stable-baselines3 library and the RL Zoo. The RL Zoo is a training framework for Stable Baselines3 reinforcement learning agents, with hyperparameter optimization and pre-trained agents included. ## Usage (with SB3 RL Zoo) RL Zoo: URL SB3: URL SB3 Contrib: URL Install the RL Zoo (with SB3 and SB3-Contrib): If you installed the RL Zoo3 via pip ('pip install rl_zoo3'), from anywhere you can do: ## Training (with the RL Zoo) ## Hyperparameters # Environment Arguments
[ "# DQN Agent playing SpaceInvadersNoFrameskip-v4\nThis is a trained model of a DQN agent playing SpaceInvadersNoFrameskip-v4\nusing the stable-baselines3 library\nand the RL Zoo.\n\nThe RL Zoo is a training framework for Stable Baselines3\nreinforcement learning agents,\nwith hyperparameter optimization and pre-trained agents included.", "## Usage (with SB3 RL Zoo)\n\nRL Zoo: URL\nSB3: URL\nSB3 Contrib: URL\n\nInstall the RL Zoo (with SB3 and SB3-Contrib):\n\n\n\n\nIf you installed the RL Zoo3 via pip ('pip install rl_zoo3'), from anywhere you can do:", "## Training (with the RL Zoo)", "## Hyperparameters", "# Environment Arguments" ]
[ "TAGS\n#stable-baselines3 #SpaceInvadersNoFrameskip-v4 #deep-reinforcement-learning #reinforcement-learning #model-index #region-us \n", "# DQN Agent playing SpaceInvadersNoFrameskip-v4\nThis is a trained model of a DQN agent playing SpaceInvadersNoFrameskip-v4\nusing the stable-baselines3 library\nand the RL Zoo.\n\nThe RL Zoo is a training framework for Stable Baselines3\nreinforcement learning agents,\nwith hyperparameter optimization and pre-trained agents included.", "## Usage (with SB3 RL Zoo)\n\nRL Zoo: URL\nSB3: URL\nSB3 Contrib: URL\n\nInstall the RL Zoo (with SB3 and SB3-Contrib):\n\n\n\n\nIf you installed the RL Zoo3 via pip ('pip install rl_zoo3'), from anywhere you can do:", "## Training (with the RL Zoo)", "## Hyperparameters", "# Environment Arguments" ]
[ 43, 90, 73, 9, 5, 7 ]
[ "passage: TAGS\n#stable-baselines3 #SpaceInvadersNoFrameskip-v4 #deep-reinforcement-learning #reinforcement-learning #model-index #region-us \n# DQN Agent playing SpaceInvadersNoFrameskip-v4\nThis is a trained model of a DQN agent playing SpaceInvadersNoFrameskip-v4\nusing the stable-baselines3 library\nand the RL Zoo.\n\nThe RL Zoo is a training framework for Stable Baselines3\nreinforcement learning agents,\nwith hyperparameter optimization and pre-trained agents included.## Usage (with SB3 RL Zoo)\n\nRL Zoo: URL\nSB3: URL\nSB3 Contrib: URL\n\nInstall the RL Zoo (with SB3 and SB3-Contrib):\n\n\n\n\nIf you installed the RL Zoo3 via pip ('pip install rl_zoo3'), from anywhere you can do:## Training (with the RL Zoo)## Hyperparameters# Environment Arguments" ]
[ 0.043572068214416504, 0.2414778620004654, -0.0026879787910729647, 0.012635791674256325, 0.05784223601222038, 0.0030472534708678722, 0.08585051447153091, 0.10650663822889328, 0.024212315678596497, -0.001382096204906702, 0.003954293206334114, 0.17533031105995178, 0.03632635250687599, 0.13125447928905487, -0.018073517829179764, -0.2066594809293747, -0.013479253277182579, -0.06247470900416374, -0.07153085619211197, 0.036099132150411606, 0.07206681370735168, -0.030116932466626167, 0.036061208695173264, -0.051406677812337875, -0.057161085307598114, 0.036824777722358704, -0.03157254680991173, 0.007067287806421518, 0.15158706903457642, -0.1222257912158966, 0.12329676002264023, 0.020955175161361694, 0.1896144151687622, -0.12332789599895477, 0.0339222252368927, 0.08982209116220474, -0.036988191306591034, 0.013221588917076588, 0.00975361280143261, -0.052562564611434937, 0.1590864509344101, -0.09371145814657211, 0.07146181166172028, 0.010926910676062107, -0.07592244446277618, -0.1774153709411621, -0.09356249868869781, 0.07947742193937302, 0.0617753230035305, 0.005319166928529739, 0.03726791962981224, 0.11306490749120712, -0.020991774275898933, 0.06488905102014542, 0.11562903225421906, -0.17549200356006622, 0.013578375801444054, 0.17859570682048798, 0.003242473118007183, 0.15767055749893188, -0.05546637624502182, 0.019877681508660316, 0.02752300351858139, 0.04758313298225403, 0.06873945891857147, -0.08186400681734085, -0.1364826112985611, -0.056155186146497726, -0.15456219017505646, -0.03352400287985802, 0.05195203423500061, -0.011860138736665249, -0.05783402919769287, -0.010724928230047226, -0.04010869935154915, 0.0008851495804265141, -0.028637725859880447, 0.01805497519671917, 0.07031578570604324, -0.01226285845041275, 0.02092539705336094, -0.08391954004764557, -0.0390290804207325, -0.038563769310712814, -0.018022390082478523, 0.12054917961359024, 0.08285853266716003, 0.0266572255641222, -0.04135355353355408, 0.10274127870798111, -0.07091585546731949, -0.05454207584261894, 0.04555258899927139, -0.03786851093173027, -0.10615779459476471, 0.02120024710893631, -0.05905991420149803, 0.026879185810685158, 0.09943640232086182, 0.18048083782196045, -0.09862488508224487, 0.012620617635548115, -0.03430783003568649, 0.08121664822101593, -0.03196052461862564, 0.03197542577981949, -0.0840383991599083, -0.016251085326075554, 0.17835216224193573, 0.0030782297253608704, 0.022272996604442596, 0.002074616262689233, -0.049819961190223694, -0.02881433069705963, -0.017756454646587372, 0.06631895154714584, 0.07032092660665512, 0.010587303899228573, -0.0037596761249005795, -0.027667716145515442, -0.036921944469213486, -0.05629328638315201, -0.04952820762991905, 0.018803736194968224, -0.04712437093257904, -0.047942135483026505, 0.06027210131287575, -0.005624116864055395, 0.11337806284427643, -0.025607796385884285, 0.026316547766327858, -0.019410157576203346, -0.07494441419839859, -0.13221681118011475, -0.0304415225982666, 0.0691632330417633, 0.04371757060289383, -0.22497159242630005, -0.16994807124137878, -0.008539012633264065, 0.017946386709809303, -0.018741264939308167, -0.11334165185689926, 0.02453240379691124, -0.007166135590523481, -0.049758363515138626, -0.01601579785346985, 0.10474669933319092, -0.020438622683286667, 0.018010856583714485, -0.05593825876712799, 0.16603368520736694, -0.14290283620357513, 0.031004127115011215, -0.08706212788820267, 0.023509707301855087, -0.21286657452583313, 0.041208744049072266, -0.177636057138443, 0.04863585904240608, -0.08500861376523972, 0.02327173389494419, 0.021320728585124016, 
0.01968831568956375, 0.08580207824707031, 0.10143322497606277, -0.23631145060062408, 0.05405791476368904, 0.07900930196046829, -0.022739801555871964, -0.04218491166830063, 0.06798892468214035, -0.06558530032634735, 0.1382148116827011, 0.046505436301231384, 0.24831900000572205, 0.10361487418413162, -0.2036508023738861, 0.061786454170942307, 0.0578593946993351, -0.08880111575126648, -0.004730981774628162, -0.020022382959723473, 0.11598580330610275, -0.01114928349852562, 0.03338807821273804, -0.12186288088560104, 0.1456439197063446, 0.02738998830318451, -0.0165485180914402, -0.04454165697097778, -0.1614885926246643, 0.10309953987598419, -0.015504824928939342, 0.09532155096530914, -0.042415786534547806, 0.0001161050095106475, -0.011168917641043663, 0.18012429773807526, -0.043841805309057236, 0.0007168867159634829, 0.07871408760547638, 0.10895700752735138, 0.028009075671434402, -0.020230965688824654, -0.20380273461341858, -0.0423048660159111, 0.02367858961224556, 0.044489551335573196, 0.2190362960100174, 0.19936694204807281, 0.07770156860351562, -0.022313760593533516, -0.025487221777439117, -0.003248062450438738, -0.05106664076447487, 0.03467361256480217, -0.027858436107635498, -0.024532482028007507, 0.06065356358885765, -0.09305168688297272, 0.02817818708717823, -0.13112716376781464, 0.06307920068502426, -0.17345242202281952, 0.06863926351070404, 0.021998396143317223, -0.005436043255031109, 0.024577690288424492, -0.011292695067822933, -0.034188106656074524, -0.06233125180006027, 0.07110602408647537, 0.06098933145403862, 0.014702376909554005, 0.0021991983521729708, -0.0683600977063179, -0.13828523457050323, 0.08231553435325623, -0.04042381793260574, -0.14305958151817322, 0.06392676383256912, 0.011172642931342125, 0.04875864461064339, -0.05975872278213501, 0.016254881396889687, 0.22900153696537018, 0.05321883037686348, 0.09785865992307663, -0.04092191904783249, -0.022525805979967117, -0.06617844104766846, -0.06677833944559097, 0.09694591909646988, 0.10812206566333771, 0.060318704694509506, -0.0030071530491113663, 0.07626225054264069, 0.10942911356687546, -0.1035122498869896, -0.0651884600520134, 0.03220061957836151, -0.05973697826266289, 0.019652515649795532, 0.049140311777591705, 0.02971293032169342, 0.08619047701358795, 0.1833551675081253, 0.008245792239904404, 0.0386311337351799, -0.025997694581747055, 0.026109617203474045, -0.15547916293144226, -0.03145433962345123, 0.04308181628584862, 0.00886955764144659, -0.07408110797405243, 0.04994636029005051, 0.051439400762319565, 0.13607151806354523, -0.08217083662748337, -0.13170577585697174, -0.059745315462350845, -0.03804200142621994, -0.04239124804735184, 0.14975430071353912, -0.08507520705461502, -0.19221234321594238, -0.017164425924420357, -0.15751953423023224, -0.02518727444112301, -0.005179801490157843, 0.002318724524229765, -0.08325926214456558, 0.017780914902687073, 0.010001576505601406, -0.03129372000694275, -0.0684933215379715, -0.06596160680055618, -0.05786636844277382, 0.09124112874269485, 0.06932931393384933, -0.12240120023488998, -0.00961651187390089, -0.03742414712905884, -0.020465577021241188, 0.04516167193651199, 0.08452648669481277, -0.007267598994076252, 0.07773483544588089, -0.13209199905395508, -0.06962883472442627, 0.02834828943014145, 0.2766247093677521, 0.02882981114089489, 0.004668009467422962, 0.17051753401756287, -0.03629542142152786, 0.04912714660167694, 0.16181479394435883, 0.030781643465161324, -0.14196757972240448, 0.07090470939874649, -0.011341600678861141, -0.09542687982320786, -0.1706860214471817, 
-0.10215658694505692, -0.037867411971092224, -0.05015881359577179, 0.05638284236192703, 0.004951419774442911, -0.04476970434188843, 0.05910305306315422, 0.08782228082418442, -0.017004497349262238, -0.06151578947901726, 0.11129767447710037, 0.032263003289699554, -0.030136963352560997, 0.08078382909297943, -0.042354047298431396, -0.04206389561295509, 0.0032403599470853806, 0.22643887996673584, 0.0937788337469101, -0.01775507442653179, -0.042567066848278046, 0.019317636266350746, 0.05095715448260307, 0.03613382205367088, 0.11312435567378998, -0.06975842267274857, -0.06826137751340866, -0.035185977816581726, 0.027829548344016075, -0.02945687249302864, 0.08205190300941467, 0.0630207508802414, 0.005563626065850258, -0.04653681069612503, -0.07972332090139389, -0.04849022626876831, 0.08408913016319275, -0.027642227709293365, -0.10093270242214203, 0.09321888536214828, 0.048575710505247116, 0.0016974330646917224, 0.03055831417441368, 0.027994604781270027, 0.01462269201874733, -0.07982148975133896, -0.06775744259357452, 0.011468625627458096, 0.07076629996299744, -0.06822766363620758, -0.027886953204870224, -0.19817815721035004, 0.14578363299369812, 0.010630400851368904, 0.04118429124355316, -0.13048617541790009, 0.1209396943449974, -0.023116756230592728, -0.026430301368236542, 0.013811616227030754, 0.0014643745962530375, 0.08203291147947311, -0.04806509613990784, 0.15762180089950562, 0.009528410620987415, -0.28092408180236816, -0.1418946087360382, -0.08416824042797089, -0.051183976233005524, -0.022873088717460632, 0.014752174727618694, 0.0642135739326477, 0.01516205258667469, 0.003868846921250224, -0.013076163828372955, 0.03185269236564636, -0.09826882928609848, -0.06493937969207764, -0.04839126765727997, -0.02250157669186592, -0.06525848805904388, -0.05647949501872063, -0.0006809153710491955, -0.17226077616214752, 0.12522587180137634, 0.11787347495555878, -0.06451737880706787, -0.041814323514699936, -0.06554657220840454, 0.046191465109586716, -0.07571537792682648, 0.0469326451420784, 0.003414976177737117, 0.019198855385184288, -0.06806991249322891, -0.17922484874725342, 0.016097763553261757, -0.10899919271469116, 0.03772687539458275, -0.05070559307932854, 0.020257100462913513, 0.08594245463609695, 0.17520126700401306, 0.05856714025139809, 0.01460097823292017, -0.07239776104688644, -0.07543374598026276, -0.0017121878918260336, -0.06344114243984222, 0.05762333422899246, -0.009151889942586422, -0.20333483815193176, 0.02763226442039013, -0.11414948850870132, 0.06860900670289993, 0.3310066759586334, 0.3324824273586273, -0.10698744654655457, 0.1177443116903305, 0.04819539934396744, -0.042202454060316086, -0.21051374077796936, -0.002244179602712393, 0.012272895313799381, 0.024992236867547035, 0.13725964725017548, -0.12924811244010925, 0.05453680083155632, 0.0794181227684021, -0.024458877742290497, 0.01456840243190527, -0.09078162908554077, -0.10816970467567444, 0.20847418904304504, 0.14226987957954407, 0.04421741142868996, -0.09421348571777344, 0.08391669392585754, 0.004295284394174814, 0.08375877887010574, 0.2107764035463333, -0.052112679928541183, 0.10695768147706985, 0.005195184610784054, 0.19852910935878754, 0.0328996516764164, -0.023768596351146698, 0.10834760218858719, -0.009801650419831276, 0.07911337912082672, 0.03985166177153587, -0.007676942739635706, 0.010487722232937813, -0.04522453248500824, 0.014148596674203873, -0.028376007452607155, 0.010284217074513435, -0.2274095118045807, 0.0582297146320343, -0.06368855386972427, 0.04604509472846985, 0.008256820961833, -0.0999874547123909, 
-0.03583388403058052, 0.06431841105222702, 0.08014573156833649, 0.01975327916443348, 0.0436067171394825, -0.03867863491177559, 0.11051398515701294, 0.20660489797592163, -0.009811338968575, 0.17751595377922058, -0.0615963339805603, 0.01464168168604374, -0.023011628538370132, -0.04223164543509483, -0.1462583988904953, -0.035259708762168884, 0.03498423472046852, 0.057734888046979904, 0.015203364193439484, 0.049647457897663116, -0.05656236410140991, 0.08498423546552658, 0.021687336266040802, -0.041541360318660736, 0.033579520881175995, 0.08835696429014206, 0.12415177375078201, 0.010754258371889591, -0.030121933668851852, 0.06147436052560806, -0.08128108084201813, -0.09446098655462265, -0.004497923422604799, -0.029991207644343376, -0.1083834245800972, 0.11353230476379395, 0.16914646327495575, 0.039594944566488266, -0.057076629251241684, 0.10688766092061996, -0.02768099494278431, 0.10047874599695206, 0.009198128245770931, 0.06507332623004913, -0.014091075398027897, -0.03691792115569115, 0.10611724853515625, -0.05442855879664421, -0.01637818105518818, 0.07645545154809952, -0.06522727757692337, -0.023877469822764397, -0.0801999643445015, 0.06034626066684723, 0.09222240000963211, -0.16854619979858398, -0.0639432892203331, -0.032122284173965454, -0.08628080040216446, 0.013965039514005184, 0.012447911314666271, 0.0710059329867363, -0.08589600026607513, 0.06316167116165161, -0.024337708950042725, 0.015639442950487137, -0.03689891844987869, 0.019222697243094444, -0.19525384902954102, -0.002140450058504939, -0.11280795186758041, -0.00348020251840353, -0.002931603929027915, 0.04463808611035347, -0.04961875081062317, -0.029358822852373123, -0.0030675032176077366, 0.044366419315338135, -0.16609135270118713, 0.002798673929646611, -0.011639905162155628, 0.03210212290287018, -0.0002893915225286037, -0.0983390137553215, 0.014195028692483902, -0.04294256120920181, -0.04198618605732918, 0.04925514757633209, 0.009436776861548424, 0.06470516324043274, -0.2795179784297943, -0.14905457198619843, 0.030816160142421722, 0.0683867484331131, 0.05483196675777435, -0.1830425262451172, 0.03568267077207565, -0.08042316138744354, -0.02253127470612526, -0.037770628929138184, 0.018491698428988457, -0.0539514496922493, 0.0018174031283706427, -0.04225044324994087, -0.023033907637000084, -0.028055014088749886, -0.07556360960006714, 0.0826747715473175, 0.12462522834539413, 0.07555580884218216, -0.03807181864976883, 0.09595896303653717, -0.10009756684303284, -0.04657831788063049, -0.04052736237645149, -0.036951083689928055, 0.017965637147426605, -0.0870552659034729, 0.048530060797929764, 0.05188591405749321, 0.18719671666622162, -0.08520494401454926, -0.058800119906663895, -0.014255574904382229, 0.0746525228023529, 0.07849094271659851, 0.005095830652862787, 0.17779210209846497, -0.045693784952163696, 0.05693846940994263, 0.021304311230778694, 0.046699028462171555, 0.10497613251209259, -0.023569339886307716, 0.14490213990211487, 0.21171095967292786, -0.037196725606918335, -0.11048602312803268, 0.043668005615472794, 0.01745123788714409, -0.002401199424639344, 0.05968761444091797, 0.11983796209096909, -0.050589341670274734, -0.10903856158256531, 0.23442286252975464, 0.054169271141290665, -0.11218088120222092, 0.09546315670013428, 0.039532262831926346, -0.015890996903181076, -0.1301896870136261, 0.010444961488246918, -0.0013640925753861666, -0.11233190447092056, 0.03386834263801575, -0.06087532266974449, -0.025547027587890625, 0.11809267848730087, 0.008789865300059319, 0.03317064419388771, -0.04139537364244461, -0.03756232187151909, 
-0.04352104663848877, -0.04273213446140289, -0.012549578212201595, -0.02991986647248268, -0.030186517164111137, -0.07621737569570541, -0.007770835887640715, -0.012012424878776073, 0.030795488506555557, -0.015285328030586243, -0.02503054589033127, -0.021192016080021858, -0.06697061657905579, -0.0026312144473195076, -0.008178025484085083, 0.015549594536423683, 0.010121971368789673, 0.2358063906431198, 0.07042546570301056, -0.10260069370269775, -0.01036880537867546, 0.22197756171226501, -0.03853277862071991, -0.06528383493423462, -0.07849395275115967, 0.25128230452537537, -0.10482002794742584, 0.051095426082611084, -0.005819917656481266, -0.06550488620996475, -0.07153836637735367, 0.2309868484735489, 0.13502730429172516, -0.1677926480770111, 0.06329060345888138, -0.0368385910987854, -0.009490780532360077, -0.14286863803863525, 0.16013580560684204, 0.1865294873714447, 0.09480160474777222, -0.12259847670793533, 0.0023130534682422876, -0.03518044203519821, -0.018328361213207245, -0.1660851687192917, -0.004593863617628813, -0.029364850372076035, -0.0427238829433918, -0.050771355628967285, 0.029773715883493423, -0.15205919742584229, -0.0927426889538765, -0.1916799396276474, -0.11482496559619904, -0.12386849522590637, -0.04549141973257065, -0.11142764985561371, -0.0019938007462769747, 0.02257080189883709, -0.0641874223947525, 0.021061956882476807, -0.0212461706250906, -0.05887424945831299, 0.015386379323899746, -0.08395619690418243, 0.0674985870718956, 0.06488548219203949, 0.15327942371368408, -0.0790991559624672, 0.025424562394618988, 0.07090727984905243, -0.057595450431108475, -0.10164349526166916, 0.06067253649234772, 0.015708057209849358, -0.1972588747739792, 0.007548294495791197, 0.17712996900081635, -0.10420889407396317, 0.09745754301548004, 0.048501528799533844, -0.012951982207596302, 0.0867827981710434, -0.024721821770071983, -0.016682926565408707, -0.04852180927991867, -0.011212974786758423, -0.10143939405679703, 0.09892100840806961, 0.0876845121383667, -0.0517118014395237, 0.07436849176883698, -0.09508965909481049, -0.04068392515182495, 0.13103286921977997, -0.010057874955236912, -0.08450483530759811, -0.11667824536561966, -0.04081142693758011, 0.09684515744447708, -0.018041390925645828, -0.20185889303684235, -0.11639472097158432, -0.11752668023109436, -0.00014377340266946703, -0.03563340753316879, 0.061800602823495865, 0.02430674433708191, -0.02556120604276657, -0.008150683715939522, -0.17615078389644623, -0.06614746153354645, 0.13479791581630707, -0.10176112502813339, -0.07456064969301224 ]
null
null
transformers
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# arabert-fully-supervised-arabic-propaganda

This model is a fine-tuned version of [aubmindlab/bert-base-arabertv02-twitter](https://huggingface.co/aubmindlab/bert-base-arabertv02-twitter) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4417
- Accuracy: 0.9167
- Precision: 0.5577
- Recall: 0.7073
- F1: 0.6237

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.2
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1     |
|:-------------:|:-----:|:----:|:---------------:|:--------:|:---------:|:------:|:------:|
| 0.5249        | 1.0   | 20   | 0.4933          | 0.7714   | 0.2901    | 0.9268 | 0.4419 |
| 0.303         | 2.0   | 40   | 0.3490          | 0.8571   | 0.3933    | 0.8537 | 0.5385 |
| 0.1552        | 3.0   | 60   | 0.3830          | 0.9048   | 0.5085    | 0.7317 | 0.6    |
| 0.1411        | 4.0   | 80   | 0.4215          | 0.9143   | 0.5455    | 0.7317 | 0.6250 |
| 0.1359        | 5.0   | 100  | 0.4417          | 0.9167   | 0.5577    | 0.7073 | 0.6237 |

### Framework versions

- Transformers 4.35.0
- Pytorch 2.1.0+cu121
- Datasets 2.14.6
- Tokenizers 0.14.1
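As a quick usage note, the fine-tuned checkpoint can be served through the standard `transformers` pipeline. A minimal sketch; the label names emitted by the classifier depend on the model config and are not documented in this card, and AraBERT models typically benefit from the `arabert` preprocessor, which is omitted here for brevity:

```python
# Minimal sketch: run the fine-tuned classifier via the transformers pipeline.
from transformers import pipeline

clf = pipeline(
    "text-classification",
    model="Bmalmotairy/arabert-fully-supervised-arabic-propaganda",
)

# Example input (any Arabic tweet-like text); label names are an assumption
# since they are not documented in the card.
print(clf("هذا مثال على نص قصير للتصنيف"))
# -> [{'label': ..., 'score': ...}]
```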
{"tags": ["generated_from_trainer"], "metrics": ["accuracy", "precision", "recall", "f1"], "base_model": "aubmindlab/bert-base-arabertv02-twitter", "model-index": [{"name": "arabert-fully-supervised-arabic-propaganda", "results": []}]}
text-classification
Bmalmotairy/arabert-fully-supervised-arabic-propaganda
[ "transformers", "tensorboard", "safetensors", "bert", "text-classification", "generated_from_trainer", "base_model:aubmindlab/bert-base-arabertv02-twitter", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T18:30:16+00:00
[]
[]
TAGS #transformers #tensorboard #safetensors #bert #text-classification #generated_from_trainer #base_model-aubmindlab/bert-base-arabertv02-twitter #autotrain_compatible #endpoints_compatible #region-us
arabert-fully-supervised-arabic-propaganda ========================================== This model is a fine-tuned version of aubmindlab/bert-base-arabertv02-twitter on the None dataset. It achieves the following results on the evaluation set: * Loss: 0.4417 * Accuracy: 0.9167 * Precision: 0.5577 * Recall: 0.7073 * F1: 0.6237 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 2e-05 * train\_batch\_size: 64 * eval\_batch\_size: 64 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * lr\_scheduler\_warmup\_ratio: 0.2 * num\_epochs: 5 ### Training results ### Framework versions * Transformers 4.35.0 * Pytorch 2.1.0+cu121 * Datasets 2.14.6 * Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 64\n* eval\\_batch\\_size: 64\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.2\n* num\\_epochs: 5", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #tensorboard #safetensors #bert #text-classification #generated_from_trainer #base_model-aubmindlab/bert-base-arabertv02-twitter #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 64\n* eval\\_batch\\_size: 64\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.2\n* num\\_epochs: 5", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 68, 116, 4, 33 ]
[ "passage: TAGS\n#transformers #tensorboard #safetensors #bert #text-classification #generated_from_trainer #base_model-aubmindlab/bert-base-arabertv02-twitter #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 64\n* eval\\_batch\\_size: 64\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.2\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu121\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.08724016696214676, 0.09985264390707016, -0.004035210236907005, 0.10187695920467377, 0.12054022401571274, 0.010918018408119678, 0.15931177139282227, 0.1451641321182251, -0.06599672883749008, 0.07374264299869537, 0.13771864771842957, 0.1183827817440033, 0.015180151909589767, 0.1534406840801239, -0.05757570639252663, -0.2709137499332428, 0.01122005470097065, 0.027814971283078194, -0.07180747389793396, 0.11688384413719177, 0.08067698776721954, -0.12943674623966217, 0.10362526029348373, -0.006916659884154797, -0.14879722893238068, 0.018368830904364586, 0.021081535145640373, -0.07391281425952911, 0.11921127885580063, 0.023432215675711632, 0.11927183717489243, 0.03896302729845047, 0.07635777443647385, -0.19521021842956543, 0.012735191732645035, 0.06404212862253189, -0.006031036842614412, 0.09036149829626083, 0.04119977355003357, -0.02915855124592781, 0.1136547401547432, -0.1058790534734726, 0.07532951235771179, 0.021940117701888084, -0.13483938574790955, -0.2164798080921173, -0.07238549739122391, 0.04561464488506317, 0.07715954631567001, 0.056146275252103806, -0.014590638689696789, 0.13855195045471191, -0.029245683923363686, 0.11579397320747375, 0.27387362718582153, -0.2991630434989929, -0.07132270932197571, 0.030493849888443947, 0.037167686969041824, 0.07846885174512863, -0.11525667458772659, 0.005113648250699043, 0.055854156613349915, 0.02241823449730873, 0.14136067032814026, -0.02649335190653801, -0.018904509022831917, 0.002296037506312132, -0.13219742476940155, -0.027247127145528793, 0.12129467725753784, 0.03241301327943802, -0.03540963679552078, -0.06441481411457062, -0.0797308161854744, -0.1705971509218216, -0.051479265093803406, -0.0331544503569603, 0.03990020602941513, -0.047317150980234146, -0.08440964668989182, -0.010909217409789562, -0.08677103370428085, -0.07127352803945541, -0.03742876648902893, 0.18453145027160645, 0.04176225885748863, 0.0017896050121635199, -0.031074151396751404, 0.09156937897205353, -0.03598781302571297, -0.16808930039405823, 0.004891620948910713, 0.010605080984532833, -0.00046018927241675556, -0.04290751367807388, -0.03448839113116264, -0.06518983840942383, 0.01674441620707512, 0.161915585398674, -0.07350020110607147, 0.08312223851680756, -0.0078010461293160915, 0.017386021092534065, -0.08431776612997055, 0.1629946529865265, -0.03302888572216034, -0.032213155180215836, -0.0014938736567273736, 0.09038611501455307, 0.028360221534967422, -0.025820337235927582, -0.09444931894540787, 0.024934988468885422, 0.11126479506492615, 0.025639774277806282, -0.06820044666528702, 0.09121229499578476, -0.03786219283938408, -0.016324544325470924, 0.016056420281529427, -0.10985414683818817, 0.04015865549445152, 0.007767539471387863, -0.06791196763515472, -0.05101164057850838, 0.02338126301765442, -0.004630571696907282, 0.00290357181802392, 0.1171773299574852, -0.09355482459068298, 0.008775983937084675, -0.07387622445821762, -0.12641526758670807, 0.015014726668596268, -0.09976083040237427, 0.004749531392008066, -0.09508311003446579, -0.13451389968395233, -0.010525249876081944, 0.04875839874148369, -0.044458746910095215, -0.005762345157563686, -0.05674036592245102, -0.08823026716709137, 0.04143159091472626, -0.008775808848440647, 0.06270961463451385, -0.06877381354570389, 0.09456054866313934, 0.044790927320718765, 0.09427542984485626, -0.012407888658344746, 0.03276299685239792, -0.09013192355632782, 0.040748197585344315, -0.22034139931201935, 0.04035276547074318, -0.07913664728403091, 0.05426106974482536, -0.09527859091758728, -0.08997005224227905, 0.0000870817166287452, 
0.0075692348182201385, 0.07836291193962097, 0.1174217164516449, -0.1690383404493332, -0.08768154680728912, 0.18473286926746368, -0.10322688519954681, -0.11939767748117447, 0.11519588530063629, -0.0599975623190403, 0.04337160289287567, 0.06709395349025726, 0.1930616796016693, 0.07120057940483093, -0.12104541063308716, -0.01522861234843731, -0.03597152233123779, 0.051009658724069595, 0.021834954619407654, 0.051096949726343155, 0.021070726215839386, 0.029061660170555115, 0.014458803460001945, -0.006091222632676363, 0.01827048324048519, -0.09259083867073059, -0.08088038861751556, -0.03383341431617737, -0.08089158684015274, 0.07838553935289383, 0.05259976163506508, 0.06581801921129227, -0.12531964480876923, -0.10492946952581406, 0.05142192915081978, 0.07057306170463562, -0.07590635865926743, 0.025893280282616615, -0.0925917848944664, 0.07071303576231003, -0.038429368287324905, -0.010119722224771976, -0.16564632952213287, -0.05745251104235649, 0.030910823494195938, 0.009278318844735622, 0.024707287549972534, -0.0075689153745770454, 0.09937459975481033, 0.07382148504257202, -0.07088018208742142, -0.03302798047661781, -0.020455848425626755, 0.008723882026970387, -0.12284352630376816, -0.21121910214424133, -0.0159943625330925, -0.04442061111330986, 0.07789390534162521, -0.20529375970363617, 0.04790252074599266, 0.05895348638296127, 0.12262982130050659, 0.06296258419752121, -0.019553983584046364, -0.021281184628605843, 0.0526154600083828, -0.03897423297166824, -0.06209784746170044, 0.050122298300266266, -0.018399307504296303, -0.08459830284118652, -0.030828824266791344, -0.1553761065006256, 0.17608991265296936, 0.12625791132450104, -0.04233948886394501, -0.09289279580116272, -0.007674957625567913, -0.04986690729856491, -0.026803219690918922, -0.030304551124572754, 0.005328180734068155, 0.15039722621440887, -0.0018140089232474566, 0.14703218638896942, -0.0818759948015213, -0.04437442868947983, 0.04009748622775078, -0.03569546341896057, -0.005577057600021362, 0.11058568954467773, 0.056091953068971634, -0.10606266558170319, 0.13675856590270996, 0.15688124299049377, -0.06370510905981064, 0.16331733763217926, -0.03619593754410744, -0.05788590386509895, -0.030216384679079056, -0.009335952810943127, 0.015701934695243835, 0.1084175780415535, -0.11796572059392929, -0.019132262095808983, 0.006340503692626953, 0.015086759813129902, -0.0005949254264123738, -0.18643604218959808, -0.011626144871115685, 0.03927019238471985, -0.05383945629000664, 0.008823343552649021, 0.0018469808856025338, 0.008053756318986416, 0.11540348827838898, 0.00438331114128232, -0.06840761750936508, 0.014501050114631653, -0.0077599831856787205, -0.07187075167894363, 0.19835731387138367, -0.08019974827766418, -0.15381701290607452, -0.11403892189264297, -0.07792986184358597, -0.05783103033900261, 0.025711258873343468, 0.059220459312200546, -0.10236288607120514, -0.0350659117102623, -0.09874165803194046, 0.020433960482478142, 0.03163588047027588, 0.05938207358121872, 0.02123342640697956, -0.0010822342010214925, 0.07530565559864044, -0.1049017682671547, -0.018267149105668068, -0.04867924004793167, -0.05229106917977333, 0.04004168137907982, 0.027717960998415947, 0.10595039278268814, 0.11857333034276962, -0.03345327451825142, 0.024984490126371384, -0.0397644005715847, 0.2246486395597458, -0.07693257927894592, 0.00024346901045646518, 0.10448446124792099, -0.03134063258767128, 0.05653535574674606, 0.14264540374279022, 0.05583252012729645, -0.1001133918762207, 0.014345252886414528, 0.054390668869018555, -0.03849988430738449, -0.20104265213012695, 
-0.024568436667323112, -0.021266115829348564, 0.026478232815861702, 0.12212123721837997, 0.037410128861665726, 0.03701410070061684, 0.05873372033238411, 0.019175110384821892, 0.045451778918504715, -0.008213474415242672, 0.08643491566181183, 0.10291329771280289, 0.04068521037697792, 0.1309320628643036, -0.04157382994890213, -0.07019192725419998, 0.04112778604030609, -0.005098090972751379, 0.19419355690479279, 0.005457681603729725, 0.13315284252166748, 0.037682369351387024, 0.1329287737607956, 0.017419293522834778, 0.06857787072658539, -0.01948780007660389, -0.04256869852542877, -0.010107200592756271, -0.051444072276353836, -0.0362994410097599, 0.042199037969112396, -0.07351906597614288, 0.04631195217370987, -0.11938871443271637, 0.02493235096335411, 0.07195572555065155, 0.24224944412708282, 0.04042581841349602, -0.330058753490448, -0.09242995828390121, 0.020071011036634445, -0.051741234958171844, -0.02095845155417919, 0.032564692199230194, 0.14292891323566437, -0.06247994303703308, 0.07212978601455688, -0.07875457406044006, 0.07616636157035828, -0.049801360815763474, 0.04748167097568512, 0.05514487996697426, 0.08088715374469757, -0.02903066761791706, 0.049072764813899994, -0.2619873285293579, 0.2835053503513336, 0.0250740647315979, 0.06050568446516991, -0.058719452470541, 0.0032162864226847887, 0.03695320338010788, 0.07123307883739471, 0.08399553596973419, -0.030075842514634132, -0.1112734004855156, -0.19083955883979797, -0.07838058471679688, 0.01687420904636383, 0.1319732517004013, -0.04392275959253311, 0.12163305282592773, -0.022983765229582787, -0.014687711372971535, 0.061610426753759384, -0.05791735276579857, -0.07378093898296356, -0.079167939722538, 0.004493496380746365, 0.03275396302342415, -0.022145111113786697, -0.07213539630174637, -0.11099762469530106, -0.08416064828634262, 0.14838989078998566, -0.04684780165553093, -0.03473300114274025, -0.12248454242944717, 0.06528265029191971, 0.08087992668151855, -0.08894643932580948, 0.03575741499662399, 0.0066512818448245525, 0.10235098004341125, 0.017001384869217873, -0.06476016342639923, 0.12733405828475952, -0.06420847028493881, -0.19985127449035645, -0.06548573821783066, 0.13647818565368652, 0.02996017225086689, 0.05163928493857384, -0.012456381693482399, 0.032240934669971466, 0.006524727679789066, -0.07951628416776657, 0.04665680602192879, -0.019250085577368736, 0.04922119528055191, -0.00649562431499362, -0.058337386697530746, 0.0013160489033907652, -0.06299366801977158, 0.0016920791240409017, 0.1480015367269516, 0.28773078322410583, -0.09304822981357574, 0.04327953979372978, 0.05776836350560188, -0.061936650425195694, -0.2017754465341568, 0.02847111038863659, 0.049180470407009125, -0.0014874553307890892, 0.0340670607984066, -0.1712723970413208, 0.0831642672419548, 0.08114255219697952, -0.0145163144916296, 0.0938587412238121, -0.2668288052082062, -0.14316347241401672, 0.10726261883974075, 0.13943861424922943, 0.09709683060646057, -0.16253003478050232, -0.04135066270828247, -0.014856631867587566, -0.09311950951814651, 0.10773757100105286, -0.10945800691843033, 0.11433126032352448, -0.01773182302713394, 0.06943314522504807, 0.02191656455397606, -0.04996466636657715, 0.112519271671772, -0.0038978003431111574, 0.10867124050855637, -0.07202240079641342, -0.03738030046224594, 0.061288781464099884, -0.0695420578122139, 0.021018072962760925, -0.10652090609073639, 0.025929488241672516, -0.07788994163274765, -0.023145588114857674, -0.06652086973190308, 0.02912118472158909, -0.03836394101381302, -0.059330396354198456, -0.04736759513616562, 
0.02668979950249195, 0.05175016075372696, -0.018610702827572823, 0.1731497347354889, -0.007123682182282209, 0.16865016520023346, 0.15921194851398468, 0.10204597562551498, -0.06984731554985046, -0.017408113926649094, 0.01931486651301384, -0.019183095544576645, 0.06489790230989456, -0.1686071902513504, 0.053117770701646805, 0.12637217342853546, 0.01317197922617197, 0.13577285408973694, 0.06771984696388245, -0.034073419868946075, 0.011061863042414188, 0.08008942008018494, -0.1870361864566803, -0.0934908390045166, -0.002962286351248622, -0.036586835980415344, -0.1187097579240799, 0.08051421493291855, 0.13431434333324432, -0.06278562545776367, 0.004021878819912672, -0.005330052692443132, 0.018223397433757782, -0.01655387505888939, 0.1893644779920578, 0.05828891694545746, 0.0561353899538517, -0.08597836643457413, 0.07130499184131622, 0.03097194992005825, -0.09339822083711624, 0.04132938012480736, 0.09916507452726364, -0.0824752151966095, -0.032363396137952805, 0.034453678876161575, 0.18742287158966064, -0.01624279096722603, -0.03117763251066208, -0.16148661077022552, -0.10193739831447601, 0.06244523450732231, 0.23372520506381989, 0.07975977659225464, 0.010034938342869282, -0.03360877186059952, 0.03279786556959152, -0.11190573871135712, 0.12108440697193146, 0.04516275227069855, 0.08295721560716629, -0.1442468911409378, 0.13207311928272247, -0.012840489856898785, 0.0161477979272604, -0.036630187183618546, 0.0285800714045763, -0.12662795186042786, -0.007863984443247318, -0.10450410842895508, -0.011151927523314953, -0.03497109189629555, 0.0008233650005422533, 0.000564842892345041, -0.06834853440523148, -0.06984039396047592, 0.003621139796450734, -0.10274715721607208, -0.013516019098460674, 0.023280074819922447, 0.04858981445431709, -0.13694126904010773, -0.03057423233985901, 0.036824844777584076, -0.07927154749631882, 0.08656852692365646, 0.0313253328204155, 0.005689514800906181, 0.0488504022359848, -0.1315540373325348, 0.019143665209412575, 0.0597628578543663, -0.01277932059019804, 0.05036802589893341, -0.09325101226568222, -0.006316993851214647, -0.028754189610481262, 0.05234584957361221, 0.027980268001556396, 0.09558147937059402, -0.11730287969112396, 0.034734390676021576, -0.011391782201826572, -0.07167799025774002, -0.0575731135904789, 0.03532366827130318, 0.08713226020336151, -0.00801214948296547, 0.17312775552272797, -0.09997951239347458, 0.031834136694669724, -0.20668183267116547, 0.0037030931562185287, 0.017519351094961166, -0.13125312328338623, -0.07608921825885773, -0.04999975860118866, 0.0668855533003807, -0.06992777436971664, 0.12810352444648743, 0.016071295365691185, 0.01236896961927414, 0.04902438074350357, -0.06799431145191193, -0.00729187810793519, 0.03242575749754906, 0.16659225523471832, 0.03407301381230354, -0.05287221819162369, 0.03711399808526039, 0.024645915254950523, 0.09553410857915878, 0.08326046913862228, 0.2029498964548111, 0.14113378524780273, -0.01968841440975666, 0.09188123792409897, 0.04780744016170502, -0.04409433528780937, -0.15521110594272614, 0.06508581340312958, -0.053592193871736526, 0.10474700480699539, -0.020317716524004936, 0.17907127737998962, 0.11588902026414871, -0.15248464047908783, 0.03186645731329918, -0.0384163074195385, -0.08256997168064117, -0.12039480358362198, -0.053046345710754395, -0.1042327731847763, -0.15477079153060913, 0.011176475323736668, -0.12064880132675171, 0.03536617010831833, 0.053438033908605576, 0.022454606369137764, 0.004663430154323578, 0.19092997908592224, -0.0042325230315327644, 0.03230004757642746, 0.06997884064912796, 
0.020320666953921318, -0.039144814014434814, -0.06462088972330093, -0.0802987590432167, -0.015059860423207283, -0.007201324217021465, 0.008181807585060596, -0.03485943377017975, -0.03428720682859421, 0.029637858271598816, -0.017868123948574066, -0.11312444508075714, 0.010727505199611187, 0.022941991686820984, 0.05144495517015457, 0.0308152437210083, 0.007457072846591473, -0.002412867033854127, -0.01394076831638813, 0.209219828248024, -0.07820244133472443, -0.0395711287856102, -0.11262252926826477, 0.22309452295303345, 0.02432415261864662, 0.016887547448277473, 0.003452301723882556, -0.0774257481098175, -0.00002586333721410483, 0.20635969936847687, 0.18711057305335999, -0.05001220107078552, 0.017471186816692352, -0.020448435097932816, -0.0019285689340904355, -0.002566790208220482, 0.08516004681587219, 0.09447541832923889, 0.039293065667152405, -0.06570584326982498, -0.05110470950603485, -0.020961765199899673, -0.023525232449173927, -0.047140199691057205, 0.0715167373418808, 0.029308699071407318, 0.019353775307536125, -0.060042645782232285, 0.04810298979282379, -0.03781358525156975, -0.1296035349369049, 0.06044362485408783, -0.2298315465450287, -0.14486350119113922, -0.002519697416573763, 0.09425294399261475, 0.000005229372163739754, 0.06339354813098907, -0.004235243424773216, -0.027990687638521194, 0.06261100620031357, -0.010620499961078167, -0.06743746995925903, -0.08280281722545624, 0.07541247457265854, -0.12714122235774994, 0.22154539823532104, -0.0392162948846817, 0.028515079990029335, 0.12585346400737762, 0.01918257214128971, -0.07889106869697571, 0.05451766401529312, 0.06253304332494736, -0.09628497064113617, -0.005146507173776627, 0.13869433104991913, -0.03698934614658356, 0.11343719810247421, 0.05821717157959938, -0.1330491155385971, 0.001165124587714672, -0.061617616564035416, -0.07832503318786621, -0.04318208619952202, -0.02640756592154503, -0.04894717037677765, 0.13275942206382751, 0.21272654831409454, -0.03051471896469593, 0.010190516710281372, -0.052986133843660355, 0.02272227220237255, 0.062478385865688324, 0.01728334091603756, -0.04223453253507614, -0.26292088627815247, 0.01806323230266571, 0.09603854268789291, -0.010241040959954262, -0.26993557810783386, -0.09750477969646454, 0.010367653332650661, -0.022263266146183014, -0.09331206977367401, 0.09240594506263733, 0.09381617605686188, 0.05385831370949745, -0.051591139286756516, -0.09729532152414322, -0.04885233938694, 0.16659697890281677, -0.15551719069480896, -0.08224430680274963 ]
null
null
transformers
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # deberta-v3-large-survey-new_fact_main_passage-rater This model is a fine-tuned version of [microsoft/deberta-v3-large](https://huggingface.co/microsoft/deberta-v3-large) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.2742 - Krippendorff: 0.9302 - Spearman: 0.9541 - Absolute Agreement: 0.9183 - Agreement Within One: 0.9837 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 6e-06 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 1000 - num_epochs: 20 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Krippendorff | Spearman | Absolute Agreement | Agreement Within One | |:-------------:|:-----:|:----:|:---------------:|:------------:|:--------:|:------------------:|:--------------------:| | No log | 1.0 | 50 | 2.0683 | -0.3510 | nan | 0.0972 | 1.0 | | No log | 2.0 | 100 | 2.0617 | -0.3510 | nan | 0.0972 | 1.0 | | No log | 3.0 | 150 | 2.0480 | -0.3510 | nan | 0.0972 | 1.0 | | No log | 4.0 | 200 | 1.9377 | -0.5105 | nan | 0.2222 | 1.0 | | No log | 5.0 | 250 | 2.0281 | -0.5105 | nan | 0.2222 | 1.0 | | No log | 6.0 | 300 | 2.1102 | -0.5105 | nan | 0.2222 | 1.0 | | No log | 7.0 | 350 | 2.1711 | -0.1354 | -0.0833 | 0.2361 | 0.7639 | | No log | 8.0 | 400 | 2.2375 | 0.0597 | 0.1749 | 0.2917 | 0.9167 | | No log | 9.0 | 450 | 2.2094 | 0.2618 | 0.2157 | 0.3194 | 0.8611 | | 1.4101 | 10.0 | 500 | 2.2945 | 0.3359 | 0.3103 | 0.3611 | 0.8611 | | 1.4101 | 11.0 | 550 | 2.0979 | 0.4477 | 0.3534 | 0.3611 | 0.8333 | | 1.4101 | 12.0 | 600 | 2.0983 | 0.5901 | 0.5467 | 0.3472 | 0.8333 | | 1.4101 | 13.0 | 650 | 2.4303 | 0.3729 | 0.2982 | 0.375 | 0.9167 | | 1.4101 | 14.0 | 700 | 2.2451 | 0.7466 | 0.6756 | 0.4722 | 0.8611 | | 1.4101 | 15.0 | 750 | 2.5756 | 0.5188 | 0.4433 | 0.4167 | 0.8889 | | 1.4101 | 16.0 | 800 | 2.9836 | 0.4981 | 0.3828 | 0.3889 | 0.8889 | | 1.4101 | 17.0 | 850 | 2.4424 | 0.8183 | 0.7631 | 0.4444 | 0.8889 | | 1.4101 | 18.0 | 900 | 2.8010 | 0.7666 | 0.7242 | 0.4861 | 0.8611 | | 1.4101 | 19.0 | 950 | 3.0376 | 0.6291 | 0.5873 | 0.4861 | 0.8333 | | 0.4737 | 20.0 | 1000 | 3.3937 | 0.5765 | 0.5255 | 0.4306 | 0.9167 | ### Framework versions - Transformers 4.26.0 - Pytorch 1.13.1 - Datasets 2.10.1 - Tokenizers 0.12.1
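The hyperparameter list above maps almost one-to-one onto `transformers.TrainingArguments`. Below is a minimal sketch of that mapping, not the authors' actual script: the training data, the number of rating levels (`num_labels=7`), and the `"text"` column are placeholders, since the card leaves the dataset undocumented ("None dataset").

```python
# Minimal sketch of the fine-tuning setup listed above (assumptions noted
# inline). Requires a CUDA device for fp16 ("Native AMP").
from datasets import Dataset
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

base = "microsoft/deberta-v3-large"
tokenizer = AutoTokenizer.from_pretrained(base)
# num_labels=7 is an assumption; the card does not document the rating scale.
model = AutoModelForSequenceClassification.from_pretrained(base, num_labels=7)

def tokenize(batch):
    return tokenizer(batch["text"], truncation=True, padding="max_length", max_length=256)

# Placeholder data standing in for the undocumented training set.
train = Dataset.from_dict({"text": ["example passage"], "label": [0]}).map(tokenize, batched=True)

args = TrainingArguments(
    output_dir="deberta-v3-large-survey-new_fact_main_passage-rater",
    learning_rate=6e-6,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=1000,
    num_train_epochs=20,
    fp16=True,                    # "mixed_precision_training: Native AMP"
    evaluation_strategy="epoch",
)

# Adam betas (0.9, 0.999) and epsilon 1e-08 are the library defaults,
# so they need no explicit arguments here.
Trainer(model=model, args=args, train_dataset=train, eval_dataset=train).train()
```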
{"license": "mit", "tags": ["generated_from_trainer"], "model-index": [{"name": "deberta-v3-large-survey-new_fact_main_passage-rater", "results": []}]}
text-classification
domenicrosati/deberta-v3-large-survey-new_fact_main_passage-rater
[ "transformers", "pytorch", "tensorboard", "deberta-v2", "text-classification", "generated_from_trainer", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T18:37:29+00:00
[]
[]
TAGS #transformers #pytorch #tensorboard #deberta-v2 #text-classification #generated_from_trainer #license-mit #autotrain_compatible #endpoints_compatible #region-us
deberta-v3-large-survey-new\_fact\_main\_passage-rater ====================================================== This model is a fine-tuned version of microsoft/deberta-v3-large on the None dataset. It achieves the following results on the evaluation set: * Loss: 0.2742 * Krippendorff: 0.9302 * Spearman: 0.9541 * Absolute Agreement: 0.9183 * Agreement Within One: 0.9837 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 6e-06 * train\_batch\_size: 16 * eval\_batch\_size: 16 * seed: 42 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * lr\_scheduler\_warmup\_steps: 1000 * num\_epochs: 20 * mixed\_precision\_training: Native AMP ### Training results ### Framework versions * Transformers 4.26.0 * Pytorch 1.13.1 * Datasets 2.10.1 * Tokenizers 0.12.1
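The agreement metrics reported above are straightforward to reproduce once predictions are cast as integer rating levels. A minimal sketch under that assumption follows (the label encoding is not documented in the card, the example arrays are hypothetical, and Krippendorff's alpha uses the third-party `krippendorff` PyPI package):

```python
# Sketch of the reported agreement metrics, assuming integer rating levels.
import numpy as np
import krippendorff
from scipy.stats import spearmanr

gold = np.array([0, 1, 2, 3, 3, 4])  # hypothetical gold ratings
pred = np.array([0, 1, 2, 2, 3, 4])  # hypothetical model ratings

alpha = krippendorff.alpha(reliability_data=[gold, pred], level_of_measurement="ordinal")
rho = spearmanr(gold, pred).correlation
absolute_agreement = float(np.mean(gold == pred))
agreement_within_one = float(np.mean(np.abs(gold - pred) <= 1))  # off by at most one level

print(alpha, rho, absolute_agreement, agreement_within_one)
```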
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 6e-06\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 20\n* mixed\\_precision\\_training: Native AMP", "### Training results", "### Framework versions\n\n\n* Transformers 4.26.0\n* Pytorch 1.13.1\n* Datasets 2.10.1\n* Tokenizers 0.12.1" ]
[ "TAGS\n#transformers #pytorch #tensorboard #deberta-v2 #text-classification #generated_from_trainer #license-mit #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 6e-06\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 20\n* mixed\\_precision\\_training: Native AMP", "### Training results", "### Framework versions\n\n\n* Transformers 4.26.0\n* Pytorch 1.13.1\n* Datasets 2.10.1\n* Tokenizers 0.12.1" ]
[ 57, 131, 4, 32 ]
[ "passage: TAGS\n#transformers #pytorch #tensorboard #deberta-v2 #text-classification #generated_from_trainer #license-mit #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 6e-06\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 20\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.26.0\n* Pytorch 1.13.1\n* Datasets 2.10.1\n* Tokenizers 0.12.1" ]
[ -0.09419464319944382, 0.0819736123085022, -0.00423989724367857, 0.08212623745203018, 0.1398732215166092, 0.008019800297915936, 0.12661798298358917, 0.14956742525100708, -0.10706411302089691, 0.04653304070234299, 0.12092546373605728, 0.17130465805530548, 0.031082550063729286, 0.15872608125209808, -0.055921733379364014, -0.31198784708976746, 0.024350058287382126, 0.053720828145742416, -0.06877810508012772, 0.1287686824798584, 0.09489783644676208, -0.13375157117843628, 0.06675601750612259, 0.02458246238529682, -0.17789240181446075, -0.0021490692161023617, -0.009719875641167164, -0.08010583370923996, 0.13443726301193237, 0.012222287245094776, 0.10320509225130081, 0.048764996230602264, 0.07189678400754929, -0.1833876222372055, 0.010056683793663979, 0.048082318156957626, 0.01282890047878027, 0.08673860132694244, 0.06372833997011185, -0.026021502912044525, 0.1435668170452118, -0.08649370074272156, 0.09412052482366562, 0.021685238927602768, -0.13128161430358887, -0.29380613565444946, -0.08669155836105347, 0.03699199855327606, 0.07994949817657471, 0.07470887154340744, -0.0015686957631260157, 0.13819755613803864, -0.07333420217037201, 0.11013995856046677, 0.26747941970825195, -0.2894618809223175, -0.05309412628412247, -0.0003888496139552444, 0.04150163754820824, 0.061628688126802444, -0.10712930560112, -0.03775104507803917, 0.018794283270835876, 0.045213326811790466, 0.15424491465091705, -0.013775722123682499, -0.038956426084041595, -0.0040529510006308556, -0.14485475420951843, -0.06848371028900146, 0.09718042612075806, 0.007610689848661423, -0.032423537224531174, -0.0819580927491188, -0.06260613352060318, -0.19216090440750122, -0.0572357103228569, -0.02292666770517826, 0.0447293259203434, -0.05057917907834053, -0.07233956456184387, 0.004373213276267052, -0.07687264680862427, -0.0670054703950882, -0.03380461409687996, 0.1930190771818161, 0.0672653466463089, 0.0015981359174475074, -0.04130242019891739, 0.10266947746276855, 0.0025191802997142076, -0.15078836679458618, -0.012128735892474651, 0.019034387543797493, -0.01438054908066988, -0.0410798043012619, -0.043298326432704926, -0.06125155836343765, -0.004188921302556992, 0.15827572345733643, -0.12228973954916, 0.08383642137050629, 0.009481864981353283, 0.028370101004838943, -0.08031776547431946, 0.17917805910110474, -0.01893613487482071, 0.04495372250676155, -0.01575166918337345, 0.05359259992837906, 0.005456769838929176, -0.02432744950056076, -0.09712094068527222, 0.021857162937521935, 0.12325767427682877, 0.04282861575484276, -0.06606990844011307, 0.07036103308200836, -0.04052754491567612, -0.025235362350940704, 0.037199877202510834, -0.11510772258043289, 0.04012181609869003, 0.01029246300458908, -0.08643949776887894, -0.03272420912981033, 0.012408765032887459, -0.011960127390921116, -0.03986349329352379, 0.1313590109348297, -0.07633912563323975, 0.0252820011228323, -0.08413664251565933, -0.14113514125347137, 0.023864267393946648, -0.1174793392419815, 0.014128368347883224, -0.08673271536827087, -0.12264389544725418, -0.017497163265943527, 0.05538437142968178, -0.0419563427567482, -0.03866945207118988, -0.05380064249038696, -0.08043470978736877, 0.03669698163866997, -0.02160426415503025, 0.09255362302064896, -0.06867823749780655, 0.10589733719825745, 0.032764922827482224, 0.08378405123949051, 0.0028864583000540733, 0.06134150177240372, -0.0844656378030777, 0.033049583435058594, -0.2287781536579132, 0.06350985914468765, -0.0749966949224472, 0.06565504521131516, -0.09947898983955383, -0.12811416387557983, 0.03359118103981018, -0.007485142908990383, 
0.0934532880783081, 0.1025753915309906, -0.14841291308403015, -0.08413536846637726, 0.20558011531829834, -0.09427174925804138, -0.0935608297586441, 0.1061195358633995, -0.0557774119079113, 0.020182518288493156, 0.05734604597091675, 0.21488331258296967, 0.09224182367324829, -0.10079739987850189, 0.02101164124906063, -0.036955464631319046, 0.035658445209264755, -0.03304548189043999, 0.05589647591114044, 0.014409828931093216, 0.06323589384555817, 0.017437122762203217, -0.0015360860852524638, 0.03830280527472496, -0.10360971093177795, -0.07600720971822739, -0.02989354357123375, -0.06081405654549599, 0.05544432997703552, 0.059372443705797195, 0.06976387649774551, -0.1195870190858841, -0.1002625972032547, 0.08713510632514954, 0.0762086734175682, -0.0812746211886406, 0.04742865264415741, -0.09154057502746582, 0.06577236950397491, 0.0036810620222240686, -0.003036171430721879, -0.1979525238275528, -0.012146384455263615, 0.025234928354620934, -0.026078827679157257, 0.01588757336139679, -0.014487155713140965, 0.079795703291893, 0.06345736980438232, -0.04129130393266678, -0.035959646105766296, -0.03901845961809158, 0.0028716623783111572, -0.10895438492298126, -0.20809806883335114, -0.04452749341726303, -0.03914792463183403, 0.07072172313928604, -0.170883446931839, 0.04907219856977463, 0.06434812396764755, 0.09936002641916275, 0.029377032071352005, -0.0188999492675066, -0.021033111959695816, 0.08075139671564102, -0.029084375128149986, -0.06414471566677094, 0.06660670787096024, 0.015044569037854671, -0.07434812933206558, 0.006375274155288935, -0.1390027105808258, 0.15134938061237335, 0.12609612941741943, -0.020370926707983017, -0.08846738189458847, -0.024996917694807053, -0.06655535846948624, -0.030191030353307724, -0.014863299205899239, 0.05020071566104889, 0.17757827043533325, 0.008115002885460854, 0.15373507142066956, -0.08291017264127731, -0.05603213980793953, 0.04909678176045418, -0.024330733343958855, 0.007989253848791122, 0.12787343561649323, 0.051596302539110184, -0.08893886208534241, 0.13189184665679932, 0.11010733246803284, -0.04980257898569107, 0.14379407465457916, -0.0597384050488472, -0.0665033757686615, -0.023739084601402283, -0.003934646490961313, 0.018693380057811737, 0.09434114396572113, -0.13340038061141968, -0.017684485763311386, 0.02264661341905594, 0.036911532282829285, 0.016089286655187607, -0.21040266752243042, -0.00781194306910038, 0.04607353359460831, -0.05728450417518616, -0.04027640074491501, -0.004569774027913809, 0.026712164282798767, 0.10564262419939041, 0.01901639625430107, -0.06838290393352509, 0.016358356922864914, 0.012642936781048775, -0.06919639557600021, 0.20731565356254578, -0.10302675515413284, -0.16907523572444916, -0.10146433115005493, -0.08026600629091263, -0.02995872311294079, -0.0011454641353338957, 0.08274228870868683, -0.09223975241184235, -0.03177972882986069, -0.06092032045125961, 0.006279957015067339, -0.02334928885102272, 0.03388430178165436, 0.013785602524876595, 0.0034243506379425526, 0.047682035714387894, -0.11702077835798264, -0.021972067654132843, -0.04355349764227867, -0.022576894611120224, 0.07729174196720123, 0.03031749464571476, 0.09555792808532715, 0.145236536860466, -0.04230457544326782, 0.039948493242263794, -0.04610773175954819, 0.1957326978445053, -0.0764644593000412, -0.028765849769115448, 0.11746636033058167, -0.006768878549337387, 0.0774630606174469, 0.11275175958871841, 0.05602376535534859, -0.09456323832273483, -0.0016715592937543988, 0.02385002188384533, -0.04774283990263939, -0.22671815752983093, -0.03334212675690651, 
-0.04571757838129997, 0.005098190624266863, 0.11704468727111816, 0.03643316775560379, 0.02303323894739151, 0.04338105767965317, 0.02107132412493229, 0.014419912360608578, 0.00441396189853549, 0.0964583307504654, 0.1316526085138321, 0.04371172562241554, 0.1319473683834076, -0.042509738355875015, -0.052283287048339844, 0.035164736211299896, -0.009073491208255291, 0.23278510570526123, -0.0016151407035067677, 0.14715997874736786, 0.059198591858148575, 0.139724463224411, 0.0240663830190897, 0.08249390125274658, -0.0019108165288344026, -0.02550296112895012, -0.0016668447060510516, -0.03497797250747681, -0.03836554288864136, 0.01589539460837841, -0.03545466810464859, 0.007951823063194752, -0.14245547354221344, 0.004125777631998062, 0.045798059552907944, 0.29470524191856384, 0.021044718101620674, -0.3375057578086853, -0.10593510419130325, -0.009130638092756271, -0.059909652918577194, -0.04288740083575249, 0.01999026909470558, 0.08213430643081665, -0.0918789654970169, 0.06549283862113953, -0.07591614127159119, 0.09826255589723587, -0.06153746321797371, 0.033904947340488434, 0.05042460560798645, 0.084871307015419, -0.004134688526391983, 0.06380287557840347, -0.3066827952861786, 0.27759528160095215, 0.0047368635423481464, 0.06357186287641525, -0.07319333404302597, 0.009251268580555916, 0.016718439757823944, 0.05037641152739525, 0.06633692979812622, -0.01627441868185997, -0.111365407705307, -0.18924397230148315, -0.07224659621715546, 0.006428789347410202, 0.1162153035402298, -0.0038921511732041836, 0.11322586983442307, -0.010379395447671413, 0.006730878259986639, 0.05239660665392876, -0.08119940012693405, -0.04228007420897484, -0.09600329399108887, 0.017315365374088287, -0.003765558358281851, 0.0021220091730356216, -0.06942609697580338, -0.11646154522895813, -0.05765622854232788, 0.16603252291679382, -0.013738072477281094, -0.0629560574889183, -0.1346374899148941, 0.049481913447380066, 0.0848984345793724, -0.08954393863677979, 0.04985355958342552, 0.0024460754357278347, 0.1060015857219696, -0.004714943002909422, -0.07005683332681656, 0.12512406706809998, -0.05383812636137009, -0.18366581201553345, -0.03174223005771637, 0.1253911703824997, 0.04850617051124573, 0.058132462203502655, -0.01064242236316204, 0.03215356543660164, -0.01425542775541544, -0.08650555461645126, 0.040354352444410324, -0.013282906264066696, 0.07638271898031235, -0.02294299006462097, -0.04296496883034706, 0.044039372354745865, -0.06708431243896484, -0.014150023460388184, 0.18216030299663544, 0.26163777709007263, -0.1025676280260086, 0.08173847943544388, 0.040875520557165146, -0.05672413483262062, -0.1845170110464096, 0.012537422589957714, 0.07483144849538803, 0.0008468743762932718, 0.009566890075802803, -0.21234670281410217, 0.028307007625699043, 0.08977304399013519, -0.011964734643697739, 0.08276262879371643, -0.3246544897556305, -0.13204096257686615, 0.11918896436691284, 0.13371741771697998, 0.06525561213493347, -0.15082082152366638, -0.028478873893618584, -0.0026575797237455845, -0.12577655911445618, 0.11829032003879547, -0.0583949014544487, 0.12740840017795563, -0.045168936252593994, 0.08542802929878235, 0.017668185755610466, -0.05772764980792999, 0.11205779761075974, 0.009286809712648392, 0.09097635000944138, -0.05982092767953873, 0.0017656913259997964, 0.07820133119821548, -0.06843370944261551, 0.03741038590669632, -0.0716741681098938, 0.029719645157456398, -0.11513064056634903, -0.026067664846777916, -0.08457544445991516, 0.019103221595287323, -0.03681714087724686, -0.04800323769450188, -0.04409157857298851, 
0.02874828316271305, 0.06839022785425186, -0.02661016397178173, 0.16960053145885468, 0.0022112412843853235, 0.15499408543109894, 0.1501026451587677, 0.08562449365854263, -0.08974149823188782, -0.05906108021736145, 0.0033411732874810696, -0.007242257706820965, 0.054625656455755234, -0.14475272595882416, 0.040334105491638184, 0.15559783577919006, 0.025204144418239594, 0.11911424994468689, 0.08266627788543701, -0.04708822816610336, 0.016611596569418907, 0.05556485429406166, -0.15874452888965607, -0.11218244582414627, 0.009680463001132011, -0.0040521943010389805, -0.09252060204744339, 0.06297644227743149, 0.09637799113988876, -0.06293985247612, -0.020361218601465225, 0.0037170113064348698, 0.014417451806366444, -0.023048290982842445, 0.20053978264331818, 0.0337715819478035, 0.07612157613039017, -0.11018671840429306, 0.07524256408214569, 0.052518196403980255, -0.12442081421613693, 0.038657691329717636, 0.12038193643093109, -0.0944322794675827, -0.02761920355260372, 0.06395863741636276, 0.13311462104320526, -0.0483776330947876, -0.04493594542145729, -0.15749825537204742, -0.14616255462169647, 0.10942655056715012, 0.18573836982250214, 0.07723630219697952, 0.02003859169781208, -0.04487238824367523, 0.025699056684970856, -0.1342277079820633, 0.08864326030015945, 0.0520547479391098, 0.0718122273683548, -0.12654046714305878, 0.17064082622528076, 0.005992338061332703, 0.05073780566453934, -0.017648521810770035, 0.0003355495282448828, -0.1232232004404068, 0.021179579198360443, -0.11705518513917923, -0.016070973128080368, -0.05304167419672012, 0.0048018693923950195, -0.015088269487023354, -0.03471530228853226, -0.044309794902801514, 0.01771889068186283, -0.12343423068523407, -0.01912246271967888, 0.003635842353105545, 0.03602541610598564, -0.12708845734596252, -0.03180000185966492, 0.013406068086624146, -0.0915270745754242, 0.08114736527204514, 0.05780087783932686, -0.0036199192982167006, 0.036517154425382614, -0.03950130566954613, -0.006301394198089838, 0.07325836271047592, -0.010425684973597527, 0.06269419193267822, -0.13102680444717407, -0.008875934407114983, -0.005387189798057079, 0.02207041159272194, 0.024446027353405952, 0.08334492892026901, -0.1339753270149231, 0.018014810979366302, -0.024852316826581955, -0.07990346848964691, -0.06734876334667206, 0.057374563068151474, 0.08454559743404388, 0.01715502329170704, 0.16742974519729614, -0.08672349154949188, 0.04593971371650696, -0.19565865397453308, -0.015943311154842377, 0.003870150074362755, -0.12710106372833252, -0.0456443652510643, -0.04857485741376877, 0.07330265641212463, -0.061374910175800323, 0.12873660027980804, 0.019589349627494812, 0.023535503074526787, 0.04585005342960358, -0.06735736131668091, -0.030782287940382957, 0.0297271516174078, 0.16893264651298523, 0.0243141520768404, -0.04886658862233162, 0.08423744142055511, 0.0403694286942482, 0.07122969627380371, 0.10968593508005142, 0.2187725454568863, 0.1452215015888214, 0.027434376999735832, 0.0811358243227005, 0.04066653922200203, -0.0529509112238884, -0.19289365410804749, 0.06518189609050751, -0.030015118420124054, 0.12026533484458923, -0.01348662469536066, 0.1984534114599228, 0.11698078364133835, -0.16852520406246185, 0.07420681416988373, -0.026498641818761826, -0.09600228816270828, -0.11890269815921783, -0.048836540430784225, -0.08253803849220276, -0.162637859582901, 0.0035237616393715143, -0.11341224610805511, 0.043593812733888626, 0.05731138214468956, 0.024914294481277466, 0.003940190188586712, 0.13802441954612732, 0.04253585636615753, 0.015300920233130455, 0.06803828477859497, 
0.007569822017103434, -0.028351830318570137, -0.06930971890687943, -0.07375078648328781, 0.007235805504024029, -0.016496989876031876, 0.038180120289325714, -0.02713627740740776, -0.05191713199019432, 0.03908004239201546, -0.027194533497095108, -0.09455137699842453, 0.022350313141942024, 0.03501129150390625, 0.06994054466485977, 0.044165197759866714, 0.009093886241316795, -0.0206651259213686, -0.01850605383515358, 0.20483189821243286, -0.06944611668586731, -0.07566920667886734, -0.09484390169382095, 0.2967168986797333, 0.06427347660064697, -0.014068939723074436, 0.04982125014066696, -0.05585774779319763, -0.0256134532392025, 0.18197041749954224, 0.17787179350852966, -0.00927735771983862, 0.005382772069424391, -0.009677265770733356, -0.008095037192106247, 0.004137204959988594, 0.11177486926317215, 0.13151110708713531, 0.05663874000310898, -0.08099613338708878, -0.04498795419931412, -0.05960837006568909, -0.024128589779138565, -0.05684290826320648, 0.08451787382364273, 0.04260757565498352, -0.003517278004437685, -0.041347749531269073, 0.05674666166305542, -0.06449434161186218, -0.08726052194833755, 0.05320600047707558, -0.2034069001674652, -0.15267980098724365, -0.01357112918049097, 0.06459863483905792, -0.009118075482547283, 0.06418563425540924, 0.004413359798491001, -0.016912125051021576, 0.08219368010759354, -0.0019432419212535024, -0.07727236300706863, -0.07816065847873688, 0.1020158901810646, -0.1238197386264801, 0.16995742917060852, -0.04491932690143585, 0.042402785271406174, 0.12629717588424683, 0.0663362368941307, -0.08318391442298889, 0.053064167499542236, 0.048111047595739365, -0.08135038614273071, 0.0106421560049057, 0.12649546563625336, -0.0353548489511013, 0.04592796042561531, 0.03664318844676018, -0.1511000692844391, 0.004296928644180298, -0.10095901787281036, -0.054583869874477386, -0.028860945254564285, -0.03173341602087021, -0.03182931989431381, 0.10529130697250366, 0.21853002905845642, -0.029718143865466118, 0.009982085786759853, -0.07371751219034195, 0.012880041263997555, 0.05854791775345802, 0.007232749834656715, -0.059531889855861664, -0.26326876878738403, 0.00815677922219038, 0.08411446958780289, -0.00777079351246357, -0.24065285921096802, -0.09760981053113937, 0.01160136703401804, -0.05385652557015419, -0.09389161318540573, 0.09647517651319504, 0.049185093492269516, 0.05325184389948845, -0.0621318481862545, -0.06955966353416443, -0.06943648308515549, 0.18245048820972443, -0.16688789427280426, -0.05814714357256889 ]
null
null
peft
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> - **Developed by:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed] ## Training procedure The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: False - load_in_4bit: True - llm_int8_threshold: 6.0 - llm_int8_skip_modules: None - llm_int8_enable_fp32_cpu_offload: False - llm_int8_has_fp16_weight: False - bnb_4bit_quant_type: nf4 - bnb_4bit_use_double_quant: True - bnb_4bit_compute_dtype: bfloat16 ### Framework versions - PEFT 0.6.2.dev0
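The `bitsandbytes` block above corresponds directly to a `transformers.BitsAndBytesConfig`. A minimal loading sketch is shown below; the `trust_remote_code` flag and the example prompt are assumptions of mine, not something the card specifies, and a CUDA GPU is required for 4-bit loading.

```python
# Minimal sketch of loading this adapter with the quantization config above.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

base = AutoModelForCausalLM.from_pretrained(
    "Deci/DeciCoder-1b",
    quantization_config=bnb_config,
    device_map="auto",
    trust_remote_code=True,  # assumption: the base model ships custom modeling code
)
model = PeftModel.from_pretrained(base, "CShorten/decicoder-50m-updated-schemaSplit-10k-steps")

tokenizer = AutoTokenizer.from_pretrained("Deci/DeciCoder-1b")
inputs = tokenizer("def fibonacci(n):", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=64)[0]))
```

Double quantization (`bnb_4bit_use_double_quant: True`) additionally quantizes the quantization constants themselves, trading a little extra compute for lower memory use.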
{"library_name": "peft", "base_model": "Deci/DeciCoder-1b"}
null
CShorten/decicoder-50m-updated-schemaSplit-10k-steps
[ "peft", "safetensors", "arxiv:1910.09700", "base_model:Deci/DeciCoder-1b", "region:us" ]
2023-11-12T18:37:32+00:00
[ "1910.09700" ]
[]
TAGS #peft #safetensors #arxiv-1910.09700 #base_model-Deci/DeciCoder-1b #region-us
# Model Card for Model ID ## Model Details ### Model Description - Developed by: - Shared by [optional]: - Model type: - Language(s) (NLP): - License: - Finetuned from model [optional]: ### Model Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Downstream Use [optional] ### Out-of-Scope Use ## Bias, Risks, and Limitations ### Recommendations Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. ## Training Details ### Training Data ### Training Procedure #### Preprocessing [optional] #### Training Hyperparameters - Training regime: #### Speeds, Sizes, Times [optional] ## Evaluation ### Testing Data, Factors & Metrics #### Testing Data #### Factors #### Metrics ### Results #### Summary ## Model Examination [optional] ## Environmental Impact Carbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019). - Hardware Type: - Hours used: - Cloud Provider: - Compute Region: - Carbon Emitted: ## Technical Specifications [optional] ### Model Architecture and Objective ### Compute Infrastructure #### Hardware #### Software [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Model Card Authors [optional] ## Model Card Contact ## Training procedure The following 'bitsandbytes' quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: False - load_in_4bit: True - llm_int8_threshold: 6.0 - llm_int8_skip_modules: None - llm_int8_enable_fp32_cpu_offload: False - llm_int8_has_fp16_weight: False - bnb_4bit_quant_type: nf4 - bnb_4bit_use_double_quant: True - bnb_4bit_compute_dtype: bfloat16 ### Framework versions - PEFT 0.6.2.dev0
[ "# Model Card for Model ID", "## Model Details", "### Model Description\n\n\n\n\n\n- Developed by: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:", "### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Downstream Use [optional]", "### Out-of-Scope Use", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", "## How to Get Started with the Model\n\nUse the code below to get started with the model.", "## Training Details", "### Training Data", "### Training Procedure", "#### Preprocessing [optional]", "#### Training Hyperparameters\n\n- Training regime:", "#### Speeds, Sizes, Times [optional]", "## Evaluation", "### Testing Data, Factors & Metrics", "#### Testing Data", "#### Factors", "#### Metrics", "### Results", "#### Summary", "## Model Examination [optional]", "## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:", "## Technical Specifications [optional]", "### Model Architecture and Objective", "### Compute Infrastructure", "#### Hardware", "#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Model Card Authors [optional]", "## Model Card Contact", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- quant_method: bitsandbytes\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: bfloat16", "### Framework versions\n\n\n- PEFT 0.6.2.dev0" ]
[ "TAGS\n#peft #safetensors #arxiv-1910.09700 #base_model-Deci/DeciCoder-1b #region-us \n", "# Model Card for Model ID", "## Model Details", "### Model Description\n\n\n\n\n\n- Developed by: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:", "### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Downstream Use [optional]", "### Out-of-Scope Use", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", "## How to Get Started with the Model\n\nUse the code below to get started with the model.", "## Training Details", "### Training Data", "### Training Procedure", "#### Preprocessing [optional]", "#### Training Hyperparameters\n\n- Training regime:", "#### Speeds, Sizes, Times [optional]", "## Evaluation", "### Testing Data, Factors & Metrics", "#### Testing Data", "#### Factors", "#### Metrics", "### Results", "#### Summary", "## Model Examination [optional]", "## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:", "## Technical Specifications [optional]", "### Model Architecture and Objective", "### Compute Infrastructure", "#### Hardware", "#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Model Card Authors [optional]", "## Model Card Contact", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- quant_method: bitsandbytes\n- load_in_8bit: False\n- load_in_4bit: True\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: nf4\n- bnb_4bit_use_double_quant: True\n- bnb_4bit_compute_dtype: bfloat16", "### Framework versions\n\n\n- PEFT 0.6.2.dev0" ]
[ 37, 6, 3, 45, 28, 3, 4, 9, 9, 10, 42, 20, 3, 4, 5, 9, 11, 13, 3, 12, 5, 4, 5, 3, 4, 9, 53, 9, 8, 6, 3, 14, 8, 7, 9, 4, 164, 14 ]
[ "passage: TAGS\n#peft #safetensors #arxiv-1910.09700 #base_model-Deci/DeciCoder-1b #region-us \n# Model Card for Model ID## Model Details### Model Description\n\n\n\n\n\n- Developed by: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Downstream Use [optional]### Out-of-Scope Use## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.## How to Get Started with the Model\n\nUse the code below to get started with the model.## Training Details### Training Data### Training Procedure#### Preprocessing [optional]#### Training Hyperparameters\n\n- Training regime:#### Speeds, Sizes, Times [optional]## Evaluation### Testing Data, Factors & Metrics#### Testing Data#### Factors#### Metrics### Results#### Summary## Model Examination [optional]## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:## Technical Specifications [optional]### Model Architecture and Objective### Compute Infrastructure#### Hardware#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Model Card Authors [optional]## Model Card Contact" ]
[ -0.0890924334526062, 0.20476669073104858, -0.004046343732625246, 0.029457159340381622, 0.08862961083650589, 0.025224218145012856, 0.06072753295302391, 0.10459984838962555, -0.06071311607956886, 0.10535138100385666, 0.05466222018003464, 0.08295578509569168, 0.10219942033290863, 0.1922721117734909, 0.004247473552823067, -0.2059345841407776, 0.0236305370926857, -0.10060594975948334, 0.022236356511712074, 0.12546713650226593, 0.15162712335586548, -0.10300696641206741, 0.08461058884859085, -0.02424236573278904, -0.01472675334662199, -0.023711754009127617, -0.0746430829167366, -0.056701499968767166, 0.04649109020829201, 0.07337809354066849, 0.06253267079591751, 0.005074060522019863, 0.08755075186491013, -0.26502060890197754, 0.015299898572266102, 0.04532882198691368, -0.014837964437901974, 0.08024284243583679, 0.09043736010789871, -0.0677550658583641, 0.09850171208381653, -0.045636072754859924, 0.12938959896564484, 0.06405342370271683, -0.07891079038381577, -0.1423415094614029, -0.0833515003323555, 0.07189059257507324, 0.15813706815242767, 0.07457779347896576, -0.041159532964229584, 0.15438677370548248, -0.13591767847537994, 0.011356626637279987, 0.02943914197385311, -0.0496361218392849, -0.08219189196825027, 0.048414114862680435, 0.09161726385354996, 0.06900935620069504, -0.1354367434978485, -0.03741276636719704, 0.037218235433101654, 0.028348352760076523, 0.07456894218921661, 0.025455543771386147, 0.1501486450433731, 0.04003177583217621, -0.13662020862102509, -0.0398927703499794, 0.15882138907909393, 0.047497257590293884, -0.05502253398299217, -0.20981371402740479, 0.007618664763867855, -0.0734950378537178, -0.02237190492451191, -0.034633517265319824, 0.03692162036895752, -0.01952948421239853, 0.05855882540345192, 0.01589939557015896, -0.09432762861251831, -0.04619612544775009, 0.07744620740413666, 0.03836573660373688, 0.025209687650203705, -0.0341811366379261, -0.0007460378110408783, 0.13022319972515106, 0.06480289250612259, -0.11986435949802399, -0.06428422778844833, -0.05709142982959747, -0.06096070259809494, -0.06895843148231506, 0.02695060707628727, 0.02633029967546463, 0.06348229199647903, 0.22324898838996887, 0.01009597722440958, 0.04575646296143532, 0.06235693022608757, 0.011047426611185074, 0.07351697236299515, 0.09667633473873138, -0.07803270220756531, -0.13410383462905884, -0.03704071417450905, 0.08871711790561676, -0.001428314484655857, -0.018310250714421272, -0.032383304089307785, 0.05257105827331543, 0.046262722462415695, 0.10080932825803757, 0.06879023462533951, -0.006164874881505966, -0.09632301330566406, -0.03717425465583801, 0.22807742655277252, -0.1472799926996231, 0.03218688443303108, 0.012025685980916023, -0.0311351977288723, -0.03647492825984955, 0.012345989234745502, 0.022025518119335175, -0.016035152599215508, 0.10485262423753738, -0.07634498178958893, -0.0333777591586113, -0.11545944958925247, -0.013090860098600388, 0.033208999782800674, 0.0418289452791214, -0.003927132580429316, -0.020822033286094666, -0.06321132928133011, -0.0684218555688858, 0.07406333088874817, -0.08301912993192673, -0.06622029840946198, -0.02260151319205761, -0.08103066682815552, 0.005466082599014044, 0.004831092432141304, 0.11987437307834625, -0.03258044272661209, 0.039812229573726654, -0.010277865454554558, 0.05478507652878761, 0.06959138810634613, 0.03300745412707329, -0.06707566976547241, 0.05891910567879677, -0.19063808023929596, 0.09883340448141098, -0.09230532497167587, 0.0333397276699543, -0.1478966325521469, -0.016407707706093788, 0.01946062222123146, 0.006799635011702776, 
0.023044534027576447, 0.13352946937084198, -0.23411902785301208, -0.012679706327617168, 0.1535266637802124, -0.09731769561767578, -0.10300175100564957, 0.05821754410862923, -0.05647696554660797, 0.11756439507007599, 0.03154715895652771, -0.04418950527906418, 0.053492870181798935, -0.13205592334270477, -0.03614269942045212, -0.03398403525352478, -0.008397487923502922, 0.12584136426448822, 0.09249116480350494, -0.0639057531952858, 0.0471506305038929, 0.018219849094748497, -0.025438562035560608, -0.03892004117369652, -0.053262826055288315, -0.12231963127851486, 0.00006324708374449983, -0.07355798780918121, 0.04749446362257004, -0.012435388751327991, -0.07492748647928238, -0.026519285514950752, -0.15891078114509583, 0.007735797669738531, 0.08379198610782623, 0.012965315952897072, -0.036779776215553284, -0.0989203080534935, 0.0035330886021256447, -0.016430748626589775, -0.03560171276330948, -0.14369948208332062, -0.028200481086969376, 0.025989973917603493, -0.1302594542503357, 0.02028120681643486, -0.08179106563329697, 0.05351174250245094, 0.02299949899315834, -0.051788754761219025, -0.017744097858667374, -0.01924981363117695, 0.025952951982617378, -0.04384128376841545, -0.23472441732883453, -0.015082319267094135, -0.036531608551740646, 0.14495772123336792, -0.22436930239200592, 0.03513672947883606, 0.0810585618019104, 0.12667764723300934, -0.014639037661254406, -0.0563817024230957, 0.02212301827967167, -0.06182097643613815, -0.03812996670603752, -0.05799267441034317, -0.02085295505821705, -0.019581357017159462, -0.06358040124177933, 0.007147657684981823, -0.11209635436534882, -0.026617875322699547, 0.10464783012866974, 0.08675404638051987, -0.16480417549610138, -0.025776371359825134, -0.037677016109228134, -0.07719165831804276, -0.08608120679855347, -0.05120616406202316, 0.13765504956245422, 0.051828932017087936, 0.03201717510819435, -0.08257603645324707, -0.07220751792192459, 0.006765816360712051, -0.023974573239684105, -0.037137582898139954, 0.1044045016169548, 0.07929576933383942, -0.09697019308805466, 0.09520182758569717, 0.07539892196655273, 0.021537233144044876, 0.09724269062280655, -0.015039133839309216, -0.11733134835958481, -0.0340077206492424, 0.032084450125694275, 0.007157749030739069, 0.1630782037973404, -0.09091456979513168, 0.061100952327251434, 0.034789662808179855, -0.027649521827697754, 0.046637266874313354, -0.10351830720901489, 0.017512699589133263, 0.015064891427755356, -0.013131558895111084, -0.009236587211489677, -0.04656469449400902, 0.018547989428043365, 0.07753179967403412, 0.03422388434410095, 0.036310143768787384, 0.0323043167591095, -0.033692073076963425, -0.12109572440385818, 0.19539831578731537, -0.10287494957447052, -0.236991748213768, -0.15323685109615326, 0.06077577918767929, 0.04295855015516281, -0.026038192212581635, 0.004987527150660753, -0.05383167788386345, -0.10589656233787537, -0.08792882412672043, -0.01053927093744278, 0.04785066097974777, -0.07204009592533112, -0.06366333365440369, 0.05279744043946266, 0.049490757286548615, -0.13419310748577118, 0.04309115558862686, 0.059587374329566956, -0.04424852877855301, 0.00488396268337965, 0.07220623642206192, 0.08881773799657822, 0.15522204339504242, -0.018344983458518982, -0.027959154918789864, 0.047993820160627365, 0.27168041467666626, -0.15121939778327942, 0.09327760338783264, 0.10117918998003006, -0.08418280631303787, 0.07685728371143341, 0.1843709796667099, 0.036007922142744064, -0.10989002138376236, 0.04522302374243736, 0.029765967279672623, -0.01665903441607952, -0.2674669623374939, -0.06281691044569016, 
0.0036664220970124006, -0.0939224362373352, 0.07248181849718094, 0.08207940310239792, 0.09626606851816177, 0.04803090542554855, -0.0687447190284729, -0.07138226926326752, 0.01623087376356125, 0.07634932547807693, -0.05433845520019531, -0.002983957063406706, 0.0818023756146431, -0.03277996554970741, 0.0027538968715816736, 0.10782653093338013, 0.015683511272072792, 0.1844581514596939, 0.055554334074258804, 0.1299397498369217, 0.09472199529409409, 0.09648213535547256, 0.004919606726616621, 0.024016931653022766, 0.015392686240375042, 0.014427910558879375, 0.002882955828681588, -0.08375448733568192, 0.016129394993185997, 0.11569655686616898, 0.064411960542202, 0.05467773973941803, 0.018027320504188538, -0.05603310838341713, 0.06502263993024826, 0.18050804734230042, -0.007573777809739113, -0.20701448619365692, -0.06736133992671967, 0.07109374552965164, -0.07983569800853729, -0.11622301489114761, -0.021042203530669212, 0.05831141024827957, -0.16299711167812347, 0.014293200336396694, -0.037661243230104446, 0.09236827492713928, -0.09770718216896057, -0.040240928530693054, 0.0595686249434948, 0.0740017294883728, -0.024651488289237022, 0.08112755417823792, -0.19088971614837646, 0.13402847945690155, 0.016040263697504997, 0.077420674264431, -0.09839481860399246, 0.10509619116783142, 0.0022027704399079084, 0.007473956793546677, 0.1468619406223297, 0.001855440204963088, -0.03461270406842232, -0.06153382360935211, -0.11023696511983871, -0.00026473801699467003, 0.08436431735754013, -0.11318220943212509, 0.0670003592967987, 0.0024130011443048716, -0.020097222179174423, 0.007977040484547615, -0.09419239312410355, -0.15000036358833313, -0.17252333462238312, 0.06222698092460632, -0.12387669831514359, 0.058711059391498566, -0.10565371811389923, -0.07456890493631363, -0.02337505668401718, 0.18680724501609802, -0.20861488580703735, -0.06620422005653381, -0.13520993292331696, -0.09381449967622757, 0.1764594465494156, -0.03494150936603546, 0.07459726184606552, 0.015927979722619057, 0.1846771538257599, 0.02724882774055004, 0.014447649009525776, 0.09384473413228989, -0.08363960683345795, -0.18989385664463043, -0.07357007265090942, 0.14232409000396729, 0.1475888043642044, 0.04966476559638977, -0.006959696765989065, 0.009804547764360905, -0.05050967261195183, -0.12295698374509811, 0.002465814119204879, 0.12238103151321411, 0.09479325264692307, 0.00855171773582697, -0.01648709736764431, -0.13197098672389984, -0.06967354565858841, -0.062116462737321854, 0.019893741235136986, 0.16498249769210815, -0.07022097706794739, 0.13884463906288147, 0.1206422746181488, -0.05434200540184975, -0.19551563262939453, 0.053643301129341125, 0.06134827062487602, 0.023144276812672615, 0.04906807094812393, -0.1781323403120041, 0.09857498854398727, 0.04407969489693642, -0.04919685423374176, 0.12257903814315796, -0.15099003911018372, -0.15071389079093933, 0.08303117752075195, 0.06405438482761383, -0.25319045782089233, -0.11600618064403534, -0.09364945441484451, -0.04573241248726845, -0.12496110796928406, 0.07256893068552017, 0.007495630532503128, 0.007144811097532511, 0.04490414261817932, 0.03770243749022484, 0.008325127884745598, -0.050644513219594955, 0.20487858355045319, 0.0057704513892531395, 0.0402144156396389, -0.04839617758989334, -0.10181401669979095, 0.03371268883347511, -0.04098241776227951, 0.09157326817512512, -0.011992569081485271, 0.02154570072889328, -0.10948482900857925, -0.04639375954866409, -0.05473196879029274, 0.02792687900364399, -0.09519266337156296, -0.09103900194168091, -0.05220866575837135, 0.09898161143064499, 
0.06986842304468155, -0.038128066807985306, -0.012423954904079437, -0.07907009869813919, 0.03671106696128845, 0.16537977755069733, 0.20515747368335724, 0.048061151057481766, -0.06858588755130768, 0.009847307577729225, -0.02182246558368206, 0.044403012841939926, -0.23946240544319153, 0.05472627282142639, 0.05017027258872986, 0.023067638278007507, 0.11525321751832962, -0.03279544785618782, -0.15473903715610504, -0.050580721348524094, 0.06996221840381622, -0.045386672019958496, -0.16568441689014435, -0.014369496144354343, 0.046099789440631866, -0.1999610811471939, -0.023538677021861076, 0.004710236564278603, -0.022153059020638466, -0.04214455559849739, 0.005329470615833998, 0.07963108271360397, -0.014832615852355957, 0.13095399737358093, 0.07519114762544632, 0.09187228232622147, -0.10941551625728607, 0.07681170850992203, 0.06897353380918503, -0.06343842297792435, 0.017632456496357918, 0.05868948996067047, -0.04105812683701515, -0.030464379116892815, 0.0690031498670578, 0.07430510967969894, 0.05728144198656082, -0.051104314625263214, -0.015491952188313007, -0.070621058344841, 0.05680634453892708, 0.12991592288017273, 0.04723229259252548, 0.0167965367436409, 0.04434245452284813, 0.01753508858382702, -0.0902186706662178, 0.10201701521873474, 0.051966696977615356, 0.028950797393918037, -0.042541638016700745, -0.006696081720292568, 0.018438756465911865, -0.023196138441562653, -0.013085066340863705, -0.012512787245213985, -0.07475725561380386, -0.015385263599455357, -0.12954148650169373, 0.03248703479766846, -0.08566007763147354, 0.020317625254392624, 0.02587796188890934, -0.052053600549697876, -0.009195448830723763, 0.017017260193824768, -0.06892591714859009, -0.04076220095157623, 0.0011734741274267435, 0.1175701692700386, -0.12343347817659378, 0.04246532544493675, 0.08628155291080475, -0.10348173975944519, 0.07520393282175064, -0.002901942003518343, 0.007347529754042625, 0.019740121439099312, -0.19213968515396118, 0.0766318291425705, -0.013337422162294388, 0.001053944113664329, 0.024117030203342438, -0.20952242612838745, -0.006080904044210911, -0.031189080327749252, -0.027666667476296425, 0.005164641886949539, -0.03985965996980667, -0.13107310235500336, 0.08205577731132507, -0.003765275701880455, -0.08665616065263748, -0.025787852704524994, 0.03258509933948517, 0.1278361976146698, -0.044140275567770004, 0.15561449527740479, -0.011077998206019402, 0.06383583694696426, -0.16799122095108032, -0.010091759264469147, -0.02306237630546093, 0.032815951853990555, -0.04741745442152023, -0.010609818622469902, 0.0538332536816597, -0.026015304028987885, 0.22677162289619446, -0.04341892898082733, 0.07405807077884674, 0.05401983857154846, 0.026722421869635582, -0.013983123935759068, 0.09975995123386383, 0.08531851321458817, -0.00022332010848913342, 0.015227331779897213, 0.016572358086705208, -0.01701829768717289, -0.03908390551805496, -0.16895601153373718, 0.04658033698797226, 0.15717346966266632, 0.03637083247303963, 0.015508216805756092, 0.06160755828022957, -0.10177727043628693, -0.07265425473451614, 0.12203750014305115, -0.017118535935878754, -0.03624684363603592, -0.06423043459653854, 0.1324765831232071, 0.11684929579496384, -0.18669357895851135, 0.06490632891654968, -0.0661192536354065, -0.07500117272138596, -0.09379434585571289, -0.12784923613071442, -0.06441590189933777, -0.03161323070526123, -0.010680772364139557, -0.07526928931474686, 0.0449347086250782, 0.09325391054153442, 0.002365459455177188, -0.02694730833172798, 0.11241437494754791, -0.0021750282030552626, -0.014465109445154667, 
0.032511964440345764, 0.06701865792274475, 0.02804410457611084, -0.10059960931539536, 0.013631683774292469, 0.002950716530904174, 0.03361235558986664, 0.05961449816823006, 0.010869694873690605, -0.03246646374464035, -0.007698001805692911, -0.017008360475301743, -0.10968323051929474, 0.04155507683753967, -0.034518640488386154, -0.04464557394385338, 0.1252526491880417, 0.020641658455133438, 0.008879609405994415, -0.02171161212027073, 0.20313842594623566, -0.07258057594299316, -0.0738096535205841, -0.1728564351797104, 0.062766894698143, -0.05858193337917328, 0.04940785840153694, 0.0489935465157032, -0.11174070090055466, 0.03194813430309296, 0.13267196714878082, 0.11266837269067764, -0.015657298266887665, 0.007065607234835625, 0.04294498637318611, 0.00014571128122042865, -0.0451299287378788, 0.02464902028441429, 0.04020513594150543, 0.09140980988740921, -0.05559609830379486, 0.08049421012401581, -0.010536282323300838, -0.07865223288536072, 0.0037837009876966476, 0.10551504045724869, -0.007181053515523672, 0.011449815705418587, -0.0747838169336319, 0.14375869929790497, -0.058741774410009384, -0.22183655202388763, 0.05389891564846039, -0.07663926482200623, -0.16971123218536377, -0.023989642038941383, 0.03070111572742462, -0.016809269785881042, 0.015062140300869942, 0.08561712503433228, -0.05036556348204613, 0.1844942271709442, 0.045382674783468246, -0.06988335400819778, -0.06017757207155228, 0.05923645570874214, -0.09433206915855408, 0.29170510172843933, 0.013179867528378963, 0.05626986175775528, 0.1047554537653923, -0.011018817313015461, -0.12850576639175415, 0.04194965958595276, 0.09605628252029419, -0.06789332628250122, 0.0787057876586914, 0.16616679728031158, -0.0014761330094188452, 0.15168112516403198, 0.0679514929652214, -0.05077531561255455, 0.04515787586569786, -0.11450556665658951, -0.05934550240635872, -0.10349568724632263, 0.08500123769044876, -0.06933093816041946, 0.15488605201244354, 0.12824289500713348, -0.06672835350036621, -0.010444164276123047, -0.019755391404032707, 0.08562184125185013, -0.005187328439205885, 0.12809395790100098, 0.00486714718863368, -0.20649944245815277, 0.019284283742308617, -0.00893302634358406, 0.10000060498714447, -0.21354320645332336, -0.058656685054302216, 0.05663366615772247, -0.024775996804237366, -0.05987556278705597, 0.1054890975356102, 0.05611120164394379, 0.0476556159555912, -0.035782475024461746, -0.034787487238645554, -0.013842964544892311, 0.1347373127937317, -0.10338430851697922, -0.013065258972346783 ]
null
null
transformers
# Model Card

## Overview

This document provides details about the training process and performance metrics for a machine learning model. The model is trained for token classification, and the following table summarizes its performance at different training steps.

## Performance Metrics

| Step | Training Loss | Validation Loss | Precision | Recall | F1 | Accuracy |
|------|---------------|-----------------|-----------|--------|----|----------|
| 100  | No log        | 0.353176        | 0.756010  | 0.763658 | 0.759815 | 0.937457 |
| 200  | No log        | 0.350509        | 0.736538  | 0.774990 | 0.755275 | 0.935063 |
| 300  | No log        | 0.347303        | 0.767657  | 0.774180 | 0.770905 | 0.939078 |
| 400  | No log        | 0.367788        | 0.742923  | 0.785917 | 0.763815 | 0.932747 |
| 500  | 0.057400      | 0.370828        | 0.741564  | 0.782679 | 0.761567 | 0.936376 |
| 600  | 0.057400      | 0.386560        | 0.742563  | 0.787940 | 0.764579 | 0.932669 |
| 700  | 0.057400      | 0.360685        | 0.759093  | 0.785512 | 0.772076 | 0.940700 |
| 800  | 0.057400      | 0.361319        | 0.718589  | 0.791582 | 0.753322 | 0.931125 |
| 900  | 0.057400      | 0.374558        | 0.752726  | 0.782274 | 0.767216 | 0.935681 |
| 1000 | 0.038600      | 0.388341        | 0.732331  | 0.788345 | 0.759306 | 0.934445 |
| 1100 | 0.038600      | 0.379793        | 0.742737  | 0.786321 | 0.763908 | 0.934831 |
| 1200 | 0.038600      | 0.349414        | 0.764706  | 0.768110 | 0.766404 | 0.941163 |
| 1300 | 0.038600      | 0.377967        | 0.750688  | 0.772562 | 0.761468 | 0.937225 |
| 1400 | 0.038600      | 0.377874        | 0.746738  | 0.787535 | 0.766594 | 0.935603 |
| 1500 | 0.028400      | 0.377598        | 0.748765  | 0.797248 | 0.772246 | 0.936762 |
| 1600 | 0.028400      | 0.381301        | 0.745503  | 0.788345 | 0.766326 | 0.936376 |
| 1700 | 0.028400      | 0.393123        | 0.746426  | 0.802914 | 0.773640 | 0.934986 |
| 1800 | 0.028400      | 0.363858        | 0.775276  | 0.794415 | 0.784729 | 0.943016 |
| 1900 | 0.028400      | 0.382723        | 0.749618  | 0.794820 | 0.771558 | 0.938846 |
| 2000 | 0.020700      | 0.402254        | 0.745601  | 0.788749 | 0.766568 | 0.935758 |
| 2100 | 0.020700      | 0.394921        | 0.760827  | 0.789154 | 0.774732 | 0.938306 |
| 2200 | 0.020700      | 0.389430        | 0.755573  | 0.795629 | 0.775084 | 0.938538 |
| 2300 | 0.020700      | 0.400747        | 0.752783  | 0.793606 | 0.772656 | 0.937379 |
| 2400 | 0.020700      | 0.416119        | 0.751634  | 0.791178 | 0.770899 | 0.935989 |
| 2500 | 0.017600      | 0.405485        | 0.754724  | 0.791987 | 0.772907 | 0.937457 |
| 2600 | 0.017600      | 0.404488        | 0.754906  | 0.794011 | 0.773964 | 0.937534 |
| 2700 | 0.017600      | 0.399771        | 0.755590  | 0.793201 | 0.773939 | 0.937225 |
| 2800 | 0.017600      | 0.401973        | 0.757764  | 0.789964 | 0.773529 | 0.937765 |
| 2900 | 0.017600      | 0.401310        | 0.759846  | 0.796439 | 0.777712 | 0.938615 |
| 3000 | 0.013900      | 0.400733        | 0.761535  | 0.794820 | 0.777822 | 0.938769 |
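For orientation, a minimal inference sketch for this LayoutLMv3 token-classification checkpoint follows. It assumes the repository ships processor files (otherwise the `microsoft/layoutlmv3-base` processor can be substituted); the example image, words, and 0-1000 normalized bounding boxes are illustrative placeholders, not real data.

```python
# Minimal inference sketch for a LayoutLMv3 token-classification checkpoint.
# Assumptions: the repo id below ships processor files (otherwise substitute
# the "microsoft/layoutlmv3-base" processor); words/boxes are placeholders.
from PIL import Image
from transformers import AutoProcessor, LayoutLMv3ForTokenClassification

repo_id = "DataIntelligenceTeam/Tansport1.4"
processor = AutoProcessor.from_pretrained(repo_id, apply_ocr=False)
model = LayoutLMv3ForTokenClassification.from_pretrained(repo_id)

image = Image.new("RGB", (1000, 1000), "white")  # stand-in for a document scan
words = ["Invoice", "No.", "12345"]              # placeholder OCR words
boxes = [[50, 50, 200, 80], [210, 50, 260, 80], [270, 50, 380, 80]]  # 0-1000 coords

inputs = processor(image, words, boxes=boxes, return_tensors="pt")
logits = model(**inputs).logits                  # (batch, seq_len, num_labels)
predicted_ids = logits.argmax(-1)
labels = [model.config.id2label[i.item()] for i in predicted_ids[0]]
```

Note that the predictions above are per subword token; in practice they are usually mapped back to word level before reading off entity labels.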
{}
token-classification
DataIntelligenceTeam/Tansport1.4
[ "transformers", "pytorch", "layoutlmv3", "token-classification", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T18:42:03+00:00
[]
[]
TAGS #transformers #pytorch #layoutlmv3 #token-classification #autotrain_compatible #endpoints_compatible #region-us
Model Card
==========

Overview
--------

This document provides details about the training process and performance metrics for a machine learning model. The model is trained for token classification, and the following table summarizes its performance at different training steps.

Performance Metrics
-------------------
[]
[ "TAGS\n#transformers #pytorch #layoutlmv3 #token-classification #autotrain_compatible #endpoints_compatible #region-us \n" ]
[ 41 ]
[ "passage: TAGS\n#transformers #pytorch #layoutlmv3 #token-classification #autotrain_compatible #endpoints_compatible #region-us \n" ]
[ -0.05842987820506096, 0.029088284820318222, -0.008562782779335976, 0.03329263627529144, 0.17634959518909454, 0.03315509855747223, 0.06917909532785416, 0.09448128938674927, 0.005243215244263411, -0.029991144314408302, 0.12003349512815475, 0.2599136531352997, -0.028157811611890793, 0.12456515431404114, -0.08913734555244446, -0.2729027271270752, 0.05605781450867653, 0.07774190604686737, -0.03943677991628647, 0.10471943020820618, 0.09239821135997772, -0.09639572352170944, 0.08550889045000076, -0.02279028296470642, -0.1575535237789154, 0.03470994159579277, 0.024259069934487343, -0.11596566438674927, 0.09221235662698746, 0.03854667767882347, 0.16897600889205933, 0.0302426815032959, -0.028854189440608025, -0.12862727046012878, 0.02786851115524769, 0.025461025536060333, -0.06541597843170166, 0.06483285129070282, 0.09300833940505981, -0.07292735576629639, -0.0022038377355784178, 0.02544694021344185, 0.025702519342303276, 0.04426179826259613, -0.11810854077339172, -0.11124643683433533, -0.02789524383842945, 0.0635528638958931, 0.06559757143259048, 0.033594269305467606, 0.04122185334563255, 0.2153013050556183, -0.13315749168395996, 0.11704901605844498, 0.12341045588254929, -0.27933767437934875, -0.009441232308745384, 0.15733066201210022, 0.002357209101319313, -0.029523484408855438, -0.019298458471894264, 0.04512292146682739, 0.021576153114438057, 0.024360155686736107, 0.028172604739665985, -0.072011798620224, -0.09578248113393784, 0.02863670140504837, -0.09545274078845978, -0.028905397281050682, 0.20640307664871216, -0.031616512686014175, 0.06414664536714554, -0.011602483689785004, -0.10042209923267365, -0.03754862770438194, -0.022720545530319214, 0.0070870486088097095, -0.02902892418205738, 0.03563262149691582, 0.0061128935776650906, 0.024671461433172226, -0.10043221712112427, 0.015161139890551567, -0.2221299707889557, 0.2236221730709076, 0.023352844640612602, 0.07321427762508392, -0.16328775882720947, 0.0708920881152153, 0.012147938832640648, -0.09485683590173721, 0.04334615170955658, -0.10780845582485199, -0.0008309191907756031, -0.0552465058863163, -0.027577294036746025, 0.017312832176685333, 0.07980318367481232, 0.12397755682468414, 0.09365394711494446, 0.0471283495426178, 0.004611727315932512, 0.08818131685256958, 0.032559193670749664, 0.10351098328828812, -0.011156813241541386, -0.031381379812955856, 0.048949457705020905, -0.11301303654909134, 0.010197554714977741, -0.04418937861919403, -0.13167151808738708, -0.04408419504761696, 0.07013949006795883, 0.10765870660543442, 0.0232942346483469, 0.07247751951217651, -0.06978064775466919, -0.05615411326289177, 0.10622340440750122, -0.07709866762161255, 0.017067324370145798, -0.0009451101068407297, 0.014077739790081978, 0.12517668306827545, -0.011925293132662773, 0.0027391312178224325, -0.04697488248348236, 0.11625555902719498, -0.05861281231045723, 0.001256277784705162, -0.045226357877254486, -0.06738252192735672, 0.04326161369681358, -0.14476247131824493, 0.04560381546616554, -0.1675003319978714, -0.1180180162191391, 0.03132295235991478, 0.03875334560871124, 0.008039931766688824, -0.03893275931477547, 0.019622717052698135, 0.000291106611257419, -0.00038803817005828023, -0.061996519565582275, -0.01373910903930664, -0.053321827203035355, 0.0594942569732666, -0.006682501174509525, 0.058026302605867386, -0.08320728689432144, 0.06064118444919586, -0.09743102639913559, 0.021048296242952347, -0.1076643317937851, 0.0013946181861683726, -0.059198834002017975, 0.16992251574993134, -0.029822757467627525, -0.06846482306718826, -0.035779163241386414, 
0.012289733625948429, -0.04486578702926636, 0.11181051284074783, -0.08661448955535889, -0.10576072335243225, 0.1256287693977356, -0.11430133134126663, -0.1311517208814621, 0.06055986508727074, 0.006881935056298971, -0.01771351881325245, 0.06398387253284454, 0.09930219501256943, 0.12037161737680435, -0.03478673845529556, 0.05309829115867615, 0.120406873524189, -0.1481243073940277, -0.1545179784297943, 0.014849832281470299, 0.011205773800611496, -0.12122143805027008, 0.059529658406972885, 0.06338345259428024, 0.07836046069860458, -0.0705798864364624, -0.04258683696389198, -0.040258947759866714, -0.016805022954940796, 0.09802677482366562, 0.058017127215862274, 0.093984454870224, -0.04611242190003395, 0.026842687278985977, 0.04143494367599487, 0.03122013993561268, 0.033550843596458435, 0.012540578842163086, -0.08492173254489899, 0.11681190133094788, -0.07401198148727417, 0.009932569228112698, -0.1934804469347, -0.12749773263931274, 0.005530590657144785, 0.06846266239881516, -0.03456025570631027, 0.1347837895154953, 0.07085654884576797, -0.0358145534992218, -0.0024269302375614643, -0.030109215527772903, 0.15866506099700928, 0.03295166790485382, -0.06394322961568832, -0.10212380439043045, -0.0033015317749232054, -0.06916150450706482, -0.04317079484462738, -0.03612659126520157, 0.014639539644122124, 0.0717584416270256, 0.1658264398574829, 0.014317180961370468, 0.07462476938962936, -0.010788490064442158, 0.05819890648126602, -0.06441567838191986, -0.007170400582253933, 0.11503611505031586, -0.010860664770007133, -0.05117249861359596, 0.1187892034649849, -0.12213227152824402, 0.3324744403362274, 0.18587535619735718, -0.27610212564468384, 0.012659362517297268, -0.04830966144800186, -0.023013103753328323, 0.000913769647013396, 0.027462603524327278, 0.05007627233862877, 0.032586470246315, 0.015100776217877865, 0.1648935228586197, -0.015940159559249878, -0.04889090731739998, 0.013935688883066177, -0.05830413103103638, -0.05009309947490692, 0.07311221957206726, 0.10443830490112305, -0.19209635257720947, 0.17369866371154785, 0.2135159820318222, 0.005777793936431408, 0.09526185691356659, -0.01607547700405121, 0.02474735490977764, 0.036721471697092056, -0.03619495406746864, -0.027413150295615196, -0.009031220339238644, -0.1648416668176651, -0.03386751934885979, 0.08719892054796219, 0.029274728149175644, 0.058728959411382675, -0.12790662050247192, -0.023152491077780724, 0.016393695026636124, 0.04073629528284073, -0.006699309218674898, 0.10421966761350632, 0.04923713579773903, 0.0767088234424591, -0.01093790028244257, -0.08596242219209671, 0.11145079135894775, 0.005096756853163242, -0.06529226899147034, 0.16203662753105164, -0.1295882910490036, -0.2975495755672455, -0.14292581379413605, -0.20303001999855042, -0.060360971838235855, 0.038015227764844894, 0.049597401171922684, -0.10446422547101974, -0.06334210187196732, 0.06435145437717438, -0.0010608163429424167, -0.05817916616797447, 0.06835747510194778, -0.03653942793607712, 0.0764976516366005, -0.024003487080335617, -0.07108154892921448, -0.05642031505703926, -0.04253562167286873, -0.025976017117500305, 0.12950022518634796, -0.07567308843135834, 0.06679273396730423, 0.16258195042610168, -0.0072227586060762405, 0.06628802418708801, -0.012176484800875187, 0.14821162819862366, -0.04025798290967941, -0.016866879537701607, 0.22301402688026428, -0.05008156970143318, 0.08242775499820709, 0.15391094982624054, 0.044616155326366425, -0.044810861349105835, 0.0020214139949530363, -0.041652798652648926, -0.11299555748701096, -0.19127117097377777, -0.14225256443023682, 
-0.12384692579507828, 0.025709573179483414, 0.06436899304389954, 0.06406375765800476, 0.10994330793619156, 0.10601445287466049, 0.02972625568509102, 0.029432622715830803, -0.05128905549645424, 0.07655670493841171, 0.23728345334529877, -0.0000042849114834098145, 0.13939988613128662, -0.06751152873039246, -0.12641963362693787, 0.07676400244235992, 0.0680006816983223, 0.15934541821479797, 0.08741538971662521, -0.030599312856793404, 0.03004130721092224, 0.13223978877067566, 0.16666559875011444, 0.13147641718387604, 0.0172983817756176, -0.04347218945622444, 0.005666645243763924, -0.0005525111919268966, -0.04537316411733627, 0.0069400351494550705, 0.11681798100471497, -0.10958881676197052, -0.05255940929055214, -0.0829748809337616, 0.07637031376361847, 0.10343170166015625, 0.05019993335008621, -0.22899805009365082, 0.02859838306903839, 0.07619646191596985, 0.0007249799091368914, -0.06915795803070068, 0.04589598998427391, -0.06778296083211899, -0.14137886464595795, 0.10049290955066681, -0.04626782611012459, 0.11142885684967041, -0.08509784936904907, 0.049293868243694305, -0.0014804508537054062, -0.03817348927259445, 0.03998064994812012, 0.10721785575151443, -0.24703827500343323, 0.23439356684684753, 0.012023345567286015, -0.06721705198287964, -0.08144742250442505, 0.0010778045980259776, 0.04226178675889969, 0.20771607756614685, 0.0569334402680397, 0.01390070654451847, -0.09396513551473618, -0.19446809589862823, -0.04219942167401314, 0.013581831939518452, 0.08051809668540955, -1.3245476937484568e-9, -0.020530162379145622, -0.03783658519387245, -0.02520790323615074, -0.019129883497953415, -0.03307904675602913, -0.000980295124463737, -0.1252846121788025, 0.059328239411115646, 0.050175633281469345, 0.018366480246186256, 0.005092106759548187, -0.06752552092075348, -0.12998686730861664, 0.23356346786022186, -0.07654207944869995, -0.07707364112138748, -0.12774740159511566, -0.09355377405881882, 0.0730544850230217, -0.08372336626052856, 0.07247837632894516, -0.09573674947023392, 0.03773335739970207, -0.029476644471287727, -0.2031322717666626, 0.12677420675754547, -0.13253432512283325, -0.036765675991773605, -0.061027586460113525, 0.14224150776863098, -0.09830161184072495, 0.0142179224640131, 0.014842793345451355, 0.017073776572942734, -0.07858040928840637, -0.08964131772518158, 0.011023011058568954, 0.029741711914539337, 0.043360497802495956, 0.056206606328487396, -0.06003091111779213, -0.03562464565038681, 0.014990865252912045, 0.03151271119713783, 0.24584107100963593, 0.18483322858810425, -0.07162967324256897, 0.11678566038608551, 0.11816679686307907, -0.04374634847044945, -0.31539151072502136, -0.06700664013624191, -0.10319915413856506, -0.04226698353886604, -0.02966504916548729, -0.1327025294303894, 0.14224043488502502, 0.02689075469970703, -0.03788989782333374, 0.07430906593799591, -0.15896058082580566, -0.08392816036939621, 0.1988213062286377, 0.007268242072314024, 0.35099518299102783, -0.07222215086221695, -0.0756809413433075, -0.0392087884247303, -0.15286937355995178, 0.09148100018501282, 0.008033244870603085, 0.07179157435894012, -0.04517575725913048, 0.036434464156627655, 0.03509733825922012, -0.06548847258090973, 0.10288472473621368, 0.034449655562639236, 0.051788728684186935, -0.0997864305973053, -0.08547107130289078, 0.04402453452348709, -0.03513065353035927, 0.011420156806707382, 0.04038114473223686, 0.0344565324485302, -0.12713268399238586, -0.015901708975434303, -0.06756243854761124, 0.08546020090579987, 0.04175609350204468, -0.055554814636707306, 0.007565491832792759, 
-0.025990264490246773, -0.007164922077208757, -0.005450709722936153, 0.25374284386634827, 0.017356621101498604, 0.12485659867525101, 0.13505657017230988, 0.10089316964149475, -0.17584560811519623, -0.03894687071442604, -0.08256591111421585, -0.06034725531935692, 0.0755295529961586, -0.03847983852028847, 0.0758291482925415, 0.13000251352787018, -0.03412541374564171, 0.03351049870252609, 0.11421653628349304, 0.05544406920671463, -0.0453634113073349, 0.15432514250278473, -0.1914745271205902, 0.05468768998980522, -0.02863403968513012, -0.006239918060600758, 0.04628654941916466, 0.13127152621746063, 0.1105322316288948, 0.045087799429893494, -0.02252943441271782, 0.0073934742249548435, -0.011352303437888622, -0.047917816787958145, 0.08861474692821503, 0.07893145084381104, 0.04890316724777222, -0.1491146832704544, 0.05204348638653755, 0.022121910005807877, -0.09706408530473709, -0.05732299014925957, 0.06954777240753174, -0.15959618985652924, -0.11165530234575272, 0.00862200278788805, 0.10018762946128845, -0.13472139835357666, -0.060425058007240295, -0.058866437524557114, -0.13346174359321594, 0.07318714261054993, 0.1556529998779297, 0.13088908791542053, 0.09481152147054672, -0.04940785467624664, -0.04623425379395485, -0.029606061056256294, -0.019341250881552696, 0.006211422383785248, 0.06417026370763779, -0.1926816701889038, 0.027819672599434853, -0.006545062642544508, 0.16417711973190308, -0.09598220884799957, -0.06972640007734299, -0.13747429847717285, 0.05198216065764427, -0.07868348062038422, -0.07070986181497574, -0.0989070013165474, -0.014458774589002132, 0.02296004258096218, -0.06207510456442833, -0.04596744477748871, -0.01523115485906601, -0.10934937000274658, 0.05052758753299713, 0.010426066815853119, 0.03545952960848808, -0.05018249526619911, -0.04215455800294876, 0.07951279729604721, -0.04034959897398949, 0.0849108025431633, 0.07868839055299759, -0.059932537376880646, 0.06658272445201874, -0.08153828978538513, -0.13970105350017548, 0.13981185853481293, 0.04125591740012169, 0.0963112860918045, 0.023066362366080284, 0.037820905447006226, 0.07398231327533722, 0.013330389745533466, 0.054001204669475555, 0.06611470133066177, -0.12442014366388321, 0.03447017818689346, -0.05441206693649292, -0.15491019189357758, -0.02610139548778534, -0.05583927407860756, 0.09902085363864899, 0.01580265909433365, 0.15417777001857758, -0.007655168883502483, 0.07336536049842834, -0.06069933623075485, 0.0008945160661824048, -0.03172742947936058, -0.1910085827112198, -0.0392284132540226, -0.046758949756622314, 0.013974569737911224, -0.0062415883876383305, 0.26210686564445496, 0.04217608645558357, 0.017791789025068283, 0.039412833750247955, 0.10042846202850342, -0.005816352553665638, 0.017155352979898453, 0.15451841056346893, 0.09339843690395355, -0.016364218667149544, -0.03297578543424606, 0.09053441137075424, 0.016717616468667984, -0.0785338282585144, 0.11073700338602066, 0.037774860858917236, -0.04970870167016983, 0.05419628322124481, 0.03054717183113098, 0.01094057783484459, -0.15401524305343628, -0.16314832866191864, -0.05543301999568939, 0.08048547804355621, 0.02151195891201496, 0.01805965043604374, 0.11031994223594666, -0.025279510766267776, 0.0264876876026392, -0.018105538561940193, -0.02958604507148266, -0.18371866643428802, -0.12029705196619034, -0.09314748644828796, -0.12122201919555664, 0.012614899314939976, -0.040332552045583725, -0.03163900226354599, 0.11839688569307327, 0.05518322065472603, -0.02358686365187168, 0.0584230050444603, 0.0076956008560955524, -0.009202918969094753, 0.011860202066600323, 
-0.01504017785191536, 0.007687640376389027, -0.013904251158237457, -0.01268638763576746, -0.14445114135742188, -0.033765897154808044, -0.043577730655670166, 0.0028797441627830267, -0.051480982452631, 0.023843731731176376, -0.09928517043590546, -0.11466912925243378, -0.03983224928379059, 0.02524578385055065, -0.05971711501479149, 0.09641474485397339, -0.0076262750662863255, 0.02541942149400711, 0.02049199305474758, 0.13633836805820465, -0.0686863586306572, -0.08508433401584625, -0.049291882663965225, 0.24145472049713135, 0.05487219616770744, 0.09114869683980942, 0.0032963731791824102, 0.014433035627007484, -0.07825903594493866, 0.28289514780044556, 0.27387815713882446, -0.03375861793756485, 0.05475667491555214, 0.019878307357430458, 0.015066244639456272, 0.08162862062454224, 0.13105551898479462, 0.07568486779928207, 0.20676189661026, -0.08085738122463226, -0.046831607818603516, -0.04867939278483391, -0.018360894173383713, -0.11142048239707947, 0.03519108146429062, 0.040170516818761826, -0.046923425048589706, -0.05816158279776573, 0.0781402513384819, -0.16776002943515778, 0.18596151471138, 0.052347321063280106, -0.17248034477233887, -0.07827642560005188, -0.029529646039009094, 0.14031770825386047, -0.004347537644207478, 0.06756298989057541, -0.044917743653059006, -0.08127658814191818, 0.04787087067961693, 0.00863921269774437, -0.22413308918476105, -0.06160663440823555, 0.08792616426944733, 0.02173013426363468, 0.016075367107987404, -0.03363373503088951, 0.06597783416509628, 0.0793713852763176, 0.05454283952713013, -0.05851883441209793, 0.024158885702490807, -0.006834599655121565, -0.07853610068559647, -0.006942293606698513, -0.008702214807271957, 0.006055486388504505, -0.06613104790449142, 0.02985510788857937, -0.13799434900283813, 0.024292796850204468, -0.08123404532670975, -0.011166258715093136, -0.016700686886906624, 0.02755938656628132, -0.01744285225868225, 0.06182042136788368, 0.070574551820755, 0.004118906334042549, -0.04524312540888786, -0.06743666529655457, -0.008379030041396618, 0.03568967804312706, -0.1189434826374054, -0.1320895403623581, -0.0862053856253624, -0.047328799962997437, 0.06322986632585526, -0.00744559895247221, -0.05765635147690773, -0.04867252707481384, -0.09802369028329849, 0.006103093270212412, -0.14418411254882812, 0.06643754243850708, 0.06020033732056618, 0.036168716847896576, -0.007334086578339338, -0.024278830736875534, 0.01983426697552204, 0.036898236721754074, -0.13910065591335297, -0.09875195473432541 ]
null
null
null
# **Reinforce** Agent playing **Pixelcopter-PLE-v0**

This is a trained model of a **Reinforce** agent playing **Pixelcopter-PLE-v0**.
To learn to use this model and train your own, check Unit 4 of the Deep Reinforcement Learning Course: https://huggingface.co/deep-rl-course/unit4/introduction
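A minimal REINFORCE training loop is sketched below for orientation. It is a generic policy-gradient sketch, not the course's implementation: the Gymnasium environment (`CartPole-v1` stands in for Pixelcopter-PLE-v0, which requires the PyGame Learning Environment) and every hyperparameter are illustrative assumptions.

```python
# Minimal REINFORCE sketch (Monte-Carlo policy gradient).
# Assumptions: environment, network size, and hyperparameters are
# illustrative stand-ins, not the settings used for this checkpoint.
import gymnasium as gym
import torch
import torch.nn as nn

env = gym.make("CartPole-v1")
obs_dim = env.observation_space.shape[0]
n_actions = env.action_space.n

policy = nn.Sequential(nn.Linear(obs_dim, 64), nn.ReLU(), nn.Linear(64, n_actions))
optimizer = torch.optim.Adam(policy.parameters(), lr=1e-3)
gamma = 0.99

for episode in range(500):
    obs, _ = env.reset()
    log_probs, rewards = [], []
    done = False
    while not done:
        # Sample an action from the current stochastic policy.
        logits = policy(torch.as_tensor(obs, dtype=torch.float32))
        dist = torch.distributions.Categorical(logits=logits)
        action = dist.sample()
        log_probs.append(dist.log_prob(action))
        obs, reward, terminated, truncated, _ = env.step(action.item())
        rewards.append(reward)
        done = terminated or truncated
    # Discounted Monte-Carlo returns, computed backwards through the episode.
    returns, g = [], 0.0
    for r in reversed(rewards):
        g = r + gamma * g
        returns.append(g)
    returns = torch.tensor(list(reversed(returns)), dtype=torch.float32)
    returns = (returns - returns.mean()) / (returns.std() + 1e-8)  # variance reduction
    loss = -(torch.stack(log_probs) * returns).sum()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
```

Normalizing the Monte-Carlo returns, as above, is a common variance-reduction choice for REINFORCE's otherwise high-variance gradient estimate.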
{"tags": ["Pixelcopter-PLE-v0", "reinforce", "reinforcement-learning", "custom-implementation", "deep-rl-class"], "model-index": [{"name": "Reinforce-Pixelcopter-PLE-v0", "results": [{"task": {"type": "reinforcement-learning", "name": "reinforcement-learning"}, "dataset": {"name": "Pixelcopter-PLE-v0", "type": "Pixelcopter-PLE-v0"}, "metrics": [{"type": "mean_reward", "value": "17.10 +/- 15.62", "name": "mean_reward", "verified": false}]}]}]}
reinforcement-learning
AF6ECHO/Reinforce-Pixelcopter-PLE-v0
[ "Pixelcopter-PLE-v0", "reinforce", "reinforcement-learning", "custom-implementation", "deep-rl-class", "model-index", "region:us" ]
2023-11-12T18:55:56+00:00
[]
[]
TAGS #Pixelcopter-PLE-v0 #reinforce #reinforcement-learning #custom-implementation #deep-rl-class #model-index #region-us
# Reinforce Agent playing Pixelcopter-PLE-v0

This is a trained model of a Reinforce agent playing Pixelcopter-PLE-v0.
To learn to use this model and train your own, check Unit 4 of the Deep Reinforcement Learning Course: URL
[ "# Reinforce Agent playing Pixelcopter-PLE-v0\n This is a trained model of a Reinforce agent playing Pixelcopter-PLE-v0 .\n To learn to use this model and train yours check Unit 4 of the Deep Reinforcement Learning Course: URL" ]
[ "TAGS\n#Pixelcopter-PLE-v0 #reinforce #reinforcement-learning #custom-implementation #deep-rl-class #model-index #region-us \n", "# Reinforce Agent playing Pixelcopter-PLE-v0\n This is a trained model of a Reinforce agent playing Pixelcopter-PLE-v0 .\n To learn to use this model and train yours check Unit 4 of the Deep Reinforcement Learning Course: URL" ]
[ 41, 58 ]
[ "passage: TAGS\n#Pixelcopter-PLE-v0 #reinforce #reinforcement-learning #custom-implementation #deep-rl-class #model-index #region-us \n# Reinforce Agent playing Pixelcopter-PLE-v0\n This is a trained model of a Reinforce agent playing Pixelcopter-PLE-v0 .\n To learn to use this model and train yours check Unit 4 of the Deep Reinforcement Learning Course: URL" ]
[ 0.0073175891302526, -0.2259262204170227, -0.0017347558168694377, 0.05054566636681557, 0.0658537745475769, -0.055378563702106476, 0.1412602812051773, 0.05916554853320122, -0.04990595206618309, 0.059261854737997055, 0.14166708290576935, 0.03996060788631439, 0.022112762555480003, 0.1513713151216507, 0.09764605015516281, -0.2469022423028946, 0.07438477873802185, 0.01641594059765339, 0.008152224123477936, 0.09583204984664917, 0.060265738517045975, -0.1405058205127716, 0.037032704800367355, -0.01332044042646885, -0.13650871813297272, 0.0010478810872882605, -0.021802188828587532, -0.03625129908323288, 0.15681709349155426, 0.006844013463705778, 0.09602472931146622, -0.001560068572871387, 0.06475798785686493, -0.12438877671957016, 0.05466329678893089, 0.06455880403518677, -0.06293967366218567, 0.058029334992170334, -0.057374246418476105, 0.11959903687238693, 0.04641333222389221, -0.01578129455447197, 0.054811324924230576, 0.010941818356513977, -0.14131468534469604, -0.006710252724587917, 0.007013716734945774, 0.15098218619823456, 0.1339312642812729, 0.01409265398979187, -0.0014771400019526482, 0.1363491266965866, -0.16774429380893707, 0.045684073120355606, 0.061802688986063004, -0.2633039951324463, -0.04168876260519028, 0.12259352207183838, 0.08951573073863983, 0.06848238408565521, -0.060910262167453766, 0.07636868953704834, 0.049813780933618546, 0.013985024765133858, 0.023094501346349716, -0.042509064078330994, -0.040479615330696106, 0.02289252169430256, -0.0921095609664917, -0.05999262258410454, 0.11517233401536942, -0.006806366611272097, 0.03735918551683426, -0.12476086616516113, -0.015330453403294086, -0.07314357161521912, -0.05917041376233101, -0.082573801279068, 0.07563583552837372, 0.030191516503691673, -0.048283837735652924, -0.08895846456289291, -0.056533291935920715, -0.11489585787057877, -0.023082571104168892, -0.07226225733757019, 0.005096882116049528, -0.03157244250178337, -0.035645097494125366, 0.09446526318788528, -0.0021088174544274807, -0.015028090216219425, -0.03452150896191597, -0.05930153280496597, -0.04213470220565796, -0.02359505370259285, -0.03510070592164993, -0.059062156826257706, 0.054655663669109344, 0.0680202916264534, 0.04938843473792076, 0.09133565425872803, -0.0467856265604496, 0.1667373925447464, -0.03256719931960106, 0.08078566938638687, -0.011897698976099491, 0.2012830525636673, 0.11370102316141129, 0.12129533290863037, 0.06716908514499664, -0.05294690653681755, -0.16726544499397278, 0.039163749665021896, 0.12641896307468414, 0.07664673775434494, -0.032492902129888535, 0.018162984400987625, -0.12440363317728043, 0.05439428985118866, -0.14826108515262604, -0.06745084375143051, 0.024251462891697884, 0.01822635903954506, -0.060682263225317, 0.03656952083110809, -0.0028792342636734247, 0.003339326474815607, 0.004654870834201574, -0.16432709991931915, -0.05568019300699234, 0.028964387252926826, -0.15712425112724304, -0.06656725704669952, 0.06277995556592941, -0.10113482922315598, -0.012132617644965649, -0.16982388496398926, -0.16305199265480042, -0.03628521412611008, 0.017857929691672325, -0.040613796561956406, -0.056917786598205566, -0.14010562002658844, -0.019415250048041344, -0.045320261269807816, -0.004312154371291399, 0.044072363525629044, 0.0020940210670232773, 0.04635847359895706, 0.0066573889926075935, 0.09289347380399704, 0.010714372619986534, -0.0014722738415002823, -0.04595406726002693, 0.0909833237528801, -0.30731555819511414, 0.07525643706321716, -0.08645553886890411, 0.05539081245660782, -0.057316381484270096, -0.0926317572593689, -0.007509906310588121, 
0.06277763843536377, 0.060464419424533844, 0.20788121223449707, -0.2800109386444092, -0.07025618106126785, 0.13655538856983185, -0.09533236175775528, -0.13146020472049713, 0.0513952374458313, -0.050213608890771866, 0.07593657076358795, 0.027370907366275787, 0.140700101852417, -0.028026295825839043, -0.15554022789001465, 0.06281048059463501, 0.04586128890514374, -0.11356306821107864, 0.019295670092105865, 0.03597676753997803, 0.06723599135875702, 0.05744141340255737, -0.036986757069826126, -0.04105675220489502, 0.08096802979707718, -0.07076814025640488, -0.037564266473054886, 0.04588831216096878, -0.0579565204679966, 0.1630958467721939, 0.033971156924963, 0.09856503456830978, -0.04149768501520157, -0.07435470074415207, -0.005698562134057283, 0.038746561855077744, -0.08962973952293396, 0.025353478267788887, -0.18320298194885254, 0.2423991560935974, -0.02621818706393242, 0.027546977624297142, -0.16845986247062683, -0.0588528998196125, 0.011087946593761444, 0.21568740904331207, 0.030399197712540627, 0.12989304959774017, 0.07485637813806534, -0.01250512059777975, 0.014156299643218517, -0.06183977797627449, -0.1972363442182541, -0.03247830644249916, 0.008314179256558418, -0.058311350643634796, -0.04934588819742203, -0.0900716632604599, 0.10427892208099365, -0.19334633648395538, -0.005319371819496155, 0.08282599598169327, 0.023504555225372314, 0.03946567326784134, 0.0035407328978180885, -0.03634254261851311, 0.055148303508758545, 0.02030518464744091, -0.08980578929185867, 0.14668866991996765, 0.0035520538222044706, -0.03514726087450981, -0.03927676007151604, -0.03267495706677437, 0.05703731253743172, 0.08045367896556854, -0.18214593827724457, -0.0733821839094162, -0.0838410034775734, -0.02458474040031433, 0.050523869693279266, 0.036679428070783615, 0.02738112211227417, 0.44813573360443115, 0.057562243193387985, 0.09003535658121109, -0.08811535686254501, 0.039806611835956573, 0.012785476632416248, -0.031281858682632446, 0.013625281862914562, 0.04725322127342224, 0.11279468983411789, 0.028284218162298203, 0.01669839769601822, 0.03680038824677467, 0.01938779093325138, 0.08824212104082108, -0.10939645022153854, -0.003965397831052542, 0.002614045049995184, 0.038018375635147095, 0.03672022372484207, 0.07190682739019394, 0.015936892479658127, -0.09583546966314316, -0.030848123133182526, -0.11166880279779434, 0.015594755299389362, -0.20979784429073334, -0.025905707851052284, -0.029619399458169937, 0.0003502996696624905, 0.09109684824943542, 0.04222718998789787, -0.04444896802306175, 0.035467714071273804, 0.03947039321064949, -0.0861397460103035, 0.0594942644238472, -0.014317752793431282, -0.07008631527423859, 0.13023322820663452, -0.1002996563911438, -0.3153233230113983, -0.08797995746135712, 0.05698639526963234, 0.05295826122164726, 0.06816939264535904, -0.05876303091645241, -0.09240786731243134, 0.03294730558991432, -0.06836386770009995, -0.0017794050509110093, 0.0037346978206187487, -0.051060982048511505, 0.07253886014223099, 0.08541567623615265, -0.014505518600344658, -0.08911184966564178, -0.006620637606829405, -0.041561197489500046, -0.124965138733387, 0.044060997664928436, -0.03760828450322151, 0.00007921225915197283, 0.18620672821998596, 0.03724536672234535, 0.06256633251905441, -0.06291008740663528, 0.07596296072006226, -0.09150096774101257, 0.0004740063741337508, 0.18428465723991394, -0.015377625823020935, -0.004100616089999676, -0.03996327146887779, -0.0259257685393095, -0.10829219967126846, 0.053985193371772766, -0.07330703735351562, -0.07349077612161636, -0.0023273853585124016, 
-0.07770214974880219, -0.0351552739739418, 0.0012160884216427803, 0.07817990332841873, 0.029699061065912247, -0.09635239094495773, 0.04920589178800583, 0.1298678070306778, 0.0931883230805397, 0.03626195341348648, 0.023981640115380287, 0.13739009201526642, -0.11230582743883133, 0.019063033163547516, -0.05148853361606598, -0.1041760966181755, -0.042787205427885056, -0.0714287981390953, 0.07368279993534088, 0.06034531816840172, -0.09970010071992874, 0.05144011229276657, 0.041872985661029816, 0.0883496031165123, 0.1373600959777832, -0.04213863983750343, -0.11244629323482513, -0.041393622756004333, -0.022004956379532814, -0.1777329444885254, 0.0341336652636528, 0.22155584394931793, 0.0073304991237819195, -0.10497386753559113, 0.07876885682344437, -0.005956185050308704, 0.11527370661497116, 0.031222699210047722, -0.278682678937912, 0.016931315883994102, 0.00203216471709311, 0.042359162122011185, -0.047676295042037964, 0.10937416553497314, 0.11747439950704575, -0.14421136677265167, -0.06650938838720322, -0.03273930773139, 0.044137366116046906, -0.15618287026882172, 0.036923591047525406, -0.12602220475673676, 0.06240779533982277, 0.050940994173288345, 0.05090156942605972, -0.2197665423154831, 0.06881614029407501, -0.0274215005338192, 0.06763827055692673, -0.062248338013887405, -0.01823522336781025, 0.04473711550235748, 0.025079863145947456, 0.14955177903175354, -0.014347962103784084, 0.14454017579555511, -0.09031219780445099, -0.11753576993942261, 0.0027052261866629124, 0.08532248437404633, 0.013173088431358337, 0.013580933213233948, 0.0026939227245748043, 0.041669201105833054, -0.02811569906771183, 0.17063532769680023, -0.08147624880075455, -0.022407781332731247, -0.06592555344104767, -0.018158966675400734, 0.2039334923028946, -0.12064731866121292, -0.10121093690395355, -0.11619500070810318, 0.08663272857666016, -0.04296411573886871, 0.08175522089004517, -0.020344657823443413, 0.049704354256391525, -0.02509051002562046, 0.007178863976150751, 0.09594997018575668, 0.01950966566801071, 0.08983828872442245, -0.09791163355112076, -0.019585272297263145, 0.13838915526866913, -0.037155888974666595, -0.036971647292375565, -0.019425252452492714, 0.11054370552301407, -0.0358734093606472, 0.08033111691474915, 0.03929615020751953, 0.03664831817150116, 0.03428546339273453, -0.039165496826171875, 0.10309428721666336, 0.10041618347167969, -0.06291446089744568, 0.03864621743559837, -0.07954532653093338, 0.26597461104393005, 0.040773067623376846, 0.07301845401525497, 0.28390514850616455, 0.19391325116157532, -0.03036464750766754, 0.10683353990316391, -0.017607249319553375, -0.024403288960456848, -0.2950931787490845, 0.0006976581644266844, 0.027765681967139244, 0.11812873929738998, 0.01744898222386837, -0.20587195456027985, -0.1211688369512558, -0.03560304269194603, -0.007791717536747456, 0.0310499370098114, -0.2441052496433258, -0.06442268192768097, 0.06107868626713753, 0.13779635727405548, 0.15878525376319885, -0.05917542055249214, -0.007856467738747597, 0.029358724132180214, 0.07593556493520737, 0.017292039468884468, -0.11598441749811172, 0.11550791561603546, 0.025637371465563774, -0.05708931386470795, 0.0267958827316761, -0.044003549963235855, 0.04214555397629738, -0.17736166715621948, 0.10933554917573929, -0.05924695357680321, -0.08421005308628082, 0.07140472531318665, -0.02217724733054638, -0.048552993685007095, 0.0789642184972763, 0.020652711391448975, -0.13173207640647888, 0.038154006004333496, 0.005618774797767401, 0.04346654564142227, -0.004941361024975777, -0.019811764359474182, -0.029163256287574768, 
0.07706235349178314, -0.03806605935096741, 0.09605937451124191, 0.19590972363948822, -0.0573095865547657, 0.03974950686097145, 0.085201695561409, 0.09593135863542557, -0.05523005872964859, -0.0809539332985878, -0.03812742978334427, -0.005277194548398256, 0.0674438327550888, -0.08598461747169495, -0.019085103645920753, 0.07938229292631149, 0.015313901007175446, 0.14910826086997986, 0.14389736950397491, -0.08835655450820923, 0.11321785300970078, 0.10694554448127747, -0.11366690695285797, -0.08583837002515793, -0.02963297814130783, 0.0009990704711526632, 0.04910186678171158, -0.048617590218782425, 0.05932905897498131, -0.1035301461815834, 0.012819357216358185, 0.03532040864229202, 0.0038119733799248934, -0.09975302964448929, 0.009764863178133965, 0.08645275235176086, 0.06119582802057266, -0.0567571222782135, 0.09250631928443909, -0.0019178141374140978, -0.10868195444345474, 0.07241881638765335, 0.009918469935655594, -0.021528873592615128, -0.06352251768112183, 0.03211374953389168, 0.2370220273733139, 0.13945111632347107, -0.04336636886000633, -0.12396618723869324, -0.15508891642093658, 0.037849195301532745, 0.024356422945857048, 0.051251959055662155, 0.0062240250408649445, -0.06906022876501083, 0.01234503649175167, -0.04392383247613907, 0.005266309250146151, -0.05930564925074577, -0.047703344374895096, -0.12081446498632431, 0.1154373437166214, 0.053290288895368576, 0.11705748736858368, -0.0842847004532814, -0.07057584822177887, -0.1921386867761612, 0.09190598875284195, 0.041707299649715424, -0.05532265454530716, 0.06002674251794815, -0.030134430155158043, 0.017344338819384575, 0.11256659775972366, -0.051967836916446686, 0.008543911390006542, -0.09269233793020248, 0.03236149623990059, 0.03133073076605797, 0.04903566092252731, -0.004612727556377649, -0.017903391271829605, 0.04399999976158142, -0.05730267986655235, 0.07619527727365494, -0.07757602632045746, -0.033709146082401276, 0.0645759105682373, -0.16051416099071503, -0.054324716329574585, 0.08708633482456207, 0.013749903067946434, 0.02590017393231392, -0.05825240537524223, 0.019142305478453636, -0.05566488951444626, -0.04483235627412796, 0.01169554702937603, -0.05552767962217331, -0.011517677456140518, 0.05293213203549385, -0.05287189036607742, -0.040493328124284744, -0.06794002652168274, 0.061874233186244965, -0.07247710227966309, 0.09816460311412811, 0.031187955290079117, -0.10892423242330551, 0.07648903876543045, -0.037552736699581146, -0.0049397205002605915, -0.009439278393983841, 0.039307788014411926, 0.15598824620246887, -0.1606634259223938, 0.05345672369003296, -0.0484454482793808, 0.13272921741008759, 0.046888746321201324, -0.04458791762590408, -0.020207170397043228, 0.02469455823302269, -0.05549024045467377, 0.06932897865772247, 0.15877580642700195, 0.09880131483078003, 0.02571805939078331, 0.008134597912430763, 0.10187267512083054, 0.1060529574751854, 0.08136752992868423, 0.08394161611795425, -0.03428563475608826, -0.11287897825241089, 0.14338994026184082, 0.09748584777116776, 0.024613093584775925, 0.21077860891819, 0.17944025993347168, 0.03125298395752907, 0.03018142655491829, -0.06512103229761124, 0.17325744032859802, 0.061261482536792755, -0.08229418843984604, 0.014424329623579979, 0.03221147879958153, -0.049809664487838745, -0.047004032880067825, -0.09757380187511444, -0.029556652531027794, -0.24085633456707, 0.10851483792066574, -0.057250600308179855, -0.09750643372535706, 0.022772664204239845, 0.02990041859447956, -0.018839845433831215, 0.11280566453933716, -0.07735858112573624, 0.012980576604604721, 0.18577688932418823, 
-0.03825045004487038, -0.022322099655866623, -0.1633504331111908, -0.11154003441333771, -0.014046176336705685, -0.11750495433807373, 0.025494296103715897, 0.06305963546037674, 0.01117965579032898, 0.04399528726935387, 0.028923438861966133, -0.020834028720855713, 0.019218796864151955, -0.05903913825750351, -0.042673509567976, -0.01891910657286644, 0.02202831581234932, -0.09593231230974197, -0.03627033904194832, 0.12151803076267242, -0.03246605768799782, -0.08207374066114426, -0.006544890813529491, 0.07848484069108963, -0.042620159685611725, 0.09450104832649231, -0.07687012106180191, -0.03479038178920746, -0.06794454902410507, 0.268902063369751, 0.09388194978237152, -0.20183001458644867, 0.03341769427061081, -0.030470456928014755, 0.026735708117485046, -0.09215684235095978, 0.16250114142894745, 0.0899243950843811, 0.049168527126312256, -0.12686687707901, -0.003401300171390176, -0.09992645680904388, -0.0028723697178065777, -0.12552696466445923, -0.14725084602832794, 0.12093491852283478, -0.003848524997010827, -0.06547791510820389, 0.02844911813735962, -0.15909899771213531, 0.06585367769002914, 0.0978507474064827, -0.1514272391796112, -0.038227714598178864, -0.06086801365017891, 0.06072385236620903, 0.026465637609362602, 0.13005392253398895, -0.05080926790833473, 0.012067130766808987, -0.0656723901629448, -0.011309894733130932, -0.0000654291216051206, -0.017478201538324356, 0.001532604917883873, -0.09828947484493256, 0.05038110539317131, -0.0835796371102333, 0.12184429168701172, 0.05709611251950264, 0.005326167680323124, 0.008464806713163853, 0.0648408755660057, -0.02414623089134693, -0.10202058404684067, -0.01877439208328724, 0.033475372940301895, 0.03998998552560806, 0.010373802855610847, 0.034506846219301224, 0.0006507808575406671, 0.07714920490980148, -0.011413984932005405, -0.027285432443022728, -0.058209117501974106, 0.03936338797211647, -0.10441672056913376, 0.10461361706256866, 0.0013552121818065643, -0.02240127883851528, -0.010913821868598461, -0.05532446503639221, 0.045815300196409225, 0.04572062939405441, 0.029743505641818047, -0.05261747166514397, -0.09262793511152267, -0.021781492978334427, 0.023900283500552177, -0.11539579927921295, -0.18497975170612335, -0.0664035826921463, -0.15038692951202393, -0.01633414439857006, -0.0620744526386261, 0.08902198076248169, 0.13558129966259003, 0.030392181128263474, -0.04822919890284538, -0.12171997129917145, 0.025026977062225342, 0.13544774055480957, -0.03851630911231041, -0.07532322406768799 ]
null
null
null
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# uplimit-project-3-phi-1.5

This model is a fine-tuned version of [microsoft/phi-1_5](https://huggingface.co/microsoft/phi-1_5) on the scitldr dataset.
It achieves the following results on the evaluation set:
- Loss: 2.5338

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.001
- train_batch_size: 1
- eval_batch_size: 1
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 1

### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:----:|:---------------:|
| 2.5547        | 0.1   | 200  | 2.5940          |
| 2.5736        | 0.2   | 400  | 2.5878          |
| 2.5351        | 0.3   | 600  | 2.5790          |
| 2.5864        | 0.4   | 800  | 2.5668          |
| 2.5437        | 0.5   | 1000 | 2.5579          |
| 2.5745        | 0.6   | 1200 | 2.5531          |
| 2.545         | 0.7   | 1400 | 2.5457          |
| 2.5223        | 0.8   | 1600 | 2.5389          |
| 2.5786        | 0.9   | 1800 | 2.5338          |

### Framework versions

- Transformers 4.35.0
- Pytorch 2.1.0+cu118
- Datasets 2.14.6
- Tokenizers 0.14.1
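To make the recipe concrete, a sketch of the equivalent `TrainingArguments` follows. It assumes the listed Adam betas and epsilon are the Transformers defaults (so they need no explicit flags) and that evaluation ran every 200 steps, as the results table suggests; the output directory is a placeholder.

```python
# Sketch of TrainingArguments mirroring the hyperparameters listed above
# (Transformers 4.35). Assumptions: Adam betas/epsilon are the library
# defaults; the 200-step evaluation cadence is inferred from the table.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="uplimit-project-3-phi-1.5",  # placeholder output path
    learning_rate=1e-3,
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=1,
    evaluation_strategy="steps",
    eval_steps=200,
    logging_steps=200,
)
```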
{"license": "other", "tags": ["generated_from_trainer"], "datasets": ["scitldr"], "base_model": "microsoft/phi-1_5", "model-index": [{"name": "uplimit-project-3-phi-1.5", "results": []}]}
null
sergoumaya/uplimit-project-3-phi-1.5
[ "tensorboard", "safetensors", "generated_from_trainer", "dataset:scitldr", "base_model:microsoft/phi-1_5", "license:other", "region:us" ]
2023-11-12T18:57:20+00:00
[]
[]
TAGS #tensorboard #safetensors #generated_from_trainer #dataset-scitldr #base_model-microsoft/phi-1_5 #license-other #region-us
uplimit-project-3-phi-1.5
=========================

This model is a fine-tuned version of microsoft/phi-1\_5 on the scitldr dataset.
It achieves the following results on the evaluation set:

* Loss: 2.5338

Model description
-----------------

More information needed

Intended uses & limitations
---------------------------

More information needed

Training and evaluation data
----------------------------

More information needed

Training procedure
------------------

### Training hyperparameters

The following hyperparameters were used during training:

* learning\_rate: 0.001
* train\_batch\_size: 1
* eval\_batch\_size: 1
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 1

### Training results

### Framework versions

* Transformers 4.35.0
* Pytorch 2.1.0+cu118
* Datasets 2.14.6
* Tokenizers 0.14.1
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.001\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#tensorboard #safetensors #generated_from_trainer #dataset-scitldr #base_model-microsoft/phi-1_5 #license-other #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.001\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 46, 97, 4, 33 ]
[ "passage: TAGS\n#tensorboard #safetensors #generated_from_trainer #dataset-scitldr #base_model-microsoft/phi-1_5 #license-other #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.001\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.1.0+cu118\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.10731098800897598, 0.020247573032975197, -0.0011501925764605403, 0.09952946752309799, 0.18258105218410492, 0.019899843260645866, 0.14197766780853271, 0.06880845129489899, -0.07539979368448257, 0.0654732882976532, 0.10352106392383575, 0.14995481073856354, 0.007693091407418251, 0.12170831114053726, -0.06012294441461563, -0.19044633209705353, 0.012390398420393467, 0.005794425029307604, -0.03213940188288689, 0.10970030725002289, 0.07656259834766388, -0.16227775812149048, 0.08146169036626816, -0.023331455886363983, -0.22731687128543854, 0.026255464181303978, 0.04149802029132843, -0.038980092853307724, 0.13704417645931244, -0.0032361207995563745, 0.16372010111808777, 0.008096826262772083, 0.12192989885807037, -0.18027588725090027, 0.01775408536195755, 0.08508704602718353, 0.005729480646550655, 0.07071107625961304, 0.08298290520906448, -0.011966169811785221, 0.07928840070962906, -0.10317964851856232, 0.0645148903131485, 0.023818640038371086, -0.13173845410346985, -0.22882616519927979, -0.09263481944799423, 0.00018306841957382858, 0.05777818709611893, 0.08740025013685226, -0.01875525899231434, 0.20229142904281616, -0.06724317371845245, 0.08564864844083786, 0.2486248016357422, -0.2826470732688904, -0.07270044088363647, 0.097996786236763, 0.009793204255402088, 0.10455621778964996, -0.11076220870018005, -0.002599927596747875, 0.07775482535362244, 0.04144465923309326, 0.11785509437322617, -0.010549851693212986, -0.10673020780086517, 0.01408230047672987, -0.15722233057022095, 0.01508664432913065, 0.0658133402466774, 0.041896071285009384, -0.03794048726558685, -0.008960934355854988, -0.06549189239740372, -0.1166626438498497, -0.04508108273148537, -0.03201521933078766, 0.06825130432844162, -0.05552227050065994, -0.07987654209136963, -0.0027976124547421932, -0.1117432489991188, -0.084264375269413, -0.06086358800530434, 0.19226033985614777, 0.05492204800248146, 0.03682304918766022, -0.023436618968844414, 0.10710626095533371, -0.04380256310105324, -0.1175432950258255, 0.030411818996071815, 0.028037695214152336, -0.01973147690296173, -0.05917908623814583, -0.06464149057865143, -0.09551306813955307, 0.04425894096493721, 0.09802785515785217, -0.10616511851549149, 0.03668462112545967, 0.024921687319874763, 0.04591779038310051, -0.09814925491809845, 0.12258336693048477, -0.07046863436698914, 0.02359933964908123, 0.01844693347811699, 0.0632590651512146, 0.04582817852497101, 0.0030048564076423645, -0.08205121755599976, 0.04344278946518898, 0.09610147029161453, 0.009190209209918976, -0.06981543451547623, 0.05093111842870712, -0.053464408963918686, 0.012714113108813763, -0.01389383990317583, -0.09942977130413055, 0.04416569694876671, 0.01510901004076004, -0.06358884274959564, -0.049147192388772964, 0.015913518145680428, 0.021662957966327667, 0.022023538127541542, 0.12912772595882416, -0.09660325944423676, 0.05429480969905853, -0.11173366010189056, -0.11961700022220612, 0.011456115171313286, -0.052740588784217834, 0.019011087715625763, -0.09352441877126694, -0.15478447079658508, -0.017104147002100945, 0.042561355978250504, -0.036530088633298874, 0.011164169758558273, -0.04069611802697182, -0.09572730958461761, -0.020978661254048347, -0.022632991895079613, 0.14343011379241943, -0.06832082569599152, 0.09998293220996857, 0.05349714308977127, 0.05204083397984505, -0.10088338702917099, 0.014053013175725937, -0.09546315670013428, 0.003128138603642583, -0.21910220384597778, -0.0020686720963567495, -0.08549471199512482, 0.04857763275504112, -0.05778030678629875, -0.07577905803918839, -0.013155718334019184, 
0.009047908708453178, 0.09565381705760956, 0.08896149694919586, -0.19774229824543, -0.052759043872356415, 0.16558882594108582, -0.11388617008924484, -0.12119785696268082, 0.1027323305606842, -0.05086536332964897, 0.03722916543483734, 0.07409129291772842, 0.20553681254386902, -0.03504788875579834, -0.15671983361244202, -0.017303867265582085, -0.04309452325105667, 0.035209719091653824, -0.06257538497447968, 0.04740002378821373, -0.0022977767512202263, 0.02474159002304077, 0.03413911908864975, -0.05276377499103546, 0.03287747502326965, -0.12802937626838684, -0.07715963572263718, -0.06796331703662872, -0.11597897112369537, 0.01078073401004076, 0.075986348092556, 0.06572207063436508, -0.11798647046089172, -0.05644560232758522, 0.12012219429016113, 0.06993379443883896, -0.057749245315790176, 0.01640896126627922, -0.057071663439273834, 0.07011392712593079, -0.06939218193292618, -0.045832324773073196, -0.182357519865036, -0.09148469567298889, 0.0022525223903357983, -0.0053481957875192165, 0.02317151054739952, -0.013584740459918976, 0.0801224634051323, 0.09147245436906815, -0.06804600358009338, 0.0001458681363146752, -0.049699343740940094, 0.013865687884390354, -0.13415400683879852, -0.22767221927642822, -0.021473100408911705, -0.0258200541138649, 0.09584873169660568, -0.24220877885818481, 0.036988839507102966, -0.03918812796473503, 0.09940944612026215, 0.021153880283236504, -0.03704041615128517, -0.048524972051382065, 0.0914277508854866, -0.013696936890482903, -0.06165506690740585, 0.04322202876210213, -0.016746368259191513, -0.07154333591461182, -0.08528163284063339, -0.12269642949104309, 0.19204950332641602, 0.1459672898054123, -0.10606245696544647, -0.08544403314590454, 0.022761188447475433, -0.06019972264766693, -0.019802739843726158, -0.09056694060564041, 0.03745298087596893, 0.12000949680805206, -0.004142562858760357, 0.11186862736940384, -0.08069632947444916, -0.018193848431110382, 0.01616220921278, -0.04812457412481308, 0.052004389464855194, 0.10317450016736984, 0.13852566480636597, -0.058710359036922455, 0.12881234288215637, 0.12524209916591644, -0.10020441561937332, 0.08871331810951233, -0.05658169463276863, -0.07889915257692337, -0.018863031640648842, 0.0035484270192682743, 0.004008354619145393, 0.17516738176345825, -0.04640744626522064, 0.0312410369515419, -0.0027728902641683817, 0.007403829600661993, 0.027529044076800346, -0.24750156700611115, -0.05586545541882515, -0.007157717365771532, -0.04446270689368248, -0.016549181193113327, -0.034287210553884506, 0.02502400055527687, 0.11172687262296677, -0.047043945640325546, -0.044161606580019, 0.009094903245568275, 0.002249115612357855, -0.07938747853040695, 0.22553448379039764, -0.05682799220085144, -0.06311403214931488, -0.08523669093847275, -0.006556941196322441, -0.051484983414411545, -0.0019351824885234237, 0.04320285841822624, -0.08791743963956833, -0.038172751665115356, -0.09811513870954514, 0.018789656460285187, 0.062441010028123856, 0.03297730162739754, 0.0026910637971013784, 0.010210102424025536, 0.09335979074239731, -0.12360912561416626, 0.005654196720570326, -0.08206713944673538, -0.08160653710365295, 0.04715554788708687, 0.09792907536029816, 0.12479890882968903, 0.13389156758785248, -0.023788588121533394, -0.0031805825419723988, -0.021249279379844666, 0.23918792605400085, -0.056370917707681656, -0.0450822114944458, 0.10105486959218979, -0.01015381794422865, 0.03969110548496246, 0.108365498483181, 0.09301744401454926, -0.12178761512041092, 0.011186392977833748, 0.05395590513944626, -0.036475639790296555, -0.21531915664672852, 
-0.04458554834127426, -0.011358908377587795, -0.06034199148416519, 0.04400438815355301, 0.038168299943208694, -0.019225724041461945, 0.0586971677839756, 0.04619431123137474, 0.02938687987625599, -0.04483398050069809, 0.04590241611003876, 0.032425954937934875, 0.04591022804379463, 0.11479730904102325, -0.05906260758638382, -0.04494871571660042, 0.033549096435308456, -0.029194818809628487, 0.2503502070903778, -0.0010716235265135765, 0.05203591287136078, 0.09157856553792953, 0.19399994611740112, -0.018966030329465866, 0.06777488440275192, 0.019104326143860817, -0.07446775585412979, 0.010004948824644089, -0.06454076617956161, 0.015592508018016815, 0.011102957651019096, -0.11729791760444641, 0.06690976023674011, -0.09145507216453552, -0.002090613590553403, 0.07312295585870743, 0.1921878606081009, 0.023400472477078438, -0.3214004337787628, -0.05503225699067116, -0.0031660657841712236, 0.0072608450427651405, -0.0019609720911830664, -0.0017467314610257745, 0.14892417192459106, -0.029789112508296967, 0.03783094882965088, -0.0654568076133728, 0.06746566295623779, 0.01267344132065773, 0.03797069191932678, 0.042437344789505005, 0.14312848448753357, -0.01216211449354887, 0.04172387719154358, -0.2823622524738312, 0.2937917709350586, 0.025187822058796883, 0.13051727414131165, -0.040886636823415756, -0.029046233743429184, 0.011796130798757076, 0.05140708386898041, 0.05369013920426369, -0.02198820933699608, -0.05838979408144951, -0.21490928530693054, -0.06314954906702042, 0.054743532091379166, 0.11914399266242981, 0.024171408265829086, 0.09934389591217041, 0.006403324194252491, 0.018168048933148384, 0.09233696758747101, -0.02932804264128208, -0.1431255340576172, -0.04645998030900955, -0.05429061874747276, 0.02442675456404686, -0.09204640239477158, -0.08559838682413101, -0.11274562031030655, -0.1498657464981079, 0.10838790237903595, 0.0027796318754553795, -0.013628106564283371, -0.10810727626085281, 0.12497466057538986, 0.07493457943201065, -0.05847477167844772, 0.03759916126728058, 0.031439222395420074, 0.04731115326285362, 0.028513625264167786, -0.05322284996509552, 0.10837499052286148, -0.05542590096592903, -0.1700642853975296, -0.06002761051058769, 0.07110617309808731, 0.04831402003765106, 0.042727064341306686, -0.007542378269135952, 0.020733516663312912, 0.0007375059649348259, -0.09964769333600998, 0.04254331439733505, -0.011049486696720123, 0.06584486365318298, 0.02865239605307579, -0.05880163609981537, -0.0297914519906044, -0.04595183953642845, -0.046494260430336, 0.11188259720802307, 0.3127846419811249, -0.07900865375995636, -0.02115778997540474, 0.046937234699726105, -0.06434740871191025, -0.1933605670928955, 0.10223280638456345, 0.04960576817393303, -0.0034610501024872065, 0.1147380843758583, -0.12187863141298294, 0.16183559596538544, 0.14580193161964417, -0.028336456045508385, 0.1232057511806488, -0.3010331094264984, -0.1464628130197525, 0.09446770697832108, 0.22146688401699066, 0.12086773663759232, -0.18105848133563995, -0.029549546539783478, -0.013264995068311691, -0.10878394544124603, 0.10930527746677399, -0.1877981275320053, 0.09561428427696228, 0.00284047843888402, 0.060252804309129715, 0.005119001027196646, -0.07014641165733337, 0.13135093450546265, -0.007246758323162794, 0.1488109976053238, -0.045176535844802856, -0.005983706563711166, 0.0907500684261322, -0.018036440014839172, 0.018722664564847946, -0.04005271941423416, 0.029650308191776276, 0.012531138956546783, -0.019796589389443398, -0.08034593611955643, 0.05392947047948837, -0.03896529972553253, -0.05811341851949692, 
-0.04130248725414276, 0.02502019889652729, 0.005104484036564827, -0.03031863458454609, 0.1272273063659668, 0.021266039460897446, 0.18023109436035156, 0.09610436111688614, 0.030674362555146217, -0.0715644583106041, -0.021970046684145927, 0.012454203329980373, -0.03504033014178276, 0.03685160353779793, -0.1292978674173355, 0.011457407847046852, 0.120403952896595, 0.020057855173945427, 0.09964693337678909, 0.07807008177042007, -0.056046076118946075, 0.029103167355060577, 0.0814029797911644, -0.16533522307872772, -0.148531973361969, 0.040314316749572754, -0.05655454844236374, -0.08914275467395782, 0.10356175899505615, 0.0813094899058342, -0.07471440732479095, -0.002040589228272438, -0.028499362990260124, 0.006831343285739422, -0.05591243878006935, 0.21402607858181, 0.09050130099058151, 0.038060810416936874, -0.09941645711660385, 0.09934582561254501, 0.038867197930812836, -0.05313597619533539, -0.01120005827397108, 0.030473671853542328, -0.06855752319097519, -0.012428550980985165, 0.1293959617614746, 0.2180340737104416, -0.03365979343652725, -0.053116604685783386, -0.1740293949842453, -0.10989946126937866, 0.022258425131440163, 0.18887513875961304, 0.10078004747629166, -0.004447576589882374, 0.013867184519767761, 0.02373574674129486, -0.13395999372005463, 0.08284574747085571, 0.03039325773715973, 0.090593621134758, -0.15322381258010864, 0.17043651640415192, 0.01199235487729311, 0.00003804933658102527, -0.03292190283536911, 0.06970558315515518, -0.13079267740249634, 0.01640939712524414, -0.11748530715703964, -0.025041794404387474, -0.004656869452446699, -0.021739276126027107, 0.0164331141859293, -0.07859237492084503, -0.07995611429214478, 0.03047286532819271, -0.11686324328184128, -0.00926453061401844, 0.05674739554524422, 0.038655150681734085, -0.1488042175769806, -0.03367834910750389, 0.01181996613740921, -0.042945198714733124, 0.03351512923836708, 0.040870506316423416, 0.02406417578458786, 0.08992462605237961, -0.23739854991436005, 0.01228876318782568, 0.08828920871019363, -0.001387268421240151, 0.07128608971834183, -0.03215329349040985, -0.03440573811531067, 0.008473781868815422, 0.10048520565032959, 0.021904373541474342, 0.09000344574451447, -0.12941482663154602, 0.005831340327858925, -0.04365542158484459, -0.06424580514431, -0.03310304507613182, -0.0029618586413562298, 0.08294542878866196, 0.006815907079726458, 0.18583270907402039, -0.09880050271749496, 0.007051314692944288, -0.23281671106815338, -0.004417621996253729, -0.016225911676883698, -0.08567346632480621, -0.11142326891422272, -0.021378956735134125, 0.07458261400461197, -0.04701299965381622, 0.17041712999343872, 0.01803385466337204, 0.0388539582490921, 0.03250947594642639, -0.019661255180835724, 0.017956653609871864, 0.040119096636772156, 0.2295590341091156, 0.02068616822361946, -0.018985843285918236, 0.0485721118748188, 0.055698469281196594, 0.10136166960000992, 0.05113033205270767, 0.23172704875469208, 0.199626624584198, -0.0557982474565506, 0.09505191445350647, 0.06317370384931564, -0.06793272495269775, -0.0895591601729393, 0.05232236534357071, -0.06590237468481064, 0.051470961421728134, -0.03240435570478439, 0.2016208916902542, 0.0768766850233078, -0.15623992681503296, 0.023408927023410797, -0.058719996362924576, -0.08104369789361954, -0.10913016647100449, 0.000349942478351295, -0.08935648202896118, -0.1531326025724411, 0.023184441030025482, -0.10752879083156586, 0.007770317606627941, 0.15210795402526855, 0.011017168872058392, -0.006110824178904295, 0.20979227125644684, 0.06292838603258133, 0.06385549902915955, 
0.014384659938514233, 0.00330394902266562, -0.04655291885137558, -0.08513469249010086, -0.08470244705677032, 0.006668993271887302, -0.01833643950521946, 0.03386325761675835, -0.05500199273228645, -0.07406274229288101, 0.049780044704675674, -0.0073914737440645695, -0.09680088609457016, 0.02658226154744625, 0.022061554715037346, 0.04660874605178833, 0.03219238296151161, 0.019564781337976456, 0.015125950798392296, -0.023880908265709877, 0.2234519124031067, -0.0589069202542305, -0.08924167603254318, -0.07290536910295486, 0.23682455718517303, 0.02476031519472599, 0.003924635238945484, 0.010282296687364578, -0.10507960617542267, 0.024184593930840492, 0.1884729117155075, 0.17356280982494354, -0.11001215130090714, -0.004022573586553335, -0.028118720278143883, -0.018886299803853035, -0.07910890132188797, 0.12406189739704132, 0.09669472277164459, 0.03451858460903168, -0.10281296819448471, -0.030478747561573982, -0.049811363220214844, 0.003969170153141022, -0.06701507419347763, 0.024934563785791397, 0.04151995852589607, 0.021222276613116264, -0.07201831787824631, 0.0759458839893341, -0.04374245926737785, -0.13653326034545898, 0.0737820565700531, -0.18236342072486877, -0.1610102355480194, -0.02119976282119751, 0.1312112659215927, -0.029747065156698227, 0.05350438877940178, -0.05060344189405441, 0.019139552488923073, 0.03255767375230789, -0.03129858151078224, -0.06340131908655167, -0.09871070832014084, 0.06140747293829918, -0.1236056312918663, 0.24217477440834045, -0.033363018184900284, 0.09339940547943115, 0.11264017224311829, 0.024752218276262283, -0.08611578494310379, 0.1037403866648674, 0.024223631247878075, -0.11961162835359573, 0.01224056351929903, 0.0831223875284195, -0.044036611914634705, 0.05767953395843506, 0.03236481547355652, -0.11485770344734192, 0.02329002320766449, -0.03477729856967926, -0.07813165336847305, -0.04914795234799385, -0.05898761376738548, -0.06831903755664825, 0.10780978947877884, 0.16361430287361145, -0.020880160853266716, 0.05295007303357124, -0.06647442281246185, 0.053031351417303085, 0.06561663001775742, 0.06320148706436157, -0.034582387655973434, -0.27586135268211365, 0.06158045306801796, 0.11199527978897095, -0.041019320487976074, -0.24626222252845764, -0.07116765528917313, 0.01650382950901985, -0.07116223126649857, -0.0873703882098198, 0.0627821534872055, 0.14557448029518127, 0.06816838681697845, -0.05911614000797272, -0.16699181497097015, -0.0702015832066536, 0.1612865924835205, -0.12124206125736237, -0.10991640388965607 ]
null
null
transformers
# fine-tuned-led-base-book-summary

This model is a fine-tuned version of [pszemraj/led-base-book-summary](https://huggingface.co/pszemraj/led-base-book-summary) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.5918
- Rouge2 Precision: 0.0778
- Rouge2 Recall: 0.1291
- Rouge2 Fmeasure: 0.0958

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-06
- train_batch_size: 1
- eval_batch_size: 1
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 4
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 2
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Rouge2 Precision | Rouge2 Recall | Rouge2 Fmeasure |
|:-------------:|:-----:|:----:|:---------------:|:----------------:|:-------------:|:---------------:|
| 3.1612        | 0.4   | 150  | 2.7501          | 0.0605           | 0.1088        | 0.0764          |
| 2.9645        | 0.8   | 300  | 2.6528          | 0.0732           | 0.1251        | 0.0909          |
| 2.6754        | 1.19  | 450  | 2.6192          | 0.0752           | 0.1216        | 0.0917          |
| 2.8581        | 1.59  | 600  | 2.5968          | 0.0763           | 0.1239        | 0.0933          |
| 2.7604        | 1.99  | 750  | 2.5918          | 0.0778           | 0.1291        | 0.0958          |

### Framework versions

- Transformers 4.35.0
- Pytorch 2.0.1
- Datasets 2.14.6
- Tokenizers 0.14.1
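The hyperparameters listed in this card map directly onto the Hugging Face `Seq2SeqTrainingArguments` API. The following is a minimal sketch of that configuration, not the authors' actual training script: the output directory is a placeholder, dataset wiring is omitted, and the Adam betas/epsilon shown in the card are already the `Trainer` defaults.

```python
from transformers import Seq2SeqTrainingArguments

# Sketch of the configuration described in the card above.
# output_dir is a placeholder; adam_beta1=0.9, adam_beta2=0.999,
# and adam_epsilon=1e-8 are the Trainer defaults, so they are not set here.
args = Seq2SeqTrainingArguments(
    output_dir="fine-tuned-led-base-book-summary",
    learning_rate=2e-6,
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    gradient_accumulation_steps=4,  # total train batch size: 1 x 4 = 4
    num_train_epochs=2,
    lr_scheduler_type="linear",
    seed=42,
    fp16=True,  # "Native AMP" mixed precision; requires a CUDA device
)
```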
{"license": "bsd-3-clause", "tags": ["generated_from_trainer"], "base_model": "pszemraj/led-base-book-summary", "model-index": [{"name": "fine-tuned-led-base-book-summary", "results": []}]}
text2text-generation
Narya-ai/fine-tuned-led-base-book-summary
[ "transformers", "safetensors", "led", "text2text-generation", "generated_from_trainer", "base_model:pszemraj/led-base-book-summary", "license:bsd-3-clause", "autotrain_compatible", "endpoints_compatible", "region:us" ]
2023-11-12T19:01:34+00:00
[]
[]
TAGS #transformers #safetensors #led #text2text-generation #generated_from_trainer #base_model-pszemraj/led-base-book-summary #license-bsd-3-clause #autotrain_compatible #endpoints_compatible #region-us
fine-tuned-led-base-book-summary ================================ This model is a fine-tuned version of pszemraj/led-base-book-summary on an unknown dataset. It achieves the following results on the evaluation set: * Loss: 2.5918 * Rouge2 Precision: 0.0778 * Rouge2 Recall: 0.1291 * Rouge2 Fmeasure: 0.0958 Model description ----------------- More information needed Intended uses & limitations --------------------------- More information needed Training and evaluation data ---------------------------- More information needed Training procedure ------------------ ### Training hyperparameters The following hyperparameters were used during training: * learning\_rate: 2e-06 * train\_batch\_size: 1 * eval\_batch\_size: 1 * seed: 42 * gradient\_accumulation\_steps: 4 * total\_train\_batch\_size: 4 * optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 * lr\_scheduler\_type: linear * num\_epochs: 2 * mixed\_precision\_training: Native AMP ### Training results ### Framework versions * Transformers 4.35.0 * Pytorch 2.0.1 * Datasets 2.14.6 * Tokenizers 0.14.1
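Since the card's usage section is empty, here is a hedged sketch of typical LED summarization inference with this checkpoint. The repo id comes from this record; the input text is a placeholder, and the global-attention handling assumes the checkpoint follows the base `led-base-book-summary` conventions.

```python
import torch
from transformers import AutoTokenizer, LEDForConditionalGeneration

repo = "Narya-ai/fine-tuned-led-base-book-summary"  # repo id from this record
tokenizer = AutoTokenizer.from_pretrained(repo)
model = LEDForConditionalGeneration.from_pretrained(repo)

text = "Very long document to summarize ..."  # placeholder input
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=4096)

# LED conventionally puts global attention on the first token for seq2seq tasks.
global_attention_mask = torch.zeros_like(inputs["input_ids"])
global_attention_mask[:, 0] = 1

summary_ids = model.generate(
    inputs["input_ids"],
    attention_mask=inputs["attention_mask"],
    global_attention_mask=global_attention_mask,
    num_beams=4,
    max_length=256,
)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```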
[ "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-06\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* gradient\\_accumulation\\_steps: 4\n* total\\_train\\_batch\\_size: 4\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2\n* mixed\\_precision\\_training: Native AMP", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.0.1\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ "TAGS\n#transformers #safetensors #led #text2text-generation #generated_from_trainer #base_model-pszemraj/led-base-book-summary #license-bsd-3-clause #autotrain_compatible #endpoints_compatible #region-us \n", "### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-06\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* gradient\\_accumulation\\_steps: 4\n* total\\_train\\_batch\\_size: 4\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2\n* mixed\\_precision\\_training: Native AMP", "### Training results", "### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.0.1\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ 73, 141, 4, 30 ]
[ "passage: TAGS\n#transformers #safetensors #led #text2text-generation #generated_from_trainer #base_model-pszemraj/led-base-book-summary #license-bsd-3-clause #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-06\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* gradient\\_accumulation\\_steps: 4\n* total\\_train\\_batch\\_size: 4\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.35.0\n* Pytorch 2.0.1\n* Datasets 2.14.6\n* Tokenizers 0.14.1" ]
[ -0.1295376718044281, 0.054822031408548355, -0.0012364510912448168, 0.06101413071155548, 0.14621852338314056, -0.0010982821695506573, 0.11427124589681625, 0.1317196488380432, -0.13666890561580658, 0.06607465445995331, 0.10685636103153229, 0.08442993462085724, 0.038446132093667984, 0.15786658227443695, -0.04421344771981239, -0.3027576208114624, 0.014460373669862747, 0.024503879249095917, -0.14643222093582153, 0.13060222566127777, 0.11798829585313797, -0.11550328135490417, 0.0635453388094902, 0.026369353756308556, -0.15327973663806915, 0.02370024472475052, -0.0038814435247331858, -0.06572552025318146, 0.11458121985197067, 0.043732352554798126, 0.11421724408864975, 0.03174674138426781, 0.08542044460773468, -0.19600369036197662, 0.00961544644087553, 0.06982087343931198, 0.03333275020122528, 0.08973008394241333, 0.07980011403560638, -0.014999452978372574, 0.1418909877538681, -0.0599915012717247, 0.07436398416757584, 0.061383094638586044, -0.1211223378777504, -0.31634721159935, -0.10300657153129578, 0.08071184903383255, 0.12800630927085876, 0.07843073457479477, -0.023348931223154068, 0.09152748435735703, -0.06065773218870163, 0.08027824759483337, 0.22365452349185944, -0.2677159309387207, -0.08902276307344437, -0.005658014211803675, 0.0685516744852066, 0.03255269676446915, -0.11804424971342087, -0.016368050128221512, 0.06323620676994324, 0.02559368498623371, 0.11264801770448685, 0.009268430061638355, 0.015824537724256516, 0.008940013125538826, -0.15070635080337524, -0.03350439295172691, 0.143514022231102, 0.07521238923072815, -0.061831407248973846, -0.08160606026649475, -0.03580299764871597, -0.1790066808462143, -0.03930675983428955, -0.0010388527298346162, 0.032343193888664246, -0.05060283839702606, -0.1297573745250702, 0.010230405256152153, -0.09641251713037491, -0.10023956745862961, 0.004885418340563774, 0.1813260018825531, 0.04379982873797417, -0.006496673449873924, -0.014255686663091183, 0.13621096312999725, 0.023823320865631104, -0.14887632429599762, -0.019565019756555557, 0.01208321563899517, -0.04732357710599899, -0.03566424548625946, -0.03933953866362572, -0.022225702181458473, -0.005875181872397661, 0.17546190321445465, -0.08561572432518005, 0.03671718388795853, 0.00902225449681282, 0.024132773280143738, -0.10395686328411102, 0.1738208830356598, -0.06125838682055473, -0.023695647716522217, -0.018759360536932945, 0.11398373544216156, 0.019464243203401566, 0.0037602961529046297, -0.06805657595396042, 0.029480239376425743, 0.10061664879322052, 0.04562400281429291, -0.0389048233628273, 0.034450188279151917, -0.053116995841264725, -0.017739754170179367, 0.07639282941818237, -0.09036209434270859, 0.021337948739528656, 0.007114394102245569, -0.10209780186414719, -0.030712932348251343, 0.01991012878715992, 0.02049208991229534, 0.020325927063822746, 0.13342343270778656, -0.09147541970014572, -0.01727193407714367, -0.11460492014884949, -0.11161309480667114, 0.02373678795993328, -0.001355490181595087, -0.0028564934618771076, -0.0901583656668663, -0.17413027584552765, -0.03160511702299118, 0.04349445551633835, -0.0421329028904438, -0.05804840102791786, -0.035498443990945816, -0.0804574266076088, 0.04185635223984718, -0.02706780470907688, 0.16574935615062714, -0.049217429012060165, 0.12490104883909225, 0.09108936786651611, 0.04814670607447624, 0.016569040715694427, 0.04319199174642563, -0.08454468101263046, 0.04680663347244263, -0.18643052875995636, 0.048953451216220856, -0.07142779976129532, 0.0813634842634201, -0.12238696962594986, -0.11888314038515091, -0.009119528345763683, 
0.0038386303931474686, 0.09825651347637177, 0.11990628391504288, -0.11986837536096573, -0.10641612857580185, 0.15823513269424438, -0.10627401620149612, -0.1650986522436142, 0.10549239069223404, -0.021312424913048744, 0.04317736253142357, 0.04548642784357071, 0.11996520310640335, 0.06882037222385406, -0.08509577810764313, -0.028395531699061394, -0.06363092362880707, 0.10950548946857452, -0.0036295854952186346, 0.1088872179389, -0.028528526425361633, -0.006506243720650673, 0.01272730715572834, -0.046325381845235825, 0.042961157858371735, -0.11458306014537811, -0.08613976836204529, -0.029082566499710083, -0.09143021702766418, 0.05204332619905472, 0.061946045607328415, 0.069286048412323, -0.11546649038791656, -0.1268150359392166, 0.08689621090888977, 0.1158510148525238, -0.06456054002046585, 0.027134986594319344, -0.07075905799865723, 0.05410289764404297, -0.0683208554983139, -0.009362511336803436, -0.1702776998281479, -0.05954170972108841, 0.019153539091348648, -0.03889571875333786, 0.002473029773682356, -0.06852950155735016, 0.07865753769874573, 0.06885659694671631, -0.08937573432922363, -0.07443863898515701, -0.08500038087368011, -0.01937001384794712, -0.0945906713604927, -0.22576764225959778, -0.0774046927690506, -0.019663726910948753, 0.14941014349460602, -0.2383120059967041, 0.044080231338739395, 0.01000143587589264, 0.1364850103855133, 0.051361083984375, -0.030330516397953033, -0.023561015725135803, 0.07369611412286758, -0.03636937215924263, -0.05623277276754379, 0.03405212610960007, 0.007431669160723686, -0.12379308044910431, -0.004004464019089937, -0.12639960646629333, 0.15228909254074097, 0.108795166015625, -0.02856661006808281, -0.09415365755558014, -0.041835907846689224, -0.09521164745092392, -0.04780600219964981, -0.03981148079037666, -0.018866177648305893, 0.08295958489179611, 0.026862116530537605, 0.14801491796970367, -0.08499633520841599, -0.066658616065979, 0.029634110629558563, -0.013661004602909088, 0.003368969075381756, 0.12107693403959274, 0.05972782149910927, -0.07165791094303131, 0.12046273797750473, 0.1232708990573883, -0.0625002309679985, 0.1561596542596817, -0.0685093030333519, -0.10235670208930969, -0.01799740456044674, 0.018062923103570938, 0.040624089539051056, 0.13927523791790009, -0.102140411734581, -0.0028814009856432676, 0.018176397308707237, 0.019557040184736252, 0.020819850265979767, -0.20612189173698425, -0.013474258594214916, 0.03496008366346359, -0.050019215792417526, -0.03590775653719902, -0.0017272393452003598, -0.007905931212008, 0.09820888936519623, 0.020895253866910934, -0.01932312734425068, 0.01350428443402052, 0.0031591616570949554, -0.08464041352272034, 0.21265533566474915, -0.07449331879615784, -0.12725865840911865, -0.16448760032653809, -0.010304042138159275, -0.05195772647857666, -0.0006181049975566566, 0.051746584475040436, -0.10657462477684021, -0.027876781299710274, -0.06800606101751328, 0.055411674082279205, -0.030063919723033905, 0.032680779695510864, -0.005050229839980602, 0.026813901960849762, 0.09968878328800201, -0.1273731142282486, 0.02545744925737381, -0.01635970175266266, -0.06775165349245071, 0.017949869856238365, 0.034574393182992935, 0.10732581466436386, 0.1502733826637268, 0.009363537654280663, 0.016671039164066315, -0.0520721934735775, 0.15992991626262665, -0.09888311475515366, -0.03608518838882446, 0.14894257485866547, 0.023441854864358902, 0.043741244822740555, 0.13175074756145477, 0.05945771560072899, -0.09074829518795013, 0.04837212339043617, 0.06444830447435379, -0.0159110426902771, -0.24087007343769073, 
-0.01239178515970707, -0.041217923164367676, 0.011764327995479107, 0.10554546862840652, 0.04087819531559944, 0.025594916194677353, 0.052658144384622574, -0.03801042586565018, 0.025266749784350395, -0.012933078221976757, 0.1004675030708313, 0.07882001250982285, 0.048379674553871155, 0.14720334112644196, -0.04424877092242241, -0.031871672719717026, 0.03380599990487099, -0.022379623726010323, 0.21383506059646606, -0.01582418940961361, 0.11722065508365631, 0.06992270052433014, 0.13511276245117188, 0.00422027288004756, 0.0815517008304596, 0.012756573036313057, -0.04374469816684723, 0.022060686722397804, -0.067497618496418, -0.008634828962385654, 0.05130351707339287, -0.03854566812515259, 0.09383370727300644, -0.16788041591644287, -0.02350538969039917, 0.04644034057855606, 0.303024560213089, 0.07827669382095337, -0.3277309536933899, -0.1405884176492691, 0.017925769090652466, -0.0628882572054863, -0.042354974895715714, 0.012007039971649647, 0.07841222733259201, -0.09482678025960922, 0.07139114290475845, -0.08542193472385406, 0.10454852879047394, -0.007435179315507412, -0.0020268328953534365, 0.08012707531452179, 0.0908798798918724, -0.02637219801545143, 0.062347155064344406, -0.2727431058883667, 0.29901668429374695, 0.005910784471780062, 0.09667874127626419, -0.0166914202272892, 0.03149840608239174, 0.036925021559000015, 0.039860956370830536, 0.05181863158941269, -0.026311205700039864, -0.0660492554306984, -0.19516053795814514, -0.05992117524147034, 0.02514447458088398, 0.1278890073299408, -0.0901653990149498, 0.13800543546676636, -0.03152265399694443, -0.0079601826146245, 0.0603749118745327, -0.045403722673654556, -0.13239914178848267, -0.07320607453584671, 0.012262229807674885, -0.0118499044328928, 0.07947113364934921, -0.13094481825828552, -0.11852982640266418, -0.037901539355516434, 0.1678249090909958, -0.06581147760152817, -0.039330706000328064, -0.13341759145259857, 0.10726568847894669, 0.13780620694160461, -0.07249576598405838, 0.049721211194992065, -0.0012735965428873897, 0.12839362025260925, 0.02294461987912655, -0.02308545634150505, 0.11064400523900986, -0.07728413492441177, -0.24275562167167664, -0.059797342866659164, 0.15076924860477448, 0.02300884760916233, 0.046960778534412384, -0.026089074090123177, 0.025540931150317192, -0.01829308643937111, -0.08198504149913788, 0.04623204097151756, -0.018370937556028366, 0.040126219391822815, 0.036521248519420624, -0.048490770161151886, 0.040240395814180374, -0.04526340216398239, -0.07036395370960236, 0.09822341799736023, 0.2990491986274719, -0.07437390089035034, -0.011466024443507195, 0.06983133405447006, -0.04250532388687134, -0.14980269968509674, 0.05391012504696846, 0.11739499121904373, 0.030422702431678772, -0.00022538304619956762, -0.17968937754631042, 0.07154679298400879, 0.10314363241195679, -0.04501355066895485, 0.11293282359838486, -0.27782005071640015, -0.1492605060338974, 0.08324301242828369, 0.12492472678422928, 0.002379972953349352, -0.19044189155101776, -0.05900249257683754, -0.03173138201236725, -0.11378548294305801, 0.08842720091342926, -0.07538629323244095, 0.10407775640487671, -0.01623726636171341, 0.05791176110506058, 0.0060698529705405235, -0.05113731324672699, 0.14471550285816193, -0.03793691098690033, 0.07875190675258636, -0.009294652380049229, 0.03337378427386284, 0.07489971816539764, -0.06999494880437851, 0.017139790579676628, -0.05854850262403488, 0.035295017063617706, -0.12146058678627014, -0.030519327148795128, -0.09222474694252014, 0.049972593784332275, -0.05874568223953247, -0.05977925658226013, 
-0.027702219784259796, 0.05078298971056938, 0.00635371170938015, -0.021152649074792862, 0.15696381032466888, -0.035025209188461304, 0.21265971660614014, 0.10120052099227905, 0.08342725038528442, -0.021164098754525185, -0.03829256072640419, 0.0066258227452635765, -0.03378636762499809, 0.06856667250394821, -0.17664378881454468, 0.02597198635339737, 0.1305871605873108, 0.04871835187077522, 0.14477640390396118, 0.0756184533238411, -0.06279753148555756, 0.03401599079370499, 0.10232940316200256, -0.09681852161884308, -0.09481388330459595, -0.030013853684067726, 0.02104075625538826, -0.1757616102695465, 0.07736390084028244, 0.10520856827497482, -0.07104647159576416, -0.019389165565371513, 0.00968225672841072, 0.002973840106278658, -0.047798752784729004, 0.21818293631076813, 0.05957632511854172, 0.09176740795373917, -0.08202385157346725, 0.09018504619598389, 0.0354766771197319, -0.1362931728363037, -0.009778805077075958, 0.08727508783340454, -0.04951121285557747, -0.009093474596738815, 0.004078399855643511, 0.10388477891683578, -0.06611239165067673, -0.05419968068599701, -0.15961311757564545, -0.1305411159992218, 0.06988851726055145, 0.13512824475765228, 0.057304076850414276, 0.0317537747323513, -0.015956394374370575, 0.06447644531726837, -0.13084940612316132, 0.11828909069299698, 0.08378029614686966, 0.09952165186405182, -0.1481826901435852, 0.16245585680007935, 0.0029085553251206875, 0.0074134706519544125, -0.005499823484569788, 0.01781925931572914, -0.10403185337781906, 0.0029844066593796015, -0.1425415426492691, -0.06860046833753586, -0.045127786695957184, -0.014413722790777683, -0.0010858937166631222, -0.052559833973646164, -0.0735553428530693, 0.0205240398645401, -0.11611462384462357, -0.045928630977869034, 0.017211798578500748, 0.055607981979846954, -0.1289893239736557, -0.011483226902782917, 0.036881059408187866, -0.10613878816366196, 0.07190325111150742, 0.037330660969018936, 0.054452259093523026, 0.0438997820019722, -0.08375506848096848, 0.030507614836096764, 0.031453728675842285, -0.03325236961245537, 0.03227836266160011, -0.1264733076095581, 0.0031908389646559954, -0.026593049988150597, 0.050363074988126755, 0.005971330683678389, 0.018086273223161697, -0.14689593017101288, -0.022077297791838646, -0.01339088287204504, -0.04417116567492485, -0.05037955939769745, 0.04978154972195625, 0.042984843254089355, 0.0431872233748436, 0.16810324788093567, -0.0893329456448555, 0.015465982258319855, -0.238682359457016, 0.006774191744625568, -0.031558167189359665, -0.08795870095491409, -0.09444190561771393, -0.026388904079794884, 0.06795058399438858, -0.06463839113712311, 0.09235329180955887, -0.04722364991903305, 0.10386158525943756, 0.05573102459311485, -0.09139375388622284, 0.05590864270925522, 0.038073260337114334, 0.24072019755840302, 0.0461607426404953, -0.009950783103704453, 0.06102209910750389, 0.033829476684331894, 0.07007680088281631, 0.09074568748474121, 0.178373783826828, 0.13809403777122498, -0.027298597618937492, 0.10345055907964706, 0.04971221834421158, -0.08231368660926819, -0.15372927486896515, 0.027088390663266182, -0.005725933238863945, 0.10616645961999893, -0.01808753050863743, 0.18573826551437378, 0.1533297896385193, -0.1806911826133728, 0.03019719012081623, -0.0321701355278492, -0.06764023005962372, -0.1027471274137497, -0.01414502039551735, -0.07693963497877121, -0.19913946092128754, 0.014793803915381432, -0.1277550756931305, 0.030746299773454666, 0.04765079542994499, 0.021732080727815628, 0.015373867936432362, 0.15137559175491333, 0.05277237668633461, 0.02299010008573532, 
0.09764691442251205, 0.004559023771435022, -0.005655092187225819, -0.026071935892105103, -0.08877156674861908, 0.025891011580824852, -0.043154992163181305, 0.034315578639507294, -0.059542570263147354, -0.12620341777801514, 0.056374065577983856, 0.031230786815285683, -0.11079853773117065, 0.019692756235599518, 0.026467079296708107, 0.08398599177598953, 0.07287473976612091, 0.0026884230319410563, 0.012327832169830799, -0.02412593550980091, 0.2841029167175293, -0.11165864020586014, -0.055553138256073, -0.13707973062992096, 0.30153539776802063, 0.025425976142287254, -0.0183135773986578, 0.02221916988492012, -0.09900232404470444, -0.006549081765115261, 0.1651688814163208, 0.14124806225299835, -0.005678236950188875, -0.023519711568951607, 0.008686940185725689, -0.020562903955578804, -0.05034719780087471, 0.08678364753723145, 0.09545981884002686, 0.0760229155421257, -0.07420412451028824, -0.028683949261903763, -0.036021750420331955, -0.04403103142976761, -0.0037911611143499613, 0.09484256058931351, 0.03087458200752735, -0.01074182242155075, -0.038313087075948715, 0.09471669793128967, -0.019916502758860588, -0.1093137189745903, 0.050401244312524796, -0.16455094516277313, -0.16944892704486847, -0.04458179697394371, 0.055359482765197754, 0.0006047903443686664, 0.06809093803167343, -0.0064804391004145145, -0.02229914255440235, 0.10041355341672897, -0.008354611694812775, -0.037391383200883865, -0.15135815739631653, 0.10316683351993561, -0.05976064130663872, 0.21434132754802704, -0.052367713302373886, 0.005464124958962202, 0.13484342396259308, 0.028204312548041344, -0.10287025570869446, 0.0422784760594368, 0.06731381267309189, -0.12007305771112442, 0.04309909790754318, 0.17220377922058105, -0.029813596978783607, 0.11740511655807495, 0.027345024049282074, -0.1613927185535431, 0.018947619944810867, -0.08169922232627869, -0.0799318179488182, -0.08417899906635284, -0.006403014063835144, -0.042490944266319275, 0.12112109363079071, 0.23065033555030823, -0.06903618574142456, 0.0026958894450217485, -0.06978590786457062, 0.04094821214675903, 0.07170494645833969, 0.10409322381019592, -0.011171834543347359, -0.2815094292163849, 0.026125360280275345, 0.08266084641218185, -0.022866325452923775, -0.3119944930076599, -0.07745048403739929, 0.04003302380442619, -0.05688300356268883, -0.06786607205867767, 0.11078821122646332, 0.08278689533472061, 0.048420682549476624, -0.0517829954624176, -0.12619666755199432, -0.07059711217880249, 0.18830254673957825, -0.1652289777994156, -0.07608591020107269 ]
null
null
peft
# Model Card for Model ID

<!-- Provide a quick summary of what the model is/does. -->

## Model Details

### Model Description

<!-- Provide a longer summary of what this model is. -->

- **Developed by:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Model type:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
- **Finetuned from model [optional]:** [More Information Needed]

### Model Sources [optional]

<!-- Provide the basic links for the model. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->

### Direct Use

<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->

[More Information Needed]

### Downstream Use [optional]

<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## How to Get Started with the Model

Use the code below to get started with the model.

[More Information Needed]

## Training Details

### Training Data

<!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->

[More Information Needed]

### Training Procedure

<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->

#### Preprocessing [optional]

[More Information Needed]

#### Training Hyperparameters

- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->

#### Speeds, Sizes, Times [optional]

<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->

[More Information Needed]

## Evaluation

<!-- This section describes the evaluation protocols and provides the results. -->

### Testing Data, Factors & Metrics

#### Testing Data

<!-- This should link to a Data Card if possible. -->

[More Information Needed]

#### Factors

<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->

[More Information Needed]

#### Metrics

<!-- These are the evaluation metrics being used, ideally with a description of why.
-->

[More Information Needed]

### Results

[More Information Needed]

#### Summary

## Model Examination [optional]

<!-- Relevant interpretability work for the model goes here -->

[More Information Needed]

## Environmental Impact

<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->

Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).

- **Hardware Type:** [More Information Needed]
- **Hours used:** [More Information Needed]
- **Cloud Provider:** [More Information Needed]
- **Compute Region:** [More Information Needed]
- **Carbon Emitted:** [More Information Needed]

## Technical Specifications [optional]

### Model Architecture and Objective

[More Information Needed]

### Compute Infrastructure

[More Information Needed]

#### Hardware

[More Information Needed]

#### Software

[More Information Needed]

## Citation [optional]

<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Model Card Authors [optional]

[More Information Needed]

## Model Card Contact

[More Information Needed]

## Training procedure

The following `bitsandbytes` quantization config was used during training:
- quant_method: bitsandbytes
- load_in_8bit: True
- load_in_4bit: False
- llm_int8_threshold: 6.0
- llm_int8_skip_modules: None
- llm_int8_enable_fp32_cpu_offload: False
- llm_int8_has_fp16_weight: False
- bnb_4bit_quant_type: fp4
- bnb_4bit_use_double_quant: False
- bnb_4bit_compute_dtype: float32

### Framework versions

- PEFT 0.6.2.dev0
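The quantization settings above correspond one-to-one to fields of `transformers.BitsAndBytesConfig`; the 4-bit fields are irrelevant here because `load_in_8bit` is set. Below is a minimal sketch of loading the base model with that config. The base model id comes from this record's metadata; everything else about the fine-tune (data, LoRA settings) is unknown and omitted.

```python
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Mirror of the bitsandbytes config listed in the card above.
bnb_config = BitsAndBytesConfig(
    load_in_8bit=True,
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
)

base_model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",   # base model from this record's metadata
    quantization_config=bnb_config,
    device_map="auto",            # requires accelerate and a CUDA device
)
```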
{"library_name": "peft", "base_model": "meta-llama/Llama-2-7b-hf"}
null
Mavitu56/LLamaEmergency
[ "peft", "safetensors", "arxiv:1910.09700", "base_model:meta-llama/Llama-2-7b-hf", "region:us" ]
2023-11-12T19:04:51+00:00
[ "1910.09700" ]
[]
TAGS #peft #safetensors #arxiv-1910.09700 #base_model-meta-llama/Llama-2-7b-hf #region-us
# Model Card for Model ID

## Model Details

### Model Description

- Developed by:
- Shared by [optional]:
- Model type:
- Language(s) (NLP):
- License:
- Finetuned from model [optional]:

### Model Sources [optional]

- Repository:
- Paper [optional]:
- Demo [optional]:

## Uses

### Direct Use

### Downstream Use [optional]

### Out-of-Scope Use

## Bias, Risks, and Limitations

### Recommendations

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## How to Get Started with the Model

Use the code below to get started with the model.

## Training Details

### Training Data

### Training Procedure

#### Preprocessing [optional]

#### Training Hyperparameters

- Training regime:

#### Speeds, Sizes, Times [optional]

## Evaluation

### Testing Data, Factors & Metrics

#### Testing Data

#### Factors

#### Metrics

### Results

#### Summary

## Model Examination [optional]

## Environmental Impact

Carbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).

- Hardware Type:
- Hours used:
- Cloud Provider:
- Compute Region:
- Carbon Emitted:

## Technical Specifications [optional]

### Model Architecture and Objective

### Compute Infrastructure

#### Hardware

#### Software

[optional]

BibTeX:

APA:

## Glossary [optional]

## More Information [optional]

## Model Card Authors [optional]

## Model Card Contact

## Training procedure

The following 'bitsandbytes' quantization config was used during training:
- quant_method: bitsandbytes
- load_in_8bit: True
- load_in_4bit: False
- llm_int8_threshold: 6.0
- llm_int8_skip_modules: None
- llm_int8_enable_fp32_cpu_offload: False
- llm_int8_has_fp16_weight: False
- bnb_4bit_quant_type: fp4
- bnb_4bit_use_double_quant: False
- bnb_4bit_compute_dtype: float32

### Framework versions

- PEFT 0.6.2.dev0
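For completeness, a hedged sketch of loading this record's adapter on top of the quantized base model with PEFT. The repo and base-model ids are taken from the record; the loading pattern is standard `PeftModel` usage, assumed here because the card itself gives no inference code.

```python
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Load the 8-bit base model, matching the training-time quantization.
base = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

# Attach the PEFT adapter weights from this record's repo.
model = PeftModel.from_pretrained(base, "Mavitu56/LLamaEmergency")
model.eval()
```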
[ "# Model Card for Model ID", "## Model Details", "### Model Description\n\n\n\n\n\n- Developed by: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:", "### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Downstream Use [optional]", "### Out-of-Scope Use", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", "## How to Get Started with the Model\n\nUse the code below to get started with the model.", "## Training Details", "### Training Data", "### Training Procedure", "#### Preprocessing [optional]", "#### Training Hyperparameters\n\n- Training regime:", "#### Speeds, Sizes, Times [optional]", "## Evaluation", "### Testing Data, Factors & Metrics", "#### Testing Data", "#### Factors", "#### Metrics", "### Results", "#### Summary", "## Model Examination [optional]", "## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:", "## Technical Specifications [optional]", "### Model Architecture and Objective", "### Compute Infrastructure", "#### Hardware", "#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Model Card Authors [optional]", "## Model Card Contact", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- quant_method: bitsandbytes\n- load_in_8bit: True\n- load_in_4bit: False\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: fp4\n- bnb_4bit_use_double_quant: False\n- bnb_4bit_compute_dtype: float32", "### Framework versions\n\n\n- PEFT 0.6.2.dev0" ]
[ "TAGS\n#peft #safetensors #arxiv-1910.09700 #base_model-meta-llama/Llama-2-7b-hf #region-us \n", "# Model Card for Model ID", "## Model Details", "### Model Description\n\n\n\n\n\n- Developed by: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:", "### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Downstream Use [optional]", "### Out-of-Scope Use", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", "## How to Get Started with the Model\n\nUse the code below to get started with the model.", "## Training Details", "### Training Data", "### Training Procedure", "#### Preprocessing [optional]", "#### Training Hyperparameters\n\n- Training regime:", "#### Speeds, Sizes, Times [optional]", "## Evaluation", "### Testing Data, Factors & Metrics", "#### Testing Data", "#### Factors", "#### Metrics", "### Results", "#### Summary", "## Model Examination [optional]", "## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:", "## Technical Specifications [optional]", "### Model Architecture and Objective", "### Compute Infrastructure", "#### Hardware", "#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Model Card Authors [optional]", "## Model Card Contact", "## Training procedure\n\n\nThe following 'bitsandbytes' quantization config was used during training:\n- quant_method: bitsandbytes\n- load_in_8bit: True\n- load_in_4bit: False\n- llm_int8_threshold: 6.0\n- llm_int8_skip_modules: None\n- llm_int8_enable_fp32_cpu_offload: False\n- llm_int8_has_fp16_weight: False\n- bnb_4bit_quant_type: fp4\n- bnb_4bit_use_double_quant: False\n- bnb_4bit_compute_dtype: float32", "### Framework versions\n\n\n- PEFT 0.6.2.dev0" ]
[ 41, 6, 3, 45, 28, 3, 4, 9, 9, 10, 42, 20, 3, 4, 5, 9, 11, 13, 3, 12, 5, 4, 5, 3, 4, 9, 53, 9, 8, 6, 3, 14, 8, 7, 9, 4, 164, 14 ]
[ "passage: TAGS\n#peft #safetensors #arxiv-1910.09700 #base_model-meta-llama/Llama-2-7b-hf #region-us \n# Model Card for Model ID## Model Details### Model Description\n\n\n\n\n\n- Developed by: \n- Shared by [optional]: \n- Model type: \n- Language(s) (NLP): \n- License: \n- Finetuned from model [optional]:### Model Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Downstream Use [optional]### Out-of-Scope Use## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.## How to Get Started with the Model\n\nUse the code below to get started with the model.## Training Details### Training Data### Training Procedure#### Preprocessing [optional]#### Training Hyperparameters\n\n- Training regime:#### Speeds, Sizes, Times [optional]## Evaluation### Testing Data, Factors & Metrics#### Testing Data#### Factors#### Metrics### Results#### Summary## Model Examination [optional]## Environmental Impact\n\n\n\nCarbon emissions can be estimated using the Machine Learning Impact calculator presented in Lacoste et al. (2019).\n\n- Hardware Type: \n- Hours used: \n- Cloud Provider: \n- Compute Region: \n- Carbon Emitted:## Technical Specifications [optional]### Model Architecture and Objective### Compute Infrastructure#### Hardware#### Software\n\n\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Model Card Authors [optional]## Model Card Contact" ]
[ -0.09958921372890472, 0.17822016775608063, -0.00342088402248919, 0.03716764226555824, 0.08536183089017868, 0.02169986627995968, 0.05467161908745766, 0.12298179417848587, -0.04951082170009613, 0.09634580463171005, 0.06148029491305351, 0.10814239829778671, 0.09265368431806564, 0.18860748410224915, -0.003615743713453412, -0.18746799230575562, 0.016973497346043587, -0.10058911144733429, -0.0039013028144836426, 0.12230304628610611, 0.15522731840610504, -0.09921388328075409, 0.08481825143098831, -0.01743045635521412, -0.010665729641914368, -0.03471985086798668, -0.06978952139616013, -0.04216833785176277, 0.04152604565024376, 0.05731003358960152, 0.04789997264742851, -0.005709131248295307, 0.08320847153663635, -0.2640402913093567, 0.017988497391343117, 0.03943658247590065, -0.010584660805761814, 0.08548509329557419, 0.08888021856546402, -0.056153226643800735, 0.1107882559299469, -0.052338242530822754, 0.12888428568840027, 0.07426277548074722, -0.07098223268985748, -0.1695287823677063, -0.08212245255708694, 0.06927639245986938, 0.16731786727905273, 0.08106929808855057, -0.04149406775832176, 0.16471222043037415, -0.11644312739372253, 0.01417770516127348, 0.03833393380045891, -0.04028080403804779, -0.07792805135250092, 0.04994642361998558, 0.10899016261100769, 0.053433023393154144, -0.13499300181865692, -0.028067251667380333, 0.027877531945705414, 0.032295648008584976, 0.08345462381839752, 0.022633090615272522, 0.1481732577085495, 0.04434492811560631, -0.1394515037536621, -0.027773704379796982, 0.13568268716335297, 0.04305786266922951, -0.043535102158784866, -0.22172805666923523, 0.011950639076530933, -0.061891939491033554, -0.016445688903331757, -0.04587959870696068, 0.03560582548379898, -0.019285108894109726, 0.07921037077903748, -0.019924014806747437, -0.09220714867115021, -0.03619953989982605, 0.08456121385097504, 0.04809248819947243, 0.03091992437839508, -0.03149448335170746, -0.0047516170889139175, 0.12699179351329803, 0.0592554435133934, -0.1255553662776947, -0.06136321276426315, -0.06403952091932297, -0.0545935221016407, -0.05339999869465828, 0.024157050997018814, 0.027511732652783394, 0.06381335854530334, 0.21992355585098267, 0.00325528415851295, 0.04399377107620239, 0.05910464748740196, 0.01553115714341402, 0.0631830245256424, 0.08129668980836868, -0.07421085983514786, -0.13799047470092773, -0.01571812480688095, 0.09584537148475647, -0.006040820386260748, -0.01650642417371273, -0.03777937963604927, 0.03658251836895943, 0.04893383011221886, 0.09372691810131073, 0.09488533437252045, -0.0031196679919958115, -0.07935544848442078, -0.05289185047149658, 0.2027476578950882, -0.15783895552158356, 0.02885807491838932, 0.010250277817249298, -0.03712226822972298, -0.051801953464746475, 0.009702831506729126, 0.010319485329091549, -0.02176593616604805, 0.08355475962162018, -0.07413307577371597, -0.028188709169626236, -0.11976063251495361, -0.007857128046452999, 0.037609245628118515, 0.035730570554733276, -0.013890240341424942, -0.017010007053613663, -0.0697493925690651, -0.08487348258495331, 0.0963100716471672, -0.08318988233804703, -0.05760730057954788, -0.03286668658256531, -0.09050976485013962, 0.01972472481429577, 0.013431953266263008, 0.12348656356334686, -0.027881808578968048, 0.04443790763616562, -0.010169271379709244, 0.05268030986189842, 0.06830505281686783, 0.03691745176911354, -0.05614406615495682, 0.057997751981019974, -0.19362902641296387, 0.09298506379127502, -0.0878722220659256, 0.02299860678613186, -0.14821414649486542, -0.012775965966284275, 0.03627769649028778, 0.013312527909874916, 
0.028421804308891296, 0.1352289915084839, -0.2199636995792389, -0.0070482660084962845, 0.1552991420030594, -0.08791111409664154, -0.12152370065450668, 0.05128340795636177, -0.06865998357534409, 0.15175709128379822, 0.029137104749679565, -0.03664654120802879, 0.06952672451734543, -0.15967372059822083, -0.03351756930351257, -0.029847310855984688, -0.01432150136679411, 0.10676157474517822, 0.09250682592391968, -0.0607852078974247, 0.045907869935035706, 0.018361423164606094, -0.037800319492816925, -0.038923632353544235, -0.05144637078046799, -0.12239246070384979, 0.0019676880910992622, -0.08359181135892868, 0.03673383593559265, -0.015112223103642464, -0.06548868119716644, -0.015138315036892891, -0.16905446350574493, -0.006628463044762611, 0.09118993580341339, 0.013047419488430023, -0.0255645252764225, -0.09754688292741776, 0.01881062425673008, -0.010585007257759571, -0.03382663428783417, -0.14166077971458435, -0.03205643966794014, 0.011067100800573826, -0.1383916735649109, 0.023373454809188843, -0.10949191451072693, 0.053952913731336594, 0.01889496110379696, -0.06830944120883942, -0.014190773479640484, -0.016034763306379318, 0.022800078615546227, -0.047966230660676956, -0.24973076581954956, -0.011841769330203533, -0.04341621696949005, 0.14910495281219482, -0.2251472771167755, 0.04106473922729492, 0.05546068400144577, 0.12132018059492111, -0.013419704511761665, -0.05422911420464516, 0.02401556819677353, -0.07498089969158173, -0.027493277564644814, -0.05693550407886505, -0.01175781525671482, -0.019265223294496536, -0.06369378417730331, 0.02427545003592968, -0.1161341518163681, -0.049672748893499374, 0.11037011444568634, 0.06780838221311569, -0.16364140808582306, -0.04007168859243393, -0.029392285272479057, -0.08419661223888397, -0.0857851579785347, -0.059756480157375336, 0.10583919286727905, 0.04898513853549957, 0.030219001695513725, -0.08005347102880478, -0.08314654976129532, 0.005711788311600685, -0.027461064979434013, -0.027396151795983315, 0.1023550033569336, 0.05978507548570633, -0.12831607460975647, 0.09541887789964676, 0.0842071995139122, 0.0031684827990829945, 0.1084882915019989, -0.01894545368850231, -0.11358590424060822, -0.048062797635793686, 0.037305451929569244, 0.007823348045349121, 0.17110082507133484, -0.07925617694854736, 0.06494009494781494, 0.0394056960940361, -0.025266144424676895, 0.0561736561357975, -0.09641827642917633, 0.012067606672644615, -0.0007400307804346085, -0.011821416206657887, 0.0009478467400185764, -0.03231066092848778, 0.02176767773926258, 0.0758860856294632, 0.041627928614616394, 0.03576192632317543, 0.046345192939043045, -0.04091225191950798, -0.1238069087266922, 0.1898459643125534, -0.11069045215845108, -0.21111290156841278, -0.15953733026981354, 0.04827571660280228, 0.03933708369731903, -0.025622140616178513, 0.008011803030967712, -0.04226386919617653, -0.09504260122776031, -0.07891882210969925, -0.003364281030371785, 0.03723989799618721, -0.06833995878696442, -0.08120911568403244, 0.06749874353408813, 0.05423719063401222, -0.12787361443042755, 0.039397913962602615, 0.05365509167313576, -0.03393835201859474, 0.008147161453962326, 0.07476551085710526, 0.07574170082807541, 0.1496206820011139, -0.01799558289349079, -0.011922822333872318, 0.05455413833260536, 0.2558569610118866, -0.15123629570007324, 0.09352011978626251, 0.10824105143547058, -0.07266124337911606, 0.07820230722427368, 0.1805427372455597, 0.03307907655835152, -0.10508346557617188, 0.04061733931303024, 0.03240526095032692, -0.018894856795668602, -0.2782538831233978, -0.05497017875313759, 
-0.0031282820273190737, -0.10625448077917099, 0.066255584359169, 0.07526764273643494, 0.08633891493082047, 0.042885489761829376, -0.061146944761276245, -0.08712387084960938, 0.029084783047437668, 0.08032681792974472, -0.028702225536108017, 0.0020488486625254154, 0.07998545467853546, -0.01765074022114277, 0.013640486635267735, 0.10159550607204437, -0.0034026948269456625, 0.18373510241508484, 0.02911691553890705, 0.10130850970745087, 0.09663517773151398, 0.10193062573671341, -0.011115482077002525, 0.01936403289437294, 0.017031943425536156, 0.019364066421985626, 0.00408623181283474, -0.08940550684928894, 0.032151028513908386, 0.11739223450422287, 0.052080847322940826, 0.03618480637669563, 0.018226759508252144, -0.04260242357850075, 0.05470382794737816, 0.16781318187713623, 0.00010272402869304642, -0.20336665213108063, -0.06527489423751831, 0.060816679149866104, -0.07732740789651871, -0.12756997346878052, -0.019896140322089195, 0.045457873493433, -0.165199413895607, 0.016857391223311424, -0.04664872586727142, 0.0914846658706665, -0.0851147472858429, -0.03824399784207344, 0.07532048225402832, 0.07193811237812042, -0.02025427110493183, 0.0799664780497551, -0.18574555218219757, 0.1330290138721466, 0.026392744854092598, 0.07464258372783661, -0.09478568285703659, 0.10053589195013046, 0.01944882795214653, -0.02308550290763378, 0.15277500450611115, 0.004975579679012299, -0.042033929377794266, -0.061026573181152344, -0.11181437969207764, -0.009596964344382286, 0.09073211252689362, -0.1146748811006546, 0.06844234466552734, -0.007484556175768375, -0.021628497168421745, 0.014299229718744755, -0.07148284465074539, -0.1364382952451706, -0.17148782312870026, 0.05709007754921913, -0.12166473269462585, 0.04358847811818123, -0.09901302307844162, -0.07089653611183167, -0.0014998811529949307, 0.18128547072410583, -0.18991319835186005, -0.0716165229678154, -0.1374252289533615, -0.08052773773670197, 0.17163676023483276, -0.043250858783721924, 0.07433997094631195, 0.023150132969021797, 0.15696649253368378, 0.026333961635828018, 0.004375650081783533, 0.10257188975811005, -0.08401047438383102, -0.18786734342575073, -0.06392818689346313, 0.14466115832328796, 0.16124357283115387, 0.04120013490319252, -0.009987834841012955, 0.007914939895272255, -0.054847944527864456, -0.11801422387361526, 0.01417339313775301, 0.15683770179748535, 0.10711493343114853, 0.009542837738990784, -0.02564515545964241, -0.1217815950512886, -0.06165655702352524, -0.0690130963921547, 0.0015849252231419086, 0.1940477341413498, -0.06275347620248795, 0.15652979910373688, 0.12236613780260086, -0.05630822479724884, -0.2051631659269333, 0.04633055627346039, 0.06086575239896774, 0.022351808845996857, 0.0648551732301712, -0.16820475459098816, 0.10421747714281082, 0.01952310837805271, -0.06534015387296677, 0.1385079175233841, -0.1364017277956009, -0.15265488624572754, 0.0983990728855133, 0.05151621624827385, -0.22317281365394592, -0.1076858788728714, -0.09420520812273026, -0.03458600491285324, -0.10834317654371262, 0.07873973250389099, -0.020781900733709335, 0.015523632057011127, 0.031120628118515015, 0.03387318551540375, 0.02092776633799076, -0.05233524739742279, 0.20298422873020172, -0.010506967082619667, 0.03181162104010582, -0.05318722128868103, -0.09121035784482956, 0.05064363032579422, -0.05286717787384987, 0.09548468887805939, -0.019732747226953506, 0.024335602298378944, -0.12479861080646515, -0.04510413110256195, -0.06451043486595154, 0.03138386830687523, -0.09918952733278275, -0.08741889894008636, -0.049199774861335754, 0.10503747314214706, 
0.0885685384273529, -0.043476250022649765, -0.003026575781404972, -0.07389629632234573, 0.03511243686079979, 0.20823118090629578, 0.19870422780513763, 0.05715909227728844, -0.05762149393558502, 0.011624033562839031, -0.01966141164302826, 0.04812482371926308, -0.22687964141368866, 0.05456581339240074, 0.04254063218832016, 0.021349944174289703, 0.09846232086420059, -0.022485554218292236, -0.15062586963176727, -0.06284206360578537, 0.07375984638929367, -0.04260272532701492, -0.14572732150554657, -0.027416333556175232, 0.029867473989725113, -0.2061278373003006, -0.03803364187479019, 0.01901216246187687, -0.014575323089957237, -0.041760679334402084, 0.017218735069036484, 0.08706367015838623, -0.019906001165509224, 0.13261614739894867, 0.08794256299734116, 0.09351148456335068, -0.10187935829162598, 0.07184190303087234, 0.06371329724788666, -0.053888190537691116, 0.03328298032283783, 0.08458597213029861, -0.04431323707103729, -0.037342749536037445, 0.09626075625419617, 0.07142946869134903, 0.03557872399687767, -0.04858269542455673, -0.00517708994448185, -0.043867647647857666, 0.053505126386880875, 0.11426326632499695, 0.05048929527401924, 0.005205917172133923, 0.05185544490814209, 0.025367753580212593, -0.0865088403224945, 0.11939018964767456, 0.05885802209377289, 0.022945929318666458, -0.04252767562866211, -0.03155896067619324, 0.0004034892772324383, -0.008581339381635189, -0.018299777060747147, -0.007664135191589594, -0.08396603912115097, -0.01110026240348816, -0.12971359491348267, 0.0448303259909153, -0.08910974115133286, 0.01331083383411169, 0.024037137627601624, -0.048207614570856094, 0.0010799242882058024, 0.014358695596456528, -0.07176296412944794, -0.049648284912109375, -0.0062763867899775505, 0.11059390753507614, -0.1265411376953125, 0.03197001665830612, 0.08325210958719254, -0.10298429429531097, 0.07718981802463531, 0.0030687053222209215, 0.006807462312281132, 0.02239564061164856, -0.1752333641052246, 0.06033630669116974, -0.030138960108160973, -0.006521409843116999, 0.024311792105436325, -0.2335515320301056, -0.01286474708467722, -0.03620356321334839, -0.028182584792375565, 0.013094248250126839, -0.029198182746767998, -0.12935870885849, 0.07977861166000366, -0.005120570305734873, -0.07503266632556915, -0.026616977527737617, 0.035145439207553864, 0.10837535560131073, -0.03228594735264778, 0.14602813124656677, -0.01846432499587536, 0.06504986435174942, -0.16815154254436493, -0.004512900952249765, -0.015557577833533287, 0.038625892251729965, -0.018976163119077682, -0.017896389588713646, 0.05733758583664894, -0.03581671044230461, 0.20441192388534546, -0.030804481357336044, 0.05275256186723709, 0.05398726090788841, 0.014017169363796711, -0.002961935708299279, 0.08788510411977768, 0.06933405995368958, -0.01868467591702938, 0.014091010205447674, 0.03629021719098091, -0.009082579053938389, -0.042336322367191315, -0.1542479246854782, 0.05611913278698921, 0.16047292947769165, 0.042510565370321274, 0.014913941733539104, 0.05771127715706825, -0.10385137051343918, -0.07978618890047073, 0.14221693575382233, -0.004483410157263279, -0.03866530954837799, -0.0748148038983345, 0.14831775426864624, 0.12042757868766785, -0.1983824521303177, 0.07773032784461975, -0.07023079693317413, -0.07294114679098129, -0.10936559736728668, -0.16249965131282806, -0.06251002103090286, -0.04477237910032272, -0.013681245967745781, -0.06186382472515106, 0.05681753531098366, 0.087432362139225, 0.005865978542715311, -0.023064671084284782, 0.10344063490629196, 0.004592592362314463, -0.021423116326332092, 0.031502511352300644, 
0.06395787745714188, 0.0143724475055933, -0.09641817212104797, 0.01227843202650547, -0.004429755266755819, 0.024719195440411568, 0.0602402500808239, 0.003000671276822686, -0.03586670383810997, -0.006339323706924915, -0.02812313288450241, -0.11261815577745438, 0.04151143133640289, -0.021209517493844032, -0.026242956519126892, 0.13100498914718628, 0.025027139112353325, -0.0019341352162882686, -0.022828776389360428, 0.23187139630317688, -0.07818873226642609, -0.09346118569374084, -0.1627165526151657, 0.061323873698711395, -0.054045747965574265, 0.02840062975883484, 0.04159728065133095, -0.114125557243824, 0.031495146453380585, 0.1409011334180832, 0.14579430222511292, -0.014738147146999836, 0.009881013073027134, 0.04117817059159279, -0.00315871206112206, -0.04625589773058891, 0.01649506203830242, 0.044360555708408356, 0.11293952912092209, -0.056767139583826065, 0.07962323725223541, -0.008912809193134308, -0.08277686685323715, 0.0007060576463118196, 0.10573406517505646, -0.002493718871846795, 0.01040386501699686, -0.0651342123746872, 0.13979804515838623, -0.06058548390865326, -0.23787720501422882, 0.04749435558915138, -0.06444638222455978, -0.16027259826660156, -0.037829749286174774, 0.025605279952287674, -0.021966602653265, 0.014399007894098759, 0.07956844568252563, -0.04333661496639252, 0.16981275379657745, 0.03957255929708481, -0.07112500816583633, -0.06675063818693161, 0.07269910722970963, -0.12125661224126816, 0.2867929935455322, 0.017475446686148643, 0.06958059221506119, 0.10685567557811737, -0.01759418472647667, -0.12807053327560425, 0.027357714250683784, 0.09898122400045395, -0.07573498785495758, 0.07806754112243652, 0.19079023599624634, -0.0015583541244268417, 0.14166101813316345, 0.06212562695145607, -0.03682410344481468, 0.03301938250660896, -0.11868676543235779, -0.06172965466976166, -0.11070968955755234, 0.08245040476322174, -0.07802992314100266, 0.16600510478019714, 0.13684585690498352, -0.06959374994039536, 0.001010918291285634, -0.02361506037414074, 0.08337022364139557, -0.006998652592301369, 0.10584290325641632, 0.00431731715798378, -0.2048783302307129, 0.031172044575214386, 0.031020430848002434, 0.10398752987384796, -0.21470633149147034, -0.06708450615406036, 0.0624992661178112, -0.029268886893987656, -0.057079996913671494, 0.11402390897274017, 0.057629067450761795, 0.039081644266843796, -0.038663044571876526, -0.0421401783823967, -0.02197974920272827, 0.12970057129859924, -0.1168602854013443, -0.016834843903779984 ]