diff --git "a/params-details.rtf" "b/params-details.rtf" new file mode 100644--- /dev/null +++ "b/params-details.rtf" @@ -0,0 +1,12414 @@ +{\rtf1\ansi\ansicpg1252\cocoartf2636 +\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\froman\fcharset0 Times-Roman;\f1\fnil\fcharset0 HelveticaNeue;\f2\fnil\fcharset0 HelveticaNeue-Bold; +} +{\colortbl;\red255\green255\blue255;\red226\green224\blue220;\red15\green84\blue167;\red0\green0\blue0; +} +{\*\expandedcolortbl;;\cssrgb\c90980\c90196\c89020;\cssrgb\c3922\c41569\c71373\c23922;\cssrgb\c0\c0\c0; +} +\paperw11900\paperh16840\margl1440\margr1440\vieww18380\viewh8400\viewkind0 +\deftab720 +\pard\pardeftab720\partightenfactor0 + +\f0\fs24 \cf2 \expnd0\expndtw0\kerning0 +\outl0\strokewidth0 \strokec2 ***** Running training *****\ + Num examples = 12460\ + Num Epochs = 2\ + Instantaneous batch size per device = 10\ + Total train batch size (w. parallel, distributed & accumulation) = 80\ + Gradient Accumulation steps = 4\ + Total optimization steps = 310\ + Number of trainable parameters = 161844480\ +\pard\pardeftab720\partightenfactor0 + +\f1\fs28 \cf2 \cb3 \strokec2 \ +\'a0[161/310 7:07:44 < 6:40:50, 0.01 it/s, Epoch 1.03/2]\ +\ + +\itap1\trowd \taflags5 \trgaph108\trleft-108 \trcbpat3 \tamarb240 \trbrdrt\brdrnil \trbrdrl\brdrnil \trbrdrr\brdrnil +\clvertalc \clshdrawnil \clwWidth559\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx1440 +\clvertalc \clshdrawnil \clwWidth1599\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx2880 +\clvertalc \clshdrawnil \clwWidth1821\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx4320 +\clvertalc \clshdrawnil \clwWidth2098\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx5760 +\clvertalc \clshdrawnil \clwWidth1708\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx7200 +\clvertalc \clshdrawnil \clwWidth2162\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx8640 +\pard\intbl\itap1\pardeftab720\qr\partightenfactor0 + +\f2\b\fs24 \cf0 \cb1 \strokec4 Step\cell +\pard\intbl\itap1\pardeftab720\qr\partightenfactor0 +\cf0 Training Loss\cell +\pard\intbl\itap1\pardeftab720\qr\partightenfactor0 +\cf0 Validation Loss\cell +\pard\intbl\itap1\pardeftab720\qr\partightenfactor0 +\cf0 Rouge2 Precision\cell +\pard\intbl\itap1\pardeftab720\qr\partightenfactor0 +\cf0 Rouge2 Recall\cell +\pard\intbl\itap1\pardeftab720\qr\partightenfactor0 +\cf0 Rouge2 Fmeasure\cell \row + +\itap1\trowd \taflags5 \trgaph108\trleft-108 \trcbpat3 \tamarb240 \trbrdrl\brdrnil \trbrdrr\brdrnil +\clvertalc \clshdrawnil \clwWidth559\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx1440 +\clvertalc \clshdrawnil \clwWidth1599\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx2880 +\clvertalc \clshdrawnil \clwWidth1821\clftsWidth3 \clbrdrt\brdrnil \clbrdrl\brdrnil \clbrdrb\brdrnil \clbrdrr\brdrnil \clpadt120 \clpadl120 \clpadb120 \clpadr120 \gaph\cellx4320 +\clvertalc \clshdrawnil 
Step   Training Loss   Validation Loss   Rouge2 Precision   Rouge2 Recall   Rouge2 Fmeasure
  10        1.635400          1.757870           0.055400        0.185500          0.082700
  20        1.504800          1.658047           0.062500        0.192100          0.091700
  30        1.419400          1.546135           0.061300        0.207100          0.091900
  40        1.353900          1.522316           0.060600        0.206800          0.090900
  50        1.332600          1.483913           0.059400        0.211300          0.090200
  60        1.278900          1.551230           0.063900        0.212300          0.095400
  70        1.286100          1.461083           0.065100        0.223400          0.098000
  80        1.297700          1.452156           0.067700        0.227200          0.101300
  90        1.240300          1.419080           0.069500        0.237600          0.104400
 100        1.223500          1.425878           0.065800        0.220100          0.098800
 110        1.294400          1.397157           0.066800        0.230300          0.100800
 120        1.218400          1.404413           0.068800        0.243400          0.104500
 130        1.257900          1.397923           0.067800        0.231700          0.102000
 140        1.187500          1.431973           0.069400        0.233300          0.104000
 150        1.211600          1.417688           0.069000        0.233400          0.103400

***** Running Evaluation *****
  Num examples = 500
  Batch size = 10
Generate config GenerationConfig {
  "bos_token_id": 0,
  "decoder_start_token_id": 2,
  "early_stopping": true,
  "eos_token_id": 2,
  "length_penalty": 2.0,
  "max_length": 512,
  "min_length": 100,
  "no_repeat_ngram_size": 3,
  "num_beams": 2,
  "pad_token_id": 1,
  "transformers_version": "4.26.1",
  "use_cache": false
}

/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/transformers/generation/utils.py:1186: UserWarning: You have modified the pretrained model configuration to control generation. This is a deprecated strategy to control generation and will be removed soon, in a future version. Please use a generation configuration file (see https://huggingface.co/docs/transformers/main_classes/text_generation)
  warnings.warn(
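The UserWarning above is transformers 4.26 deprecating generation parameters that were set directly on the model config. A minimal sketch of the suggested migration, using the exact values from the dump above (attaching to `model` is left commented out since the training script is not in this log):

```python
from transformers import GenerationConfig

# Same settings as the "Generate config" dump above.
generation_config = GenerationConfig(
    bos_token_id=0,
    decoder_start_token_id=2,
    early_stopping=True,
    eos_token_id=2,
    length_penalty=2.0,
    max_length=512,
    min_length=100,
    no_repeat_ngram_size=3,
    num_beams=2,
    pad_token_id=1,
    use_cache=False,
)

# Carry the settings on the model (or pass generation_config=... to
# model.generate()) instead of mutating model.config:
# model.generation_config = generation_config
```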
[The "Generate config GenerationConfig" block above is re-logged verbatim for every generation call during evaluation; the repeated copies are omitted here.]

Saving model checkpoint to ./checkpoint-10
Configuration saved in ./checkpoint-10/config.json
Configuration saved in ./checkpoint-10/generation_config.json
Model weights saved in ./checkpoint-10/pytorch_model.bin
tokenizer config file saved in ./checkpoint-10/tokenizer_config.json
Special tokens file saved in ./checkpoint-10/special_tokens_map.json
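The Rouge2 Precision/Recall/Fmeasure columns in the table above come from a user-supplied compute_metrics hook, which this log does not show. A plausible sketch, assuming the legacy datasets.load_metric("rouge") API (which returns precision/recall/fmeasure triples) and a BART tokenizer (the base checkpoint name is a placeholder):

```python
import datasets
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("facebook/bart-base")  # assumed checkpoint
rouge = datasets.load_metric("rouge")

def compute_metrics(pred):
    # Map the -100 ignore-index in the labels back to pad tokens before decoding.
    labels_ids = pred.label_ids
    labels_ids[labels_ids == -100] = tokenizer.pad_token_id
    pred_str = tokenizer.batch_decode(pred.predictions, skip_special_tokens=True)
    label_str = tokenizer.batch_decode(labels_ids, skip_special_tokens=True)

    rouge2 = rouge.compute(
        predictions=pred_str, references=label_str, rouge_types=["rouge2"]
    )["rouge2"].mid
    # These keys become the "Rouge2 ..." columns in the progress table.
    return {
        "rouge2_precision": round(rouge2.precision, 4),
        "rouge2_recall": round(rouge2.recall, 4),
        "rouge2_fmeasure": round(rouge2.fmeasure, 4),
    }
```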
/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.
  warnings.warn('Was asked to gather along dimension 0, but all '
***** Running Evaluation *****
  Num examples = 500
  Batch size = 10
[Repeated "Generate config GenerationConfig" blocks, identical to the one above, omitted.]
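The torch warning above is benign under nn.DataParallel: each replica returns a 0-dim loss tensor, and gathering scalars along dim 0 forces an unsqueeze. A rough illustration of what the gather ends up doing (not the library's actual code; the loss values are made up):

```python
import torch

# One 0-dim (scalar) loss tensor per GPU replica; values are hypothetical.
per_replica_losses = [torch.tensor(1.23), torch.tensor(1.31)]

# Scalars cannot be concatenated along dim 0, so each is unsqueezed to
# shape (1,) first and the gather returns a vector -- hence the warning.
gathered = torch.cat([loss.unsqueeze(0) for loss in per_replica_losses])
print(gathered)         # tensor([1.2300, 1.3100])
print(gathered.mean())  # tensor(1.2700) -- the value the Trainer reports
```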
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-20\ +Configuration saved in ./checkpoint-20/config.json\ +Configuration saved in ./checkpoint-20/generation_config.json\ +Model weights saved in ./checkpoint-20/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-20/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-20/special_tokens_map.json\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + 
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config 
GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + 
"decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-30\ +Configuration saved in ./checkpoint-30/config.json\ +Configuration saved in ./checkpoint-30/generation_config.json\ +Model weights saved in ./checkpoint-30/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-30/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-30/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-10] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 
0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 
512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + 
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-40\ +Configuration saved in ./checkpoint-40/config.json\ +Configuration saved in ./checkpoint-40/generation_config.json\ +Model weights saved in ./checkpoint-40/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-40/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-40/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-20] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + 
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config 
GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + 
"decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-50\ +Configuration saved in ./checkpoint-50/config.json\ +Configuration saved in ./checkpoint-50/generation_config.json\ +Model weights saved in ./checkpoint-50/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-50/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-50/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-30] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-60\ +Configuration saved in ./checkpoint-60/config.json\ +Configuration saved in ./checkpoint-60/generation_config.json\ +Model weights saved in ./checkpoint-60/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-60/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-60/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-40] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + 
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config 
GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + 
"decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-70\ +Configuration saved in ./checkpoint-70/config.json\ +Configuration saved in ./checkpoint-70/generation_config.json\ +Model weights saved in ./checkpoint-70/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-70/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-70/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-50] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-80\ +Configuration saved in ./checkpoint-80/config.json\ +Configuration saved in ./checkpoint-80/generation_config.json\ +Model weights saved in ./checkpoint-80/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-80/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-80/special_tokens_map.json\ +Deleting 
older checkpoint [checkpoint-60] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
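Because save_total_limit prunes older directories, only the newest checkpoints (here checkpoint-70 and checkpoint-80) remain on disk at this point; an interrupted run would resume from the latest one. A sketch, assuming a Seq2SeqTrainer instance named trainer:

    # Hypothetical: restart training from the most recent surviving checkpoint.
    trainer.train(resume_from_checkpoint="./checkpoint-80")
    # Or let the Trainer locate the last checkpoint in output_dir itself:
    # trainer.train(resume_from_checkpoint=True)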
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config 
GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + 
"decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 
512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-90\ +Configuration saved in ./checkpoint-90/config.json\ +Configuration saved in ./checkpoint-90/generation_config.json\ +Model weights saved in ./checkpoint-90/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-90/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-90/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-70] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 
512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + 
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config 
+***** Running Evaluation *****\
+ Num examples = 500\
+ Batch size = 10\
+Saving model checkpoint to ./checkpoint-100\
+Configuration saved in ./checkpoint-100/config.json\
+Configuration saved in ./checkpoint-100/generation_config.json\
+Model weights saved in ./checkpoint-100/pytorch_model.bin\
+tokenizer config file saved in ./checkpoint-100/tokenizer_config.json\
+Special tokens file saved in ./checkpoint-100/special_tokens_map.json\
+Deleting older checkpoint [checkpoint-80] due to args.save_total_limit\
512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-110\ +Configuration saved in ./checkpoint-110/config.json\ +Configuration saved in ./checkpoint-110/generation_config.json\ +Model weights saved in ./checkpoint-110/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-110/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-110/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-90] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": 
true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + 
"max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + 
"no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 
1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ 
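For reference, the block above is the generation configuration that the trainer attaches to every model.generate() call during evaluation; the library prints it once per evaluation batch. A minimal sketch of constructing the same configuration in code, assuming nothing beyond the transformers version recorded in the log (4.26.1):

from transformers import GenerationConfig

# Mirrors the logged "Generate config GenerationConfig" block.
gen_config = GenerationConfig(
    bos_token_id=0,
    decoder_start_token_id=2,
    early_stopping=True,      # stop beam search once enough beams have finished
    eos_token_id=2,
    length_penalty=2.0,       # > 1.0 favours longer outputs under beam search
    max_length=512,
    min_length=100,           # no EOS allowed before 100 generated tokens
    no_repeat_ngram_size=3,   # never repeat any 3-gram within one output
    num_beams=2,
    pad_token_id=1,
    use_cache=False,
)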
+Saving model checkpoint to ./checkpoint-120\
+Configuration saved in ./checkpoint-120/config.json\
+Configuration saved in ./checkpoint-120/generation_config.json\
+Model weights saved in ./checkpoint-120/pytorch_model.bin\
+tokenizer config file saved in ./checkpoint-120/tokenizer_config.json\
+Special tokens file saved in ./checkpoint-120/special_tokens_map.json\
+Deleting older checkpoint [checkpoint-100] due to args.save_total_limit\
+/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\
+  warnings.warn('Was asked to gather along dimension 0, but all '\
+***** Running Evaluation *****\
+  Num examples = 500\
+  Batch size = 10\
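The save/delete pattern above recurs every 10 optimization steps: a new checkpoint is written and the oldest surviving one is removed because of args.save_total_limit. A sketch of trainer arguments consistent with this log; eval_steps, save_steps, and the evaluation batch size are read off the log, save_total_limit=2 is inferred from the rotation (checkpoint-120 written, checkpoint-100 deleted), and anything else is hypothetical:

from transformers import Seq2SeqTrainingArguments

args = Seq2SeqTrainingArguments(
    output_dir="./",               # matches the ./checkpoint-NNN paths above
    evaluation_strategy="steps",
    eval_steps=10,
    save_strategy="steps",
    save_steps=10,
    save_total_limit=2,            # keep two checkpoints, delete the oldest
    per_device_eval_batch_size=10,
    predict_with_generate=True,    # evaluation decodes with model.generate()
)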
"pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + 
"use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig 
\{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + 
"early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + 
"length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-130\ +Configuration saved in ./checkpoint-130/config.json\ +Configuration saved in ./checkpoint-130/generation_config.json\ +Model weights saved in ./checkpoint-130/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-130/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-130/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-110] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + 
"decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 
512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + 
"num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": 
"4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-140\ +Configuration saved in ./checkpoint-140/config.json\ +Configuration saved in ./checkpoint-140/generation_config.json\ +Model weights saved in ./checkpoint-140/pytorch_model.bin\ +tokenizer config file saved in ./checkpoint-140/tokenizer_config.json\ +Special tokens file saved in ./checkpoint-140/special_tokens_map.json\ +Deleting older checkpoint [checkpoint-120] due to args.save_total_limit\ +/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\ + warnings.warn('Was asked to 
gather along dimension 0, but all '\ +***** Running Evaluation *****\ + Num examples = 500\ + Batch size = 10\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + 
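The "Generate config GenerationConfig" dump is an INFO-level message emitted once per model.generate() call, i.e. once per evaluation batch; at 500 examples and batch size 10 that is 50 identical dumps per evaluation pass. If the repetition is unwanted, the library logger can be turned down before training starts; a minimal sketch:

from transformers.utils import logging

# Print only WARNING and above: the per-batch generation-config dumps
# disappear, while real warnings still come through.
logging.set_verbosity_warning()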
"transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ 
+Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + 
"decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + 
"eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Generate config GenerationConfig \{\ + "bos_token_id": 0,\ + "decoder_start_token_id": 2,\ + "early_stopping": true,\ + "eos_token_id": 2,\ + "length_penalty": 2.0,\ + "max_length": 512,\ + "min_length": 100,\ + "no_repeat_ngram_size": 3,\ + "num_beams": 2,\ + "pad_token_id": 1,\ + "transformers_version": "4.26.1",\ + "use_cache": false\ +\}\ +\ +Saving model checkpoint to ./checkpoint-150\ +Configuration saved in ./checkpoint-150/config.json\ +Configuration saved in ./checkpoint-150/generation_config.json\ +Model weights saved 
+Saving model checkpoint to ./checkpoint-150\
+Configuration saved in ./checkpoint-150/config.json\
+Configuration saved in ./checkpoint-150/generation_config.json\
+Model weights saved in ./checkpoint-150/pytorch_model.bin\
+tokenizer config file saved in ./checkpoint-150/tokenizer_config.json\
+Special tokens file saved in ./checkpoint-150/special_tokens_map.json\
+Deleting older checkpoint [checkpoint-130] due to args.save_total_limit\
+/home/pageocr/bart-decoder-san/venv/testing/venv/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.\
+ warnings.warn('Was asked to gather along dimension 0, but all '\
+***** Running Evaluation *****\
+ Num examples = 500\
+ Batch size = 10\
+Generate config GenerationConfig \{\
+ "bos_token_id": 0,\
+ "decoder_start_token_id": 2,\
+ "early_stopping": true,\
+ "eos_token_id": 2,\
+ "length_penalty": 2.0,\
+ "max_length": 512,\
+ "min_length": 100,\
+ "no_repeat_ngram_size": 3,\
+ "num_beams": 2,\
+ "pad_token_id": 1,\
+ "transformers_version": "4.26.1",\
+ "use_cache": false\
+\}\
+\
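Checkpoint-130 is deleted above because save_total_limit caps how many checkpoints stay on disk, and the UserWarning is the benign message torch.nn.DataParallel prints when it gathers per-replica scalar losses into a vector. A sketch of trainer arguments consistent with this log, assuming the standard Seq2SeqTrainingArguments API (output_dir and the exact save_total_limit value are guesses, not taken from the log):

from transformers import Seq2SeqTrainingArguments

# Illustrative reconstruction only; the actual script may differ.
training_args = Seq2SeqTrainingArguments(
    output_dir="./",                # checkpoints appear as ./checkpoint-<step>
    per_device_eval_batch_size=10,  # matches "Batch size = 10" above
    evaluation_strategy="steps",    # evaluate every eval_steps optimizer steps
    eval_steps=10,
    save_strategy="steps",
    save_steps=10,                  # consistent with checkpoint-130/-150
    save_total_limit=2,             # prunes the oldest checkpoint on each save
    predict_with_generate=True,     # evaluation decodes with the config above
)

Under these guessed values, writing checkpoint-150 would keep checkpoint-140 and checkpoint-150 and delete checkpoint-130, which matches the deletion message.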
+}
\ No newline at end of file