{
"best_metric": 0.740916907787323,
"best_model_checkpoint": "/media/xzuyn/c41d93ed-83a7-4577-9d38-5062b7e14438/xzuyn/Documents/AI_ROCM_5-6/LLaMa-LoRA-Tuner/data/lora_models/LLaMa-2-WizardLM-Uncensored-Tulu-Format-50K-7B-LoRA/checkpoint-10000",
"epoch": 0.2,
"global_step": 10000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 0.0001,
"loss": 1.1136,
"step": 100
},
{
"epoch": 0.0,
"learning_rate": 9.997999599919985e-05,
"loss": 0.9595,
"step": 200
},
{
"epoch": 0.01,
"learning_rate": 9.995999199839968e-05,
"loss": 0.8936,
"step": 300
},
{
"epoch": 0.01,
"learning_rate": 9.993998799759952e-05,
"loss": 0.9237,
"step": 400
},
{
"epoch": 0.01,
"learning_rate": 9.992038407681536e-05,
"loss": 0.9005,
"step": 500
},
{
"epoch": 0.01,
"learning_rate": 9.99003800760152e-05,
"loss": 0.8433,
"step": 600
},
{
"epoch": 0.01,
"learning_rate": 9.988037607521505e-05,
"loss": 0.863,
"step": 700
},
{
"epoch": 0.02,
"learning_rate": 9.98603720744149e-05,
"loss": 0.8634,
"step": 800
},
{
"epoch": 0.02,
"learning_rate": 9.984036807361474e-05,
"loss": 0.8463,
"step": 900
},
{
"epoch": 0.02,
"learning_rate": 9.982036407281457e-05,
"loss": 0.8527,
"step": 1000
},
{
"epoch": 0.02,
"learning_rate": 9.980036007201441e-05,
"loss": 0.8259,
"step": 1100
},
{
"epoch": 0.02,
"learning_rate": 9.978035607121424e-05,
"loss": 0.8473,
"step": 1200
},
{
"epoch": 0.03,
"learning_rate": 9.976035207041409e-05,
"loss": 0.8462,
"step": 1300
},
{
"epoch": 0.03,
"learning_rate": 9.974034806961393e-05,
"loss": 0.8609,
"step": 1400
},
{
"epoch": 0.03,
"learning_rate": 9.972034406881376e-05,
"loss": 0.8238,
"step": 1500
},
{
"epoch": 0.03,
"learning_rate": 9.970034006801361e-05,
"loss": 0.8212,
"step": 1600
},
{
"epoch": 0.03,
"learning_rate": 9.968033606721344e-05,
"loss": 0.8391,
"step": 1700
},
{
"epoch": 0.04,
"learning_rate": 9.966033206641328e-05,
"loss": 0.774,
"step": 1800
},
{
"epoch": 0.04,
"learning_rate": 9.964032806561313e-05,
"loss": 0.8149,
"step": 1900
},
{
"epoch": 0.04,
"learning_rate": 9.962032406481296e-05,
"loss": 0.8794,
"step": 2000
},
{
"epoch": 0.04,
"learning_rate": 9.96003200640128e-05,
"loss": 0.7747,
"step": 2100
},
{
"epoch": 0.04,
"learning_rate": 9.958031606321264e-05,
"loss": 0.8191,
"step": 2200
},
{
"epoch": 0.05,
"learning_rate": 9.956031206241248e-05,
"loss": 0.8219,
"step": 2300
},
{
"epoch": 0.05,
"learning_rate": 9.954030806161232e-05,
"loss": 0.8191,
"step": 2400
},
{
"epoch": 0.05,
"learning_rate": 9.952030406081217e-05,
"loss": 0.8662,
"step": 2500
},
{
"epoch": 0.05,
"learning_rate": 9.950030006001201e-05,
"loss": 0.787,
"step": 2600
},
{
"epoch": 0.05,
"learning_rate": 9.948029605921184e-05,
"loss": 0.8668,
"step": 2700
},
{
"epoch": 0.06,
"learning_rate": 9.946029205841169e-05,
"loss": 0.7817,
"step": 2800
},
{
"epoch": 0.06,
"learning_rate": 9.944028805761153e-05,
"loss": 0.8021,
"step": 2900
},
{
"epoch": 0.06,
"learning_rate": 9.942028405681136e-05,
"loss": 0.8324,
"step": 3000
},
{
"epoch": 0.06,
"learning_rate": 9.940028005601121e-05,
"loss": 0.8631,
"step": 3100
},
{
"epoch": 0.06,
"learning_rate": 9.938027605521105e-05,
"loss": 0.797,
"step": 3200
},
{
"epoch": 0.07,
"learning_rate": 9.936027205441088e-05,
"loss": 0.8224,
"step": 3300
},
{
"epoch": 0.07,
"learning_rate": 9.934026805361073e-05,
"loss": 0.8164,
"step": 3400
},
{
"epoch": 0.07,
"learning_rate": 9.932026405281056e-05,
"loss": 0.7975,
"step": 3500
},
{
"epoch": 0.07,
"learning_rate": 9.930046009201841e-05,
"loss": 0.8468,
"step": 3600
},
{
"epoch": 0.07,
"learning_rate": 9.928045609121824e-05,
"loss": 0.8169,
"step": 3700
},
{
"epoch": 0.08,
"learning_rate": 9.926045209041809e-05,
"loss": 0.7761,
"step": 3800
},
{
"epoch": 0.08,
"learning_rate": 9.924044808961793e-05,
"loss": 0.8042,
"step": 3900
},
{
"epoch": 0.08,
"learning_rate": 9.922044408881776e-05,
"loss": 0.8146,
"step": 4000
},
{
"epoch": 0.08,
"learning_rate": 9.920044008801761e-05,
"loss": 0.8243,
"step": 4100
},
{
"epoch": 0.08,
"learning_rate": 9.918043608721744e-05,
"loss": 0.8315,
"step": 4200
},
{
"epoch": 0.09,
"learning_rate": 9.916043208641728e-05,
"loss": 0.8014,
"step": 4300
},
{
"epoch": 0.09,
"learning_rate": 9.914042808561713e-05,
"loss": 0.8241,
"step": 4400
},
{
"epoch": 0.09,
"learning_rate": 9.912042408481696e-05,
"loss": 0.8233,
"step": 4500
},
{
"epoch": 0.09,
"learning_rate": 9.91004200840168e-05,
"loss": 0.7748,
"step": 4600
},
{
"epoch": 0.09,
"learning_rate": 9.908041608321665e-05,
"loss": 0.8197,
"step": 4700
},
{
"epoch": 0.1,
"learning_rate": 9.906041208241649e-05,
"loss": 0.8195,
"step": 4800
},
{
"epoch": 0.1,
"learning_rate": 9.904040808161634e-05,
"loss": 0.7864,
"step": 4900
},
{
"epoch": 0.1,
"learning_rate": 9.902040408081617e-05,
"loss": 0.7812,
"step": 5000
},
{
"epoch": 0.1,
"learning_rate": 9.900040008001601e-05,
"loss": 0.7966,
"step": 5100
},
{
"epoch": 0.1,
"learning_rate": 9.898039607921584e-05,
"loss": 0.793,
"step": 5200
},
{
"epoch": 0.11,
"learning_rate": 9.896039207841569e-05,
"loss": 0.8568,
"step": 5300
},
{
"epoch": 0.11,
"learning_rate": 9.894038807761553e-05,
"loss": 0.7614,
"step": 5400
},
{
"epoch": 0.11,
"learning_rate": 9.892038407681536e-05,
"loss": 0.8171,
"step": 5500
},
{
"epoch": 0.11,
"learning_rate": 9.890038007601521e-05,
"loss": 0.7649,
"step": 5600
},
{
"epoch": 0.11,
"learning_rate": 9.888037607521505e-05,
"loss": 0.8074,
"step": 5700
},
{
"epoch": 0.12,
"learning_rate": 9.886037207441488e-05,
"loss": 0.7591,
"step": 5800
},
{
"epoch": 0.12,
"learning_rate": 9.884036807361473e-05,
"loss": 0.8102,
"step": 5900
},
{
"epoch": 0.12,
"learning_rate": 9.882036407281456e-05,
"loss": 0.784,
"step": 6000
},
{
"epoch": 0.12,
"learning_rate": 9.88003600720144e-05,
"loss": 0.8367,
"step": 6100
},
{
"epoch": 0.12,
"learning_rate": 9.878035607121425e-05,
"loss": 0.8418,
"step": 6200
},
{
"epoch": 0.13,
"learning_rate": 9.876035207041409e-05,
"loss": 0.798,
"step": 6300
},
{
"epoch": 0.13,
"learning_rate": 9.874034806961394e-05,
"loss": 0.7928,
"step": 6400
},
{
"epoch": 0.13,
"learning_rate": 9.872034406881377e-05,
"loss": 0.8262,
"step": 6500
},
{
"epoch": 0.13,
"learning_rate": 9.870034006801361e-05,
"loss": 0.7965,
"step": 6600
},
{
"epoch": 0.13,
"learning_rate": 9.868033606721346e-05,
"loss": 0.7952,
"step": 6700
},
{
"epoch": 0.14,
"learning_rate": 9.866033206641329e-05,
"loss": 0.7987,
"step": 6800
},
{
"epoch": 0.14,
"learning_rate": 9.864032806561313e-05,
"loss": 0.7939,
"step": 6900
},
{
"epoch": 0.14,
"learning_rate": 9.862032406481296e-05,
"loss": 0.7484,
"step": 7000
},
{
"epoch": 0.14,
"learning_rate": 9.860032006401281e-05,
"loss": 0.819,
"step": 7100
},
{
"epoch": 0.14,
"learning_rate": 9.858031606321265e-05,
"loss": 0.747,
"step": 7200
},
{
"epoch": 0.15,
"learning_rate": 9.856031206241248e-05,
"loss": 0.8029,
"step": 7300
},
{
"epoch": 0.15,
"learning_rate": 9.854030806161233e-05,
"loss": 0.9013,
"step": 7400
},
{
"epoch": 0.15,
"learning_rate": 9.852030406081216e-05,
"loss": 0.8337,
"step": 7500
},
{
"epoch": 0.15,
"learning_rate": 9.8500300060012e-05,
"loss": 0.7927,
"step": 7600
},
{
"epoch": 0.15,
"learning_rate": 9.848029605921185e-05,
"loss": 0.8725,
"step": 7700
},
{
"epoch": 0.16,
"learning_rate": 9.846029205841169e-05,
"loss": 0.7865,
"step": 7800
},
{
"epoch": 0.16,
"learning_rate": 9.844028805761154e-05,
"loss": 0.7726,
"step": 7900
},
{
"epoch": 0.16,
"learning_rate": 9.842048409681936e-05,
"loss": 0.7868,
"step": 8000
},
{
"epoch": 0.16,
"learning_rate": 9.840048009601921e-05,
"loss": 0.7918,
"step": 8100
},
{
"epoch": 0.16,
"learning_rate": 9.838047609521905e-05,
"loss": 0.7924,
"step": 8200
},
{
"epoch": 0.17,
"learning_rate": 9.836047209441888e-05,
"loss": 0.8036,
"step": 8300
},
{
"epoch": 0.17,
"learning_rate": 9.834046809361873e-05,
"loss": 0.7844,
"step": 8400
},
{
"epoch": 0.17,
"learning_rate": 9.832046409281857e-05,
"loss": 0.8124,
"step": 8500
},
{
"epoch": 0.17,
"learning_rate": 9.830046009201842e-05,
"loss": 0.837,
"step": 8600
},
{
"epoch": 0.17,
"learning_rate": 9.828045609121826e-05,
"loss": 0.8315,
"step": 8700
},
{
"epoch": 0.18,
"learning_rate": 9.826045209041809e-05,
"loss": 0.7738,
"step": 8800
},
{
"epoch": 0.18,
"learning_rate": 9.824044808961794e-05,
"loss": 0.8164,
"step": 8900
},
{
"epoch": 0.18,
"learning_rate": 9.822044408881777e-05,
"loss": 0.7529,
"step": 9000
},
{
"epoch": 0.18,
"learning_rate": 9.820044008801761e-05,
"loss": 0.8037,
"step": 9100
},
{
"epoch": 0.18,
"learning_rate": 9.818043608721746e-05,
"loss": 0.8537,
"step": 9200
},
{
"epoch": 0.19,
"learning_rate": 9.816043208641729e-05,
"loss": 0.8241,
"step": 9300
},
{
"epoch": 0.19,
"learning_rate": 9.814042808561713e-05,
"loss": 0.7729,
"step": 9400
},
{
"epoch": 0.19,
"learning_rate": 9.812042408481696e-05,
"loss": 0.8116,
"step": 9500
},
{
"epoch": 0.19,
"learning_rate": 9.81004200840168e-05,
"loss": 0.8254,
"step": 9600
},
{
"epoch": 0.19,
"learning_rate": 9.808041608321665e-05,
"loss": 0.8285,
"step": 9700
},
{
"epoch": 0.2,
"learning_rate": 9.806041208241648e-05,
"loss": 0.7577,
"step": 9800
},
{
"epoch": 0.2,
"learning_rate": 9.804040808161633e-05,
"loss": 0.8024,
"step": 9900
},
{
"epoch": 0.2,
"learning_rate": 9.802040408081616e-05,
"loss": 0.8078,
"step": 10000
},
{
"epoch": 0.2,
"eval_loss": 0.740916907787323,
"eval_runtime": 689.7578,
"eval_samples_per_second": 7.211,
"eval_steps_per_second": 0.902,
"step": 10000
}
],
"max_steps": 500000,
"num_train_epochs": 10,
"total_flos": 1.4097962687142298e+17,
"trial_name": null,
"trial_params": null
}