{
  "best_metric": 0.12982094287872314,
  "best_model_checkpoint": "./results/checkpoint-881",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 881,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9716231555051076e-05,
      "loss": 7.8572,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.943246311010216e-05,
      "loss": 3.0916,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914869466515324e-05,
      "loss": 1.8922,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.886492622020432e-05,
      "loss": 1.3329,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.858115777525539e-05,
      "loss": 0.981,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.829738933030647e-05,
      "loss": 0.7023,
      "step": 30
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.8013620885357555e-05,
      "loss": 0.5746,
      "step": 35
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.772985244040863e-05,
      "loss": 0.4267,
      "step": 40
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.74460839954597e-05,
      "loss": 0.4005,
      "step": 45
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.7162315550510784e-05,
      "loss": 0.3931,
      "step": 50
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.6878547105561864e-05,
      "loss": 0.3501,
      "step": 55
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.6594778660612945e-05,
      "loss": 0.3233,
      "step": 60
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.631101021566402e-05,
      "loss": 0.3265,
      "step": 65
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.602724177071509e-05,
      "loss": 0.3082,
      "step": 70
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.574347332576618e-05,
      "loss": 0.2988,
      "step": 75
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.5459704880817255e-05,
      "loss": 0.2786,
      "step": 80
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.5175936435868336e-05,
      "loss": 0.2757,
      "step": 85
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.489216799091941e-05,
      "loss": 0.2801,
      "step": 90
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.460839954597049e-05,
      "loss": 0.2606,
      "step": 95
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.432463110102157e-05,
      "loss": 0.2792,
      "step": 100
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.4040862656072645e-05,
      "loss": 0.2439,
      "step": 105
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.375709421112372e-05,
      "loss": 0.272,
      "step": 110
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.347332576617481e-05,
      "loss": 0.2479,
      "step": 115
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.318955732122588e-05,
      "loss": 0.2476,
      "step": 120
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.290578887627696e-05,
      "loss": 0.2336,
      "step": 125
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.2622020431328036e-05,
      "loss": 0.2419,
      "step": 130
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.233825198637912e-05,
      "loss": 0.2395,
      "step": 135
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.20544835414302e-05,
      "loss": 0.2335,
      "step": 140
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.177071509648127e-05,
      "loss": 0.2412,
      "step": 145
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.1486946651532346e-05,
      "loss": 0.2253,
      "step": 150
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.120317820658343e-05,
      "loss": 0.2131,
      "step": 155
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.091940976163451e-05,
      "loss": 0.2243,
      "step": 160
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.063564131668559e-05,
      "loss": 0.2353,
      "step": 165
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.035187287173666e-05,
      "loss": 0.192,
      "step": 170
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.006810442678774e-05,
      "loss": 0.2204,
      "step": 175
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.9784335981838824e-05,
      "loss": 0.2041,
      "step": 180
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.95005675368899e-05,
      "loss": 0.2267,
      "step": 185
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.921679909194098e-05,
      "loss": 0.2103,
      "step": 190
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.893303064699206e-05,
      "loss": 0.2004,
      "step": 195
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8649262202043134e-05,
      "loss": 0.2099,
      "step": 200
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8365493757094214e-05,
      "loss": 0.2022,
      "step": 205
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.808172531214529e-05,
      "loss": 0.2,
      "step": 210
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.779795686719637e-05,
      "loss": 0.2324,
      "step": 215
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.751418842224745e-05,
      "loss": 0.2039,
      "step": 220
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.7230419977298524e-05,
      "loss": 0.2233,
      "step": 225
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.6946651532349605e-05,
      "loss": 0.1974,
      "step": 230
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.6662883087400686e-05,
      "loss": 0.2119,
      "step": 235
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.637911464245176e-05,
      "loss": 0.1909,
      "step": 240
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.609534619750284e-05,
      "loss": 0.1823,
      "step": 245
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.5811577752553915e-05,
      "loss": 0.1917,
      "step": 250
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.5527809307604996e-05,
      "loss": 0.1837,
      "step": 255
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.5244040862656076e-05,
      "loss": 0.1968,
      "step": 260
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.496027241770715e-05,
      "loss": 0.182,
      "step": 265
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.467650397275823e-05,
      "loss": 0.185,
      "step": 270
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.439273552780931e-05,
      "loss": 0.1836,
      "step": 275
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.4108967082860386e-05,
      "loss": 0.1922,
      "step": 280
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.382519863791147e-05,
      "loss": 0.1887,
      "step": 285
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.354143019296254e-05,
      "loss": 0.174,
      "step": 290
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.325766174801362e-05,
      "loss": 0.1812,
      "step": 295
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.29738933030647e-05,
      "loss": 0.1687,
      "step": 300
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.269012485811578e-05,
      "loss": 0.1959,
      "step": 305
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.240635641316686e-05,
      "loss": 0.1923,
      "step": 310
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.212258796821794e-05,
      "loss": 0.18,
      "step": 315
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.183881952326901e-05,
      "loss": 0.168,
      "step": 320
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.155505107832009e-05,
      "loss": 0.1802,
      "step": 325
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.127128263337117e-05,
      "loss": 0.183,
      "step": 330
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.098751418842225e-05,
      "loss": 0.1921,
      "step": 335
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.070374574347333e-05,
      "loss": 0.1784,
      "step": 340
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.0419977298524406e-05,
      "loss": 0.1674,
      "step": 345
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.013620885357548e-05,
      "loss": 0.1747,
      "step": 350
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9852440408626565e-05,
      "loss": 0.1785,
      "step": 355
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9568671963677642e-05,
      "loss": 0.1893,
      "step": 360
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.928490351872872e-05,
      "loss": 0.177,
      "step": 365
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.9001135073779793e-05,
      "loss": 0.1889,
      "step": 370
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8717366628830878e-05,
      "loss": 0.2004,
      "step": 375
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8433598183881955e-05,
      "loss": 0.1796,
      "step": 380
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.8149829738933033e-05,
      "loss": 0.1771,
      "step": 385
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.7866061293984107e-05,
      "loss": 0.1895,
      "step": 390
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.758229284903519e-05,
      "loss": 0.187,
      "step": 395
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7298524404086268e-05,
      "loss": 0.1786,
      "step": 400
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.7014755959137346e-05,
      "loss": 0.1728,
      "step": 405
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.673098751418842e-05,
      "loss": 0.1815,
      "step": 410
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6447219069239504e-05,
      "loss": 0.1797,
      "step": 415
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.616345062429058e-05,
      "loss": 0.1735,
      "step": 420
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.587968217934166e-05,
      "loss": 0.1708,
      "step": 425
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5595913734392736e-05,
      "loss": 0.1642,
      "step": 430
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5312145289443817e-05,
      "loss": 0.1642,
      "step": 435
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5028376844494894e-05,
      "loss": 0.1825,
      "step": 440
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4744608399545972e-05,
      "loss": 0.1802,
      "step": 445
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.446083995459705e-05,
      "loss": 0.1739,
      "step": 450
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4177071509648127e-05,
      "loss": 0.1513,
      "step": 455
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3893303064699208e-05,
      "loss": 0.1767,
      "step": 460
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3609534619750285e-05,
      "loss": 0.1631,
      "step": 465
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3325766174801362e-05,
      "loss": 0.1935,
      "step": 470
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.304199772985244e-05,
      "loss": 0.1956,
      "step": 475
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.275822928490352e-05,
      "loss": 0.1657,
      "step": 480
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2474460839954598e-05,
      "loss": 0.143,
      "step": 485
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.2190692395005676e-05,
      "loss": 0.1641,
      "step": 490
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.1906923950056753e-05,
      "loss": 0.1633,
      "step": 495
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1623155505107834e-05,
      "loss": 0.1852,
      "step": 500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.133938706015891e-05,
      "loss": 0.1725,
      "step": 505
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1055618615209992e-05,
      "loss": 0.1621,
      "step": 510
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0771850170261066e-05,
      "loss": 0.1827,
      "step": 515
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.0488081725312147e-05,
      "loss": 0.1664,
      "step": 520
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0204313280363224e-05,
      "loss": 0.1532,
      "step": 525
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.9920544835414305e-05,
      "loss": 0.1447,
      "step": 530
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.963677639046538e-05,
      "loss": 0.1669,
      "step": 535
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.935300794551646e-05,
      "loss": 0.1469,
      "step": 540
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9069239500567538e-05,
      "loss": 0.1685,
      "step": 545
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.878547105561862e-05,
      "loss": 0.1745,
      "step": 550
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8501702610669692e-05,
      "loss": 0.1508,
      "step": 555
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8217934165720773e-05,
      "loss": 0.1753,
      "step": 560
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.793416572077185e-05,
      "loss": 0.1572,
      "step": 565
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.765039727582293e-05,
      "loss": 0.1433,
      "step": 570
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7366628830874006e-05,
      "loss": 0.1774,
      "step": 575
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7082860385925086e-05,
      "loss": 0.1551,
      "step": 580
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6799091940976164e-05,
      "loss": 0.1603,
      "step": 585
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6515323496027245e-05,
      "loss": 0.1623,
      "step": 590
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6231555051078322e-05,
      "loss": 0.1705,
      "step": 595
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.59477866061294e-05,
      "loss": 0.1731,
      "step": 600
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5664018161180477e-05,
      "loss": 0.1682,
      "step": 605
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5380249716231558e-05,
      "loss": 0.1628,
      "step": 610
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5096481271282633e-05,
      "loss": 0.172,
      "step": 615
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.4812712826333713e-05,
      "loss": 0.1616,
      "step": 620
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.452894438138479e-05,
      "loss": 0.1504,
      "step": 625
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.424517593643587e-05,
      "loss": 0.1684,
      "step": 630
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.3961407491486947e-05,
      "loss": 0.1714,
      "step": 635
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.3677639046538027e-05,
      "loss": 0.1684,
      "step": 640
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.3393870601589103e-05,
      "loss": 0.1635,
      "step": 645
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3110102156640184e-05,
      "loss": 0.1519,
      "step": 650
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.282633371169126e-05,
      "loss": 0.1634,
      "step": 655
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.254256526674234e-05,
      "loss": 0.1731,
      "step": 660
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2258796821793416e-05,
      "loss": 0.1826,
      "step": 665
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.1975028376844495e-05,
      "loss": 0.1599,
      "step": 670
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1691259931895573e-05,
      "loss": 0.1749,
      "step": 675
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1407491486946652e-05,
      "loss": 0.17,
      "step": 680
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.112372304199773e-05,
      "loss": 0.1584,
      "step": 685
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.0839954597048808e-05,
      "loss": 0.169,
      "step": 690
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0556186152099886e-05,
      "loss": 0.1766,
      "step": 695
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0272417707150965e-05,
      "loss": 0.1638,
      "step": 700
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.988649262202042e-06,
      "loss": 0.1688,
      "step": 705
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.704880817253122e-06,
      "loss": 0.1636,
      "step": 710
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.4211123723042e-06,
      "loss": 0.162,
      "step": 715
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.137343927355278e-06,
      "loss": 0.1615,
      "step": 720
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.853575482406357e-06,
      "loss": 0.1642,
      "step": 725
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.569807037457435e-06,
      "loss": 0.1699,
      "step": 730
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.286038592508514e-06,
      "loss": 0.1576,
      "step": 735
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.002270147559591e-06,
      "loss": 0.1582,
      "step": 740
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.71850170261067e-06,
      "loss": 0.1569,
      "step": 745
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.434733257661748e-06,
      "loss": 0.1569,
      "step": 750
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.150964812712826e-06,
      "loss": 0.1768,
      "step": 755
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.8671963677639044e-06,
      "loss": 0.1574,
      "step": 760
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.583427922814983e-06,
      "loss": 0.1725,
      "step": 765
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.299659477866062e-06,
      "loss": 0.1539,
      "step": 770
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.01589103291714e-06,
      "loss": 0.1675,
      "step": 775
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.732122587968218e-06,
      "loss": 0.1571,
      "step": 780
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.448354143019297e-06,
      "loss": 0.1484,
      "step": 785
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.164585698070375e-06,
      "loss": 0.1498,
      "step": 790
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.880817253121453e-06,
      "loss": 0.174,
      "step": 795
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.5970488081725315e-06,
      "loss": 0.1514,
      "step": 800
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.31328036322361e-06,
      "loss": 0.1531,
      "step": 805
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.029511918274688e-06,
      "loss": 0.1438,
      "step": 810
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.7457434733257664e-06,
      "loss": 0.1741,
      "step": 815
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4619750283768446e-06,
      "loss": 0.1585,
      "step": 820
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.178206583427923e-06,
      "loss": 0.1661,
      "step": 825
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.894438138479001e-06,
      "loss": 0.1448,
      "step": 830
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.6106696935300795e-06,
      "loss": 0.1601,
      "step": 835
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.3269012485811578e-06,
      "loss": 0.1632,
      "step": 840
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.043132803632236e-06,
      "loss": 0.162,
      "step": 845
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7593643586833145e-06,
      "loss": 0.1598,
      "step": 850
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4755959137343928e-06,
      "loss": 0.183,
      "step": 855
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.191827468785471e-06,
      "loss": 0.1614,
      "step": 860
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.080590238365494e-07,
      "loss": 0.154,
      "step": 865
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.242905788876277e-07,
      "loss": 0.1445,
      "step": 870
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.4052213393870604e-07,
      "loss": 0.1591,
      "step": 875
    },
    {
      "epoch": 1.0,
      "learning_rate": 5.6753688989784336e-08,
      "loss": 0.1638,
      "step": 880
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.12982094287872314,
      "eval_runtime": 78.2688,
      "eval_samples_per_second": 107.591,
      "eval_steps_per_second": 1.686,
      "step": 881
    }
  ],
  "logging_steps": 5,
  "max_steps": 881,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 4289727740313600.0,
  "trial_name": null,
  "trial_params": null
}