{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.301327088212334,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.8e-07,
      "loss": 0.6935,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.8e-07,
      "loss": 0.6974,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.694,
      "step": 30
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.8e-07,
      "loss": 0.6966,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.6948,
      "step": 50
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.6957,
      "step": 60
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.6952,
      "step": 70
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.6972,
      "step": 80
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.6925,
      "step": 90
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.98e-06,
      "loss": 0.6903,
      "step": 100
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.6837,
      "step": 110
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.38e-06,
      "loss": 0.6754,
      "step": 120
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.6447,
      "step": 130
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.6391,
      "step": 140
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.6118,
      "step": 150
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.6071,
      "step": 160
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.6027,
      "step": 170
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.58e-06,
      "loss": 0.5978,
      "step": 180
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.5974,
      "step": 190
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.5929,
      "step": 200
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.18e-06,
      "loss": 0.5853,
      "step": 210
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.38e-06,
      "loss": 0.582,
      "step": 220
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.58e-06,
      "loss": 0.5443,
      "step": 230
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.78e-06,
      "loss": 0.5475,
      "step": 240
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.5348,
      "step": 250
    },
    {
      "epoch": 0.13,
      "eval_webgpt_accuracy": 0.5572012257405515,
      "eval_webgpt_loss": 0.6882389783859253,
      "eval_webgpt_runtime": 166.4816,
      "eval_webgpt_samples_per_second": 23.522,
      "eval_webgpt_steps_per_second": 4.709,
      "step": 250
    },
    {
      "epoch": 0.13,
      "eval_hfsummary_accuracy": 0.6711301877096999,
      "eval_hfsummary_loss": 0.6183204054832458,
      "eval_hfsummary_runtime": 3223.0913,
      "eval_hfsummary_samples_per_second": 10.264,
      "eval_hfsummary_steps_per_second": 2.053,
      "step": 250
    },
    {
      "epoch": 0.13,
      "eval_gptsynthetic_accuracy": 0.9993966817496229,
      "eval_gptsynthetic_loss": 0.009097465313971043,
      "eval_gptsynthetic_runtime": 101.5011,
      "eval_gptsynthetic_samples_per_second": 32.66,
      "eval_gptsynthetic_steps_per_second": 6.532,
      "step": 250
    },
    {
      "epoch": 0.14,
      "learning_rate": 5.18e-06,
      "loss": 0.537,
      "step": 260
    },
    {
      "epoch": 0.14,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.5286,
      "step": 270
    },
    {
      "epoch": 0.15,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.4957,
      "step": 280
    },
    {
      "epoch": 0.15,
      "learning_rate": 5.78e-06,
      "loss": 0.5117,
      "step": 290
    },
    {
      "epoch": 0.16,
      "learning_rate": 5.98e-06,
      "loss": 0.4977,
      "step": 300
    },
    {
      "epoch": 0.16,
      "learning_rate": 6.18e-06,
      "loss": 0.4801,
      "step": 310
    },
    {
      "epoch": 0.17,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.499,
      "step": 320
    },
    {
      "epoch": 0.17,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.4702,
      "step": 330
    },
    {
      "epoch": 0.18,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.4715,
      "step": 340
    },
    {
      "epoch": 0.18,
      "learning_rate": 6.98e-06,
      "loss": 0.4686,
      "step": 350
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.488,
      "step": 360
    },
    {
      "epoch": 0.19,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.4671,
      "step": 370
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.58e-06,
      "loss": 0.4889,
      "step": 380
    },
    {
      "epoch": 0.2,
      "learning_rate": 7.78e-06,
      "loss": 0.4718,
      "step": 390
    },
    {
      "epoch": 0.21,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.4706,
      "step": 400
    },
    {
      "epoch": 0.21,
      "learning_rate": 8.18e-06,
      "loss": 0.4839,
      "step": 410
    },
    {
      "epoch": 0.22,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.4576,
      "step": 420
    },
    {
      "epoch": 0.22,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.4865,
      "step": 430
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.78e-06,
      "loss": 0.468,
      "step": 440
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.98e-06,
      "loss": 0.4335,
      "step": 450
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.4516,
      "step": 460
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.38e-06,
      "loss": 0.4603,
      "step": 470
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.58e-06,
      "loss": 0.467,
      "step": 480
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.4678,
      "step": 490
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.4456,
      "step": 500
    },
    {
      "epoch": 0.26,
      "eval_webgpt_accuracy": 0.5893769152196119,
      "eval_webgpt_loss": 0.6772251725196838,
      "eval_webgpt_runtime": 166.9484,
      "eval_webgpt_samples_per_second": 23.456,
      "eval_webgpt_steps_per_second": 4.696,
      "step": 500
    },
    {
      "epoch": 0.26,
      "eval_hfsummary_accuracy": 0.7065864643472478,
      "eval_hfsummary_loss": 0.5687452554702759,
      "eval_hfsummary_runtime": 3231.8075,
      "eval_hfsummary_samples_per_second": 10.237,
      "eval_hfsummary_steps_per_second": 2.047,
      "step": 500
    },
    {
      "epoch": 0.26,
      "eval_gptsynthetic_accuracy": 0.9993966817496229,
      "eval_gptsynthetic_loss": 0.005173402838408947,
      "eval_gptsynthetic_runtime": 101.6573,
      "eval_gptsynthetic_samples_per_second": 32.61,
      "eval_gptsynthetic_steps_per_second": 6.522,
      "step": 500
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.999821166271659e-06,
      "loss": 0.4375,
      "step": 510
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.999202992029537e-06,
      "loss": 0.4608,
      "step": 520
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.998143324029546e-06,
      "loss": 0.4674,
      "step": 530
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.996642255853966e-06,
      "loss": 0.4468,
      "step": 540
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.99469992006637e-06,
      "loss": 0.4466,
      "step": 550
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.992316488199927e-06,
      "loss": 0.4608,
      "step": 560
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.989492170742248e-06,
      "loss": 0.436,
      "step": 570
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.986227217116798e-06,
      "loss": 0.4249,
      "step": 580
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.982521915660865e-06,
      "loss": 0.4702,
      "step": 590
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.978376593600107e-06,
      "loss": 0.4336,
      "step": 600
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.973791617019634e-06,
      "loss": 0.4617,
      "step": 610
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.968767390831707e-06,
      "loss": 0.4547,
      "step": 620
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.963304358739943e-06,
      "loss": 0.4285,
      "step": 630
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.957403003200167e-06,
      "loss": 0.4626,
      "step": 640
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.951063845377778e-06,
      "loss": 0.4153,
      "step": 650
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.944287445101733e-06,
      "loss": 0.4196,
      "step": 660
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.937074400815114e-06,
      "loss": 0.4461,
      "step": 670
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.929425349522264e-06,
      "loss": 0.4108,
      "step": 680
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.921340966732539e-06,
      "loss": 0.4466,
      "step": 690
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.912821966400652e-06,
      "loss": 0.4608,
      "step": 700
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.903869100863622e-06,
      "loss": 0.4382,
      "step": 710
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.894483160774331e-06,
      "loss": 0.4172,
      "step": 720
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.884664975031698e-06,
      "loss": 0.4021,
      "step": 730
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.874415410707476e-06,
      "loss": 0.4108,
      "step": 740
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.863735372969692e-06,
      "loss": 0.4402,
      "step": 750
    },
    {
      "epoch": 0.39,
      "eval_webgpt_accuracy": 0.5947395301327886,
      "eval_webgpt_loss": 0.6755640506744385,
      "eval_webgpt_runtime": 167.2694,
      "eval_webgpt_samples_per_second": 23.411,
      "eval_webgpt_steps_per_second": 4.687,
      "step": 750
    },
    {
      "epoch": 0.39,
      "eval_hfsummary_accuracy": 0.7158661548227186,
      "eval_hfsummary_loss": 0.5612313151359558,
      "eval_hfsummary_runtime": 3314.4907,
      "eval_hfsummary_samples_per_second": 9.981,
      "eval_hfsummary_steps_per_second": 1.996,
      "step": 750
    },
    {
      "epoch": 0.39,
      "eval_gptsynthetic_accuracy": 0.9996983408748115,
      "eval_gptsynthetic_loss": 0.005162315908819437,
      "eval_gptsynthetic_runtime": 105.2324,
      "eval_gptsynthetic_samples_per_second": 31.502,
      "eval_gptsynthetic_steps_per_second": 6.3,
      "step": 750
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.852625805002682e-06,
      "loss": 0.4411,
      "step": 760
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.841087687923826e-06,
      "loss": 0.4239,
      "step": 770
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.82912204069688e-06,
      "loss": 0.4489,
      "step": 780
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.816729920041996e-06,
      "loss": 0.4337,
      "step": 790
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.803912420342406e-06,
      "loss": 0.4346,
      "step": 800
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.790670673547766e-06,
      "loss": 0.3994,
      "step": 810
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.777005849074195e-06,
      "loss": 0.4151,
      "step": 820
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.762919153700996e-06,
      "loss": 0.4519,
      "step": 830
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.748411831464083e-06,
      "loss": 0.4409,
      "step": 840
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.733485163546124e-06,
      "loss": 0.4069,
      "step": 850
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.718140468163386e-06,
      "loss": 0.3974,
      "step": 860
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.702379100449325e-06,
      "loss": 0.4192,
      "step": 870
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.686202452334909e-06,
      "loss": 0.4106,
      "step": 880
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.669611952425694e-06,
      "loss": 0.4224,
      "step": 890
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.652609065875656e-06,
      "loss": 0.4176,
      "step": 900
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.635195294257806e-06,
      "loss": 0.3897,
      "step": 910
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.617372175431574e-06,
      "loss": 0.4201,
      "step": 920
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.599141283407004e-06,
      "loss": 0.3955,
      "step": 930
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.580504228205736e-06,
      "loss": 0.3754,
      "step": 940
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.56146265571884e-06,
      "loss": 0.407,
      "step": 950
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.542018247561442e-06,
      "loss": 0.4225,
      "step": 960
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.522172720924236e-06,
      "loss": 0.415,
      "step": 970
    },
    {
      "epoch": 0.51,
      "learning_rate": 9.501927828421816e-06,
      "loss": 0.4014,
      "step": 980
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.481285357937905e-06,
      "loss": 0.4074,
      "step": 990
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.460247132467466e-06,
      "loss": 0.3874,
      "step": 1000
    },
    {
      "epoch": 0.52,
      "eval_webgpt_accuracy": 0.6021450459652706,
      "eval_webgpt_loss": 0.6651596426963806,
      "eval_webgpt_runtime": 167.2303,
      "eval_webgpt_samples_per_second": 23.417,
      "eval_webgpt_steps_per_second": 4.688,
      "step": 1000
    },
    {
      "epoch": 0.52,
      "eval_hfsummary_accuracy": 0.714052534534353,
      "eval_hfsummary_loss": 0.5728897452354431,
      "eval_hfsummary_runtime": 3240.5714,
      "eval_hfsummary_samples_per_second": 10.209,
      "eval_hfsummary_steps_per_second": 2.042,
      "step": 1000
    },
    {
      "epoch": 0.52,
      "eval_gptsynthetic_accuracy": 0.9990950226244344,
      "eval_gptsynthetic_loss": 0.0032840222120285034,
      "eval_gptsynthetic_runtime": 101.8215,
      "eval_gptsynthetic_samples_per_second": 32.557,
      "eval_gptsynthetic_steps_per_second": 6.511,
      "step": 1000
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.438815009955701e-06,
      "loss": 0.386,
      "step": 1010
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.41699088313398e-06,
      "loss": 0.401,
      "step": 1020
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.394776679352675e-06,
      "loss": 0.4199,
      "step": 1030
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.372174360410963e-06,
      "loss": 0.4093,
      "step": 1040
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.349185922383564e-06,
      "loss": 0.4313,
      "step": 1050
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.325813395444472e-06,
      "loss": 0.4164,
      "step": 1060
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.302058843687655e-06,
      "loss": 0.3871,
      "step": 1070
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.277924364944774e-06,
      "loss": 0.4115,
      "step": 1080
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.253412090599923e-06,
      "loss": 0.4094,
      "step": 1090
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.228524185401384e-06,
      "loss": 0.3957,
      "step": 1100
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.203262847270469e-06,
      "loss": 0.3952,
      "step": 1110
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.177630307107408e-06,
      "loss": 0.4011,
      "step": 1120
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.15162882859433e-06,
      "loss": 0.4042,
      "step": 1130
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.125260707995356e-06,
      "loss": 0.404,
      "step": 1140
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.098528273953807e-06,
      "loss": 0.3904,
      "step": 1150
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.071433887286551e-06,
      "loss": 0.4011,
      "step": 1160
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.04397994077552e-06,
      "loss": 0.3806,
      "step": 1170
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.01616885895639e-06,
      "loss": 0.3878,
      "step": 1180
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.988003097904472e-06,
      "loss": 0.384,
      "step": 1190
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.959485145017794e-06,
      "loss": 0.4023,
      "step": 1200
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.930617518797448e-06,
      "loss": 0.3609,
      "step": 1210
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.901402768625169e-06,
      "loss": 0.3879,
      "step": 1220
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.871843474538182e-06,
      "loss": 0.4003,
      "step": 1230
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.841942247001368e-06,
      "loss": 0.3974,
      "step": 1240
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.811701726676711e-06,
      "loss": 0.4067,
      "step": 1250
    },
    {
      "epoch": 0.65,
      "eval_webgpt_accuracy": 0.5967824310520939,
      "eval_webgpt_loss": 0.6645339727401733,
      "eval_webgpt_runtime": 167.52,
      "eval_webgpt_samples_per_second": 23.376,
      "eval_webgpt_steps_per_second": 4.68,
      "step": 1250
    },
    {
      "epoch": 0.65,
      "eval_hfsummary_accuracy": 0.7167125109572893,
      "eval_hfsummary_loss": 0.559929370880127,
      "eval_hfsummary_runtime": 3290.2163,
      "eval_hfsummary_samples_per_second": 10.055,
      "eval_hfsummary_steps_per_second": 2.011,
      "step": 1250
    },
    {
      "epoch": 0.65,
      "eval_gptsynthetic_accuracy": 0.9993966817496229,
      "eval_gptsynthetic_loss": 0.002522215712815523,
      "eval_gptsynthetic_runtime": 116.8538,
      "eval_gptsynthetic_samples_per_second": 28.369,
      "eval_gptsynthetic_steps_per_second": 5.674,
      "step": 1250
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.78112458419011e-06,
      "loss": 0.3581,
      "step": 1260
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.750213519895511e-06,
      "loss": 0.3628,
      "step": 1270
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.718971263636441e-06,
      "loss": 0.3799,
      "step": 1280
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.687400574504929e-06,
      "loss": 0.4083,
      "step": 1290
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.655504240597841e-06,
      "loss": 0.3771,
      "step": 1300
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.623285078770649e-06,
      "loss": 0.4016,
      "step": 1310
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.590745934388675e-06,
      "loss": 0.3878,
      "step": 1320
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.557889681075808e-06,
      "loss": 0.3568,
      "step": 1330
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.524719220460718e-06,
      "loss": 0.3914,
      "step": 1340
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.491237481920616e-06,
      "loss": 0.4001,
      "step": 1350
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.457447422322545e-06,
      "loss": 0.3683,
      "step": 1360
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.42335202576225e-06,
      "loss": 0.3741,
      "step": 1370
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.388954303300651e-06,
      "loss": 0.3636,
      "step": 1380
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.354257292697927e-06,
      "loss": 0.3815,
      "step": 1390
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.319264058145227e-06,
      "loss": 0.3596,
      "step": 1400
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.283977689994085e-06,
      "loss": 0.3826,
      "step": 1410
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.248401304483492e-06,
      "loss": 0.41,
      "step": 1420
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.21253804346469e-06,
      "loss": 0.3648,
      "step": 1430
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.176391074123705e-06,
      "loss": 0.3439,
      "step": 1440
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.139963588701656e-06,
      "loss": 0.3635,
      "step": 1450
    },
    {
      "epoch": 0.76,
      "learning_rate": 8.103258804212824e-06,
      "loss": 0.3633,
      "step": 1460
    },
    {
      "epoch": 0.77,
      "learning_rate": 8.066279962160554e-06,
      "loss": 0.3989,
      "step": 1470
    },
    {
      "epoch": 0.77,
      "learning_rate": 8.029030328250997e-06,
      "loss": 0.3504,
      "step": 1480
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.991513192104694e-06,
      "loss": 0.3665,
      "step": 1490
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.953731866966063e-06,
      "loss": 0.3874,
      "step": 1500
    },
    {
      "epoch": 0.78,
      "eval_webgpt_accuracy": 0.6113381001021451,
      "eval_webgpt_loss": 0.6596858501434326,
      "eval_webgpt_runtime": 179.0732,
      "eval_webgpt_samples_per_second": 21.868,
      "eval_webgpt_steps_per_second": 4.378,
      "step": 1500
    },
    {
      "epoch": 0.78,
      "eval_hfsummary_accuracy": 0.7207327025964997,
      "eval_hfsummary_loss": 0.5737717747688293,
      "eval_hfsummary_runtime": 3606.3862,
      "eval_hfsummary_samples_per_second": 9.173,
      "eval_hfsummary_steps_per_second": 1.835,
      "step": 1500
    },
    {
      "epoch": 0.78,
      "eval_gptsynthetic_accuracy": 0.9993966817496229,
      "eval_gptsynthetic_loss": 0.002385695930570364,
      "eval_gptsynthetic_runtime": 109.239,
      "eval_gptsynthetic_samples_per_second": 30.346,
      "eval_gptsynthetic_steps_per_second": 6.069,
      "step": 1500
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.915689689410806e-06,
      "loss": 0.3532,
      "step": 1510
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.877390019051232e-06,
      "loss": 0.403,
      "step": 1520
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.838836238239574e-06,
      "loss": 0.3735,
      "step": 1530
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.800031751769274e-06,
      "loss": 0.3773,
      "step": 1540
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.760979986574304e-06,
      "loss": 0.3644,
      "step": 1550
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.721684391426516e-06,
      "loss": 0.3692,
      "step": 1560
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.682148436631075e-06,
      "loss": 0.3804,
      "step": 1570
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.642375613719988e-06,
      "loss": 0.397,
      "step": 1580
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.602369435143755e-06,
      "loss": 0.3842,
      "step": 1590
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.562133433961176e-06,
      "loss": 0.3571,
      "step": 1600
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.5216711635273305e-06,
      "loss": 0.3784,
      "step": 1610
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.480986197179785e-06,
      "loss": 0.3947,
      "step": 1620
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.440082127923002e-06,
      "loss": 0.3583,
      "step": 1630
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.39896256811105e-06,
      "loss": 0.3671,
      "step": 1640
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.357631149128568e-06,
      "loss": 0.3691,
      "step": 1650
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.31609152107009e-06,
      "loss": 0.4069,
      "step": 1660
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.2743473524176734e-06,
      "loss": 0.3515,
      "step": 1670
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.232402329716938e-06,
      "loss": 0.3783,
      "step": 1680
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.19026015725149e-06,
      "loss": 0.362,
      "step": 1690
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.147924556715793e-06,
      "loss": 0.3807,
      "step": 1700
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.1053992668864855e-06,
      "loss": 0.3632,
      "step": 1710
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.0626880432922095e-06,
      "loss": 0.3619,
      "step": 1720
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.019794657881942e-06,
      "loss": 0.3883,
      "step": 1730
    },
    {
      "epoch": 0.91,
      "learning_rate": 6.976722898691881e-06,
      "loss": 0.351,
      "step": 1740
    },
    {
      "epoch": 0.91,
      "learning_rate": 6.933476569510924e-06,
      "loss": 0.3613,
      "step": 1750
    },
    {
      "epoch": 0.91,
      "eval_webgpt_accuracy": 0.609805924412666,
      "eval_webgpt_loss": 0.6571772694587708,
      "eval_webgpt_runtime": 167.3559,
      "eval_webgpt_samples_per_second": 23.399,
      "eval_webgpt_steps_per_second": 4.685,
      "step": 1750
    },
    {
      "epoch": 0.91,
      "eval_hfsummary_accuracy": 0.7195538494090621,
      "eval_hfsummary_loss": 0.5861465334892273,
      "eval_hfsummary_runtime": 3235.9038,
      "eval_hfsummary_samples_per_second": 10.224,
      "eval_hfsummary_steps_per_second": 2.045,
      "step": 1750
    },
    {
      "epoch": 0.91,
      "eval_gptsynthetic_accuracy": 0.9996983408748115,
      "eval_gptsynthetic_loss": 0.0027254121378064156,
      "eval_gptsynthetic_runtime": 101.6222,
      "eval_gptsynthetic_samples_per_second": 32.621,
      "eval_gptsynthetic_steps_per_second": 6.524,
      "step": 1750
    },
    {
      "epoch": 0.92,
      "learning_rate": 6.890059489544735e-06,
      "loss": 0.3651,
      "step": 1760
    },
    {
      "epoch": 0.92,
      "learning_rate": 6.846475493078462e-06,
      "loss": 0.3617,
      "step": 1770
    },
    {
      "epoch": 0.93,
      "learning_rate": 6.802728429138122e-06,
      "loss": 0.3174,
      "step": 1780
    },
    {
      "epoch": 0.93,
      "learning_rate": 6.758822161150687e-06,
      "loss": 0.3399,
      "step": 1790
    },
    {
      "epoch": 0.94,
      "learning_rate": 6.714760566602876e-06,
      "loss": 0.3664,
      "step": 1800
    },
    {
      "epoch": 0.94,
      "learning_rate": 6.67054753669875e-06,
      "loss": 0.3432,
      "step": 1810
    },
    {
      "epoch": 0.95,
      "learning_rate": 6.626186976016049e-06,
      "loss": 0.3673,
      "step": 1820
    },
    {
      "epoch": 0.95,
      "learning_rate": 6.581682802161371e-06,
      "loss": 0.3562,
      "step": 1830
    },
    {
      "epoch": 0.96,
      "learning_rate": 6.537038945424201e-06,
      "loss": 0.338,
      "step": 1840
    },
    {
      "epoch": 0.96,
      "learning_rate": 6.492259348429818e-06,
      "loss": 0.3387,
      "step": 1850
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.4473479657910995e-06,
      "loss": 0.3504,
      "step": 1860
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.402308763759296e-06,
      "loss": 0.3276,
      "step": 1870
    },
    {
      "epoch": 0.98,
      "learning_rate": 6.357145719873742e-06,
      "loss": 0.3243,
      "step": 1880
    },
    {
      "epoch": 0.98,
      "learning_rate": 6.3118628226105995e-06,
      "loss": 0.3548,
      "step": 1890
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.266464071030621e-06,
      "loss": 0.3099,
      "step": 1900
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.220953474425984e-06,
      "loss": 0.3387,
      "step": 1910
    },
    {
      "epoch": 1.0,
      "learning_rate": 6.175335051966211e-06,
      "loss": 0.3403,
      "step": 1920
    },
    {
      "epoch": 1.0,
      "learning_rate": 6.129612832343233e-06,
      "loss": 0.3385,
      "step": 1930
    },
    {
      "epoch": 1.01,
      "learning_rate": 6.0837908534156076e-06,
      "loss": 0.3305,
      "step": 1940
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.037873161851911e-06,
      "loss": 0.3421,
      "step": 1950
    },
    {
      "epoch": 1.02,
      "learning_rate": 5.9918638127733764e-06,
      "loss": 0.3381,
      "step": 1960
    },
    {
      "epoch": 1.03,
      "learning_rate": 5.945766869395772e-06,
      "loss": 0.3062,
      "step": 1970
    },
    {
      "epoch": 1.03,
      "learning_rate": 5.899586402670566e-06,
      "loss": 0.3459,
      "step": 1980
    },
    {
      "epoch": 1.04,
      "learning_rate": 5.853326490925407e-06,
      "loss": 0.3175,
      "step": 1990
    },
    {
      "epoch": 1.04,
      "learning_rate": 5.806991219503963e-06,
      "loss": 0.3117,
      "step": 2000
    },
    {
      "epoch": 1.04,
      "eval_webgpt_accuracy": 0.6046986721144024,
      "eval_webgpt_loss": 0.6649239659309387,
      "eval_webgpt_runtime": 166.8662,
      "eval_webgpt_samples_per_second": 23.468,
      "eval_webgpt_steps_per_second": 4.698,
      "step": 2000
    },
    {
      "epoch": 1.04,
      "eval_hfsummary_accuracy": 0.7152616147265968,
      "eval_hfsummary_loss": 0.6051351428031921,
      "eval_hfsummary_runtime": 3227.7808,
      "eval_hfsummary_samples_per_second": 10.249,
      "eval_hfsummary_steps_per_second": 2.05,
      "step": 2000
    },
    {
      "epoch": 1.04,
      "eval_gptsynthetic_accuracy": 1.0,
      "eval_gptsynthetic_loss": 0.002014968078583479,
      "eval_gptsynthetic_runtime": 101.558,
      "eval_gptsynthetic_samples_per_second": 32.641,
      "eval_gptsynthetic_steps_per_second": 6.528,
      "step": 2000
    },
    {
      "epoch": 1.05,
      "learning_rate": 5.765228424606831e-06,
      "loss": 0.3206,
      "step": 2010
    },
    {
      "epoch": 1.05,
      "learning_rate": 5.718761248466732e-06,
      "loss": 0.3073,
      "step": 2020
    },
    {
      "epoch": 1.06,
      "learning_rate": 5.672230596487563e-06,
      "loss": 0.3113,
      "step": 2030
    },
    {
      "epoch": 1.06,
      "learning_rate": 5.625640577922828e-06,
      "loss": 0.3213,
      "step": 2040
    },
    {
      "epoch": 1.07,
      "learning_rate": 5.578995307268855e-06,
      "loss": 0.3353,
      "step": 2050
    },
    {
      "epoch": 1.07,
      "learning_rate": 5.532298903901448e-06,
      "loss": 0.3293,
      "step": 2060
    },
    {
      "epoch": 1.08,
      "learning_rate": 5.485555491712082e-06,
      "loss": 0.3158,
      "step": 2070
    },
    {
      "epoch": 1.08,
      "learning_rate": 5.438769198743713e-06,
      "loss": 0.2968,
      "step": 2080
    },
    {
      "epoch": 1.09,
      "learning_rate": 5.391944156826221e-06,
      "loss": 0.2877,
      "step": 2090
    },
    {
      "epoch": 1.09,
      "learning_rate": 5.345084501211517e-06,
      "loss": 0.3361,
      "step": 2100
    },
    {
      "epoch": 1.1,
      "learning_rate": 5.298194370208341e-06,
      "loss": 0.3051,
      "step": 2110
    },
    {
      "epoch": 1.1,
      "learning_rate": 5.251277904816809e-06,
      "loss": 0.3398,
      "step": 2120
    },
    {
      "epoch": 1.11,
      "learning_rate": 5.2043392483626906e-06,
      "loss": 0.3155,
      "step": 2130
    },
    {
      "epoch": 1.11,
      "learning_rate": 5.157382546131519e-06,
      "loss": 0.315,
      "step": 2140
    },
    {
      "epoch": 1.12,
      "learning_rate": 5.110411945002497e-06,
      "loss": 0.304,
      "step": 2150
    },
    {
      "epoch": 1.12,
      "learning_rate": 5.063431593082283e-06,
      "loss": 0.3458,
      "step": 2160
    },
    {
      "epoch": 1.13,
      "learning_rate": 5.016445639338647e-06,
      "loss": 0.3192,
      "step": 2170
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.969458233234083e-06,
      "loss": 0.3188,
      "step": 2180
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.922473524359335e-06,
      "loss": 0.308,
      "step": 2190
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.875495662066958e-06,
      "loss": 0.3052,
      "step": 2200
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.82852879510486e-06,
      "loss": 0.2962,
      "step": 2210
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.781577071249918e-06,
      "loss": 0.3067,
      "step": 2220
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.734644636941687e-06,
      "loss": 0.2886,
      "step": 2230
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.6877356369161975e-06,
      "loss": 0.2832,
      "step": 2240
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.64085421383994e-06,
      "loss": 0.3177,
      "step": 2250
    },
    {
      "epoch": 1.17,
      "eval_webgpt_accuracy": 0.6052093973442288,
      "eval_webgpt_loss": 0.67301344871521,
      "eval_webgpt_runtime": 167.0393,
      "eval_webgpt_samples_per_second": 23.444,
      "eval_webgpt_steps_per_second": 4.694,
      "step": 2250
    },
    {
      "epoch": 1.17,
      "eval_hfsummary_accuracy": 0.7188586282985219,
      "eval_hfsummary_loss": 0.665126621723175,
      "eval_hfsummary_runtime": 3228.0014,
      "eval_hfsummary_samples_per_second": 10.249,
      "eval_hfsummary_steps_per_second": 2.05,
      "step": 2250
    },
    {
      "epoch": 1.17,
      "eval_gptsynthetic_accuracy": 1.0,
      "eval_gptsynthetic_loss": 0.0015043159946799278,
      "eval_gptsynthetic_runtime": 101.5554,
      "eval_gptsynthetic_samples_per_second": 32.642,
      "eval_gptsynthetic_steps_per_second": 6.528,
      "step": 2250
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.594004507944001e-06,
      "loss": 0.2988,
      "step": 2260
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.547190656658441e-06,
      "loss": 0.3037,
      "step": 2270
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.500416794246885e-06,
      "loss": 0.3024,
      "step": 2280
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.453687051441443e-06,
      "loss": 0.3008,
      "step": 2290
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.407005555077885e-06,
      "loss": 0.3059,
      "step": 2300
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.36037642773121e-06,
      "loss": 0.3191,
      "step": 2310
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.313803787351555e-06,
      "loss": 0.3228,
      "step": 2320
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.267291746900538e-06,
      "loss": 0.3054,
      "step": 2330
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.220844413988022e-06,
      "loss": 0.3161,
      "step": 2340
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.174465890509365e-06,
      "loss": 0.296,
      "step": 2350
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.128160272283172e-06,
      "loss": 0.2718,
      "step": 2360
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.081931648689577e-06,
      "loss": 0.3204,
      "step": 2370
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.035784102309097e-06,
      "loss": 0.3418,
      "step": 2380
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.989721708562098e-06,
      "loss": 0.3246,
      "step": 2390
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.943748535348872e-06,
      "loss": 0.2989,
      "step": 2400
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.8978686426903885e-06,
      "loss": 0.3318,
      "step": 2410
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.852086082369758e-06,
      "loss": 0.2856,
      "step": 2420
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.806404897574388e-06,
      "loss": 0.2934,
      "step": 2430
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.760829122538931e-06,
      "loss": 0.3017,
      "step": 2440
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.7153627821890026e-06,
      "loss": 0.318,
      "step": 2450
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.6700098917857387e-06,
      "loss": 0.2964,
      "step": 2460
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.624774456571176e-06,
      "loss": 0.2909,
      "step": 2470
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.579660471414565e-06,
      "loss": 0.3043,
      "step": 2480
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.5346719204595497e-06,
      "loss": 0.305,
      "step": 2490
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.48981277677233e-06,
      "loss": 0.2756,
      "step": 2500
    },
    {
      "epoch": 1.3,
      "eval_webgpt_accuracy": 0.6131256384065373,
      "eval_webgpt_loss": 0.6625245809555054,
      "eval_webgpt_runtime": 166.7899,
      "eval_webgpt_samples_per_second": 23.479,
      "eval_webgpt_steps_per_second": 4.701,
      "step": 2500
    },
    {
      "epoch": 1.3,
      "eval_hfsummary_accuracy": 0.7223347338512227,
      "eval_hfsummary_loss": 0.6251139044761658,
      "eval_hfsummary_runtime": 3221.6572,
      "eval_hfsummary_samples_per_second": 10.269,
      "eval_hfsummary_steps_per_second": 2.054,
      "step": 2500
    },
    {
      "epoch": 1.3,
      "eval_gptsynthetic_accuracy": 0.9993966817496229,
      "eval_gptsynthetic_loss": 0.00219121971167624,
      "eval_gptsynthetic_runtime": 101.3035,
      "eval_gptsynthetic_samples_per_second": 32.723,
      "eval_gptsynthetic_steps_per_second": 6.545,
      "step": 2500
    }
  ],
  "max_steps": 3842,
  "num_train_epochs": 2,
  "total_flos": 4.2100836618337517e+17,
  "trial_name": null,
  "trial_params": null
}
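The state above follows the `trainer_state.json` format that the Hugging Face `Trainer` writes alongside each checkpoint: `log_history` interleaves training records (`loss` every 10 steps) with per-dataset evaluation records (`eval_<name>_accuracy` and friends every 250 steps). Below is a minimal sketch of how one might parse and plot it; the local path `trainer_state.json` and the use of matplotlib are assumptions, not part of the checkpoint itself.

```python
# Minimal sketch: split log_history into training-loss and eval-accuracy
# curves and plot them side by side. Assumes the JSON above is saved
# locally as "trainer_state.json" and that matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

train_steps, train_losses = [], []
eval_curves = {}  # metric name -> (steps, values)
for record in state["log_history"]:
    if "loss" in record:  # training record
        train_steps.append(record["step"])
        train_losses.append(record["loss"])
    for key, value in record.items():  # evaluation record(s)
        if key.endswith("_accuracy"):
            steps, values = eval_curves.setdefault(key, ([], []))
            steps.append(record["step"])
            values.append(value)

fig, (ax_loss, ax_acc) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(train_steps, train_losses)
ax_loss.set(xlabel="step", ylabel="train loss")
for name, (steps, values) in eval_curves.items():
    ax_acc.plot(steps, values, marker="o", label=name)
ax_acc.set(xlabel="step", ylabel="eval accuracy")
ax_acc.legend(fontsize=8)
fig.tight_layout()
plt.show()
```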