Training in progress, step 150, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:64e2d3e599ceccb51131143c3535a8bf1fea1f8fb7090f2e42512e31a7c5f29d
 size 313820248
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:586995f97ab94b839676b5fdccfb670e75e5b09c5f1602bb7e1893c1d33964b0
 size 159641092
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:be13dfc437ebaf22fe1c460077f957a008e3b4eec1919313f3b5733f42e57f8b
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d8ce05761f46e7cf72fb17a02e3a0ca15c9d25ce3babf590eeb40568923b8bac
 size 1064
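The four files above are Git LFS pointer files: each stores only the LFS spec version, the sha256 (oid) of the actual blob, and its size in bytes, while the blob itself lives in LFS storage. A minimal sketch, assuming a local copy of the checkpoint at last-checkpoint/ (hypothetical path), of how one might check that a downloaded blob matches the oid recorded in its pointer:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream a file from disk and return its hex sha256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected digest is the oid from the pointer diff for adapter_model.safetensors above.
expected = "64e2d3e599ceccb51131143c3535a8bf1fea1f8fb7090f2e42512e31a7c5f29d"
actual = sha256_of("last-checkpoint/adapter_model.safetensors")  # hypothetical local path
print("match" if actual == expected else f"mismatch: {actual}")
```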
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 3.
-  "best_model_checkpoint": "miner_id_24/checkpoint-
-  "epoch": 0.
+  "best_metric": 3.3761205673217773,
+  "best_model_checkpoint": "miner_id_24/checkpoint-150",
+  "epoch": 0.02395687762028349,
   "eval_steps": 50,
-  "global_step":
+  "global_step": 150,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -731,6 +731,364 @@
       "eval_samples_per_second": 28.699,
       "eval_steps_per_second": 7.177,
       "step": 100
+    },
+    {
+      "epoch": 0.016130964264324216,
+      "grad_norm": 2.563222646713257,
+      "learning_rate": 5.330452921628497e-05,
+      "loss": 3.1601,
+      "step": 101
+    },
+    {
+      "epoch": 0.016290676781792773,
+      "grad_norm": 2.3833115100860596,
+      "learning_rate": 5.247918773366112e-05,
+      "loss": 3.2734,
+      "step": 102
+    },
+    {
+      "epoch": 0.01645038929926133,
+      "grad_norm": 2.2120771408081055,
+      "learning_rate": 5.165316846586541e-05,
+      "loss": 3.3793,
+      "step": 103
+    },
+    {
+      "epoch": 0.016610101816729887,
+      "grad_norm": 1.8844492435455322,
+      "learning_rate": 5.0826697238317935e-05,
+      "loss": 3.2532,
+      "step": 104
+    },
+    {
+      "epoch": 0.016769814334198444,
+      "grad_norm": 1.4038431644439697,
+      "learning_rate": 5e-05,
+      "loss": 3.2878,
+      "step": 105
+    },
+    {
+      "epoch": 0.016929526851667,
+      "grad_norm": 1.0654053688049316,
+      "learning_rate": 4.917330276168208e-05,
+      "loss": 3.2445,
+      "step": 106
+    },
+    {
+      "epoch": 0.017089239369135555,
+      "grad_norm": 0.8803927302360535,
+      "learning_rate": 4.834683153413459e-05,
+      "loss": 3.3826,
+      "step": 107
+    },
+    {
+      "epoch": 0.01724895188660411,
+      "grad_norm": 0.8809193968772888,
+      "learning_rate": 4.7520812266338885e-05,
+      "loss": 3.3263,
+      "step": 108
+    },
+    {
+      "epoch": 0.01740866440407267,
+      "grad_norm": 0.9295204281806946,
+      "learning_rate": 4.669547078371504e-05,
+      "loss": 3.3115,
+      "step": 109
+    },
+    {
+      "epoch": 0.017568376921541225,
+      "grad_norm": 0.9930161833763123,
+      "learning_rate": 4.5871032726383386e-05,
+      "loss": 3.2609,
+      "step": 110
+    },
+    {
+      "epoch": 0.017728089439009782,
+      "grad_norm": 1.0246449708938599,
+      "learning_rate": 4.504772348747687e-05,
+      "loss": 3.3752,
+      "step": 111
+    },
+    {
+      "epoch": 0.01788780195647834,
+      "grad_norm": 0.9885937571525574,
+      "learning_rate": 4.4225768151520694e-05,
+      "loss": 3.3546,
+      "step": 112
+    },
+    {
+      "epoch": 0.018047514473946896,
+      "grad_norm": 0.934032142162323,
+      "learning_rate": 4.3405391432896555e-05,
+      "loss": 3.4028,
+      "step": 113
+    },
+    {
+      "epoch": 0.018207226991415453,
+      "grad_norm": 0.9788658618927002,
+      "learning_rate": 4.2586817614407895e-05,
+      "loss": 3.3554,
+      "step": 114
+    },
+    {
+      "epoch": 0.01836693950888401,
+      "grad_norm": 0.9676387310028076,
+      "learning_rate": 4.17702704859633e-05,
+      "loss": 3.3169,
+      "step": 115
+    },
+    {
+      "epoch": 0.018526652026352567,
+      "grad_norm": 1.0396462678909302,
+      "learning_rate": 4.095597328339452e-05,
+      "loss": 3.2757,
+      "step": 116
+    },
+    {
+      "epoch": 0.01868636454382112,
+      "grad_norm": 1.0794548988342285,
+      "learning_rate": 4.0144148627425993e-05,
+      "loss": 3.327,
+      "step": 117
+    },
+    {
+      "epoch": 0.018846077061289677,
+      "grad_norm": 1.0206615924835205,
+      "learning_rate": 3.933501846281267e-05,
+      "loss": 3.225,
+      "step": 118
+    },
+    {
+      "epoch": 0.019005789578758234,
+      "grad_norm": 0.9828562140464783,
+      "learning_rate": 3.852880399766243e-05,
+      "loss": 3.3342,
+      "step": 119
+    },
+    {
+      "epoch": 0.01916550209622679,
+      "grad_norm": 1.2915130853652954,
+      "learning_rate": 3.772572564296005e-05,
+      "loss": 3.3889,
+      "step": 120
+    },
+    {
+      "epoch": 0.01932521461369535,
+      "grad_norm": 1.0610451698303223,
+      "learning_rate": 3.6926002952309016e-05,
+      "loss": 3.3425,
+      "step": 121
+    },
+    {
+      "epoch": 0.019484927131163905,
+      "grad_norm": 1.0585017204284668,
+      "learning_rate": 3.612985456190778e-05,
+      "loss": 3.3319,
+      "step": 122
+    },
+    {
+      "epoch": 0.019644639648632462,
+      "grad_norm": 1.0622979402542114,
+      "learning_rate": 3.533749813077677e-05,
+      "loss": 3.527,
+      "step": 123
+    },
+    {
+      "epoch": 0.01980435216610102,
+      "grad_norm": 1.0423346757888794,
+      "learning_rate": 3.4549150281252636e-05,
+      "loss": 3.3608,
+      "step": 124
+    },
+    {
+      "epoch": 0.019964064683569576,
+      "grad_norm": 1.0512298345565796,
+      "learning_rate": 3.3765026539765834e-05,
+      "loss": 3.4576,
+      "step": 125
+    },
+    {
+      "epoch": 0.020123777201038133,
+      "grad_norm": 1.050781011581421,
+      "learning_rate": 3.298534127791785e-05,
+      "loss": 3.429,
+      "step": 126
+    },
+    {
+      "epoch": 0.020283489718506686,
+      "grad_norm": 1.0709890127182007,
+      "learning_rate": 3.221030765387417e-05,
+      "loss": 3.4985,
+      "step": 127
+    },
+    {
+      "epoch": 0.020443202235975243,
+      "grad_norm": 1.146832823753357,
+      "learning_rate": 3.144013755408895e-05,
+      "loss": 3.4954,
+      "step": 128
+    },
+    {
+      "epoch": 0.0206029147534438,
+      "grad_norm": 1.070890188217163,
+      "learning_rate": 3.0675041535377405e-05,
+      "loss": 3.3281,
+      "step": 129
+    },
+    {
+      "epoch": 0.020762627270912357,
+      "grad_norm": 1.0944342613220215,
+      "learning_rate": 2.991522876735154e-05,
+      "loss": 3.4404,
+      "step": 130
+    },
+    {
+      "epoch": 0.020922339788380914,
+      "grad_norm": 1.1096227169036865,
+      "learning_rate": 2.916090697523549e-05,
+      "loss": 3.4476,
+      "step": 131
+    },
+    {
+      "epoch": 0.02108205230584947,
+      "grad_norm": 1.2392953634262085,
+      "learning_rate": 2.8412282383075363e-05,
+      "loss": 3.4012,
+      "step": 132
+    },
+    {
+      "epoch": 0.021241764823318028,
+      "grad_norm": 1.1994134187698364,
+      "learning_rate": 2.766955965735968e-05,
+      "loss": 3.3475,
+      "step": 133
+    },
+    {
+      "epoch": 0.021401477340786585,
+      "grad_norm": 1.1271154880523682,
+      "learning_rate": 2.693294185106562e-05,
+      "loss": 3.1871,
+      "step": 134
+    },
+    {
+      "epoch": 0.021561189858255142,
+      "grad_norm": 1.1170655488967896,
+      "learning_rate": 2.6202630348146324e-05,
+      "loss": 3.2844,
+      "step": 135
+    },
+    {
+      "epoch": 0.0217209023757237,
+      "grad_norm": 1.148924469947815,
+      "learning_rate": 2.547882480847461e-05,
+      "loss": 3.5097,
+      "step": 136
+    },
+    {
+      "epoch": 0.021880614893192252,
+      "grad_norm": 1.1717835664749146,
+      "learning_rate": 2.476172311325783e-05,
+      "loss": 3.5366,
+      "step": 137
+    },
+    {
+      "epoch": 0.02204032741066081,
+      "grad_norm": 1.1314111948013306,
+      "learning_rate": 2.405152131093926e-05,
+      "loss": 3.2612,
+      "step": 138
+    },
+    {
+      "epoch": 0.022200039928129366,
+      "grad_norm": 1.1890403032302856,
+      "learning_rate": 2.3348413563600325e-05,
+      "loss": 3.4438,
+      "step": 139
+    },
+    {
+      "epoch": 0.022359752445597923,
+      "grad_norm": 1.1819334030151367,
+      "learning_rate": 2.2652592093878666e-05,
+      "loss": 3.4172,
+      "step": 140
+    },
+    {
+      "epoch": 0.02251946496306648,
+      "grad_norm": 1.2623519897460938,
+      "learning_rate": 2.196424713241637e-05,
+      "loss": 3.4651,
+      "step": 141
+    },
+    {
+      "epoch": 0.022679177480535037,
+      "grad_norm": 1.2643638849258423,
+      "learning_rate": 2.128356686585282e-05,
+      "loss": 3.4054,
+      "step": 142
+    },
+    {
+      "epoch": 0.022838889998003594,
+      "grad_norm": 1.3270361423492432,
+      "learning_rate": 2.061073738537635e-05,
+      "loss": 3.5708,
+      "step": 143
+    },
+    {
+      "epoch": 0.02299860251547215,
+      "grad_norm": 1.2927286624908447,
+      "learning_rate": 1.9945942635848748e-05,
+      "loss": 3.5297,
+      "step": 144
+    },
+    {
+      "epoch": 0.023158315032940708,
+      "grad_norm": 1.3989403247833252,
+      "learning_rate": 1.928936436551661e-05,
+      "loss": 3.3454,
+      "step": 145
+    },
+    {
+      "epoch": 0.023318027550409265,
+      "grad_norm": 1.3746222257614136,
+      "learning_rate": 1.8641182076323148e-05,
+      "loss": 3.3294,
+      "step": 146
+    },
+    {
+      "epoch": 0.02347774006787782,
+      "grad_norm": 1.6690480709075928,
+      "learning_rate": 1.800157297483417e-05,
+      "loss": 3.4948,
+      "step": 147
+    },
+    {
+      "epoch": 0.023637452585346375,
+      "grad_norm": 1.5143362283706665,
+      "learning_rate": 1.7370711923791567e-05,
+      "loss": 3.5697,
+      "step": 148
+    },
+    {
+      "epoch": 0.023797165102814932,
+      "grad_norm": 1.5836552381515503,
+      "learning_rate": 1.6748771394307585e-05,
+      "loss": 3.5201,
+      "step": 149
+    },
+    {
+      "epoch": 0.02395687762028349,
+      "grad_norm": 1.9442321062088013,
+      "learning_rate": 1.6135921418712956e-05,
+      "loss": 3.5552,
+      "step": 150
+    },
+    {
+      "epoch": 0.02395687762028349,
+      "eval_loss": 3.3761205673217773,
+      "eval_runtime": 367.4841,
+      "eval_samples_per_second": 28.695,
+      "eval_steps_per_second": 7.176,
+      "step": 150
     }
   ],
   "logging_steps": 1,
@@ -759,7 +1117,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 6.086290321283482e+16,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
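The updated trainer_state.json carries the best-checkpoint bookkeeping (best_metric, best_model_checkpoint, global_step) plus the per-step log_history shown in the hunk above. A minimal sketch, assuming a local copy of the file at last-checkpoint/trainer_state.json (hypothetical path), of pulling out the best metric and the most recent logged training loss:

```python
import json

# Hypothetical local path to the checkpoint's trainer state.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print("best_metric:", state["best_metric"])                      # 3.3761205673217773
print("best_model_checkpoint:", state["best_model_checkpoint"])  # miner_id_24/checkpoint-150
print("global_step:", state["global_step"])                      # 150

# Last log_history entry that recorded a training loss (eval entries use eval_loss instead).
train_logs = [entry for entry in state["log_history"] if "loss" in entry]
if train_logs:
    last = train_logs[-1]
    print(f"step {last['step']}: loss {last['loss']}")
```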