The uploaded model is the epoch-9 checkpoint, which achieved the best Matthews correlation coefficient of 0.6677 (66.77%).
"best_metric": 0.667660908939119,<br> "best_model_checkpoint": "/content/output_dir/checkpoint-2412",<br> "epoch": 10.0,<br> "global_step": 2680,<br> "is_hyper_param_search": false,<br> "is_local_process_zero": true,<br> "is_world_process_zero": true,<br> "max_steps": 2680,<br> "num_train_epochs": 10,<br> "total_flos": 7189983634007040.0,<br> "trial_name": null,<br> "trial_params": null<br> <table class="table table-bordered table-hover table-condensed"> <thead><tr><th title="Field #1">epoch</th> <th title="Field #2">eval_loss</th> <th title="Field #3">eval_matthews_correlation</th> <th title="Field #4">eval_runtime</th> <th title="Field #5">eval_samples_per_second</th> <th title="Field #6">eval_steps_per_second</th> <th title="Field #7">step</th> <th title="Field #8">learning_rate</th> <th title="Field #9">loss</th> </tr></thead> <tbody><tr> <td align="right">1</td> <td align="right">0.5115634202957153</td> <td align="right">0.5385290213636863</td> <td align="right">7.985</td> <td align="right">130.62</td> <td align="right">16.406</td> <td align="right">268</td> <td align="right">0.00009280492497114274</td> <td align="right">0.4622</td> </tr> <tr> <td align="right">2</td> <td align="right">0.4201788902282715</td> <td align="right">0.6035894895952164</td> <td align="right">8.0283</td> <td align="right">129.916</td> <td align="right">16.317</td> <td align="right">536</td> <td align="right">0.00008249326664101577</td> <td align="right">0.2823</td> </tr> <tr> <td align="right">3</td> <td align="right">0.580650806427002</td> <td align="right">0.5574138665741355</td> <td align="right">8.1314</td> <td align="right">128.268</td> <td align="right">16.11</td> <td align="right">804</td> <td align="right">0.00007218160831088881</td> <td align="right">0.1804</td> </tr> <tr> <td align="right">4</td> <td align="right">0.4439031779766083</td> <td align="right">0.6557697896854868</td> <td align="right">8.1435</td> <td align="right">128.078</td> <td align="right">16.087</td> <td 
align="right">1072</td> <td align="right">0.00006186994998076183</td> <td align="right">0.1357</td> </tr> <tr> <td align="right">5</td> <td align="right">0.5736830830574036</td> <td align="right">0.6249925495853809</td> <td align="right">8.0533</td> <td align="right">129.512</td> <td align="right">16.267</td> <td align="right">1340</td> <td align="right">0.00005155829165063486</td> <td align="right">0.0913</td> </tr> <tr> <td align="right">6</td> <td align="right">0.7729296684265137</td> <td align="right">0.6188970025554703</td> <td align="right">8.081</td> <td align="right">129.068</td> <td align="right">16.211</td> <td align="right">1608</td> <td align="right">0.000041246633320507885</td> <td align="right">0.065</td> </tr> <tr> <td align="right">7</td> <td align="right">0.7351673245429993</td> <td align="right">0.6405767700619004</td> <td align="right">8.1372</td> <td align="right">128.176</td> <td align="right">16.099</td> <td align="right">1876</td> <td align="right">0.00003093497499038092</td> <td align="right">0.0433</td> </tr> <tr> <td align="right">8</td> <td align="right">0.7900031208992004</td> <td align="right">0.6565021466238845</td> <td align="right">8.1095</td> <td align="right">128.615</td> <td align="right">16.154</td> <td align="right">2144</td> <td align="right">0.000020623316660253942</td> <td align="right">0.0199</td> </tr> <tr> <td align="right">9</td> <td align="right">0.8539554476737976</td> <td align="right">0.667660908939119</td> <td align="right">8.1204</td> <td align="right">128.442</td> <td align="right">16.132</td> <td align="right">2412</td> <td align="right">0.000010311658330126971</td> <td align="right">0.0114</td> </tr> <tr> <td align="right">10</td> <td align="right">0.9261117577552795</td> <td align="right">0.660301076782038</td> <td align="right">8.0088</td> <td align="right">130.231</td> <td align="right">16.357</td> <td align="right">2680</td> <td align="right">0</td> <td align="right">0.0066</td> </tr> </tbody></table>