{
  "best_global_step": 126,
  "best_metric": 0.8264642357826233,
  "best_model_checkpoint": "outputs\\meta-llama__Llama-3.2-1B\\amazon_13k\\run_0\\checkpoint-126",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 126,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_f1_macro": 0.6783075928688049,
      "eval_f1_micro": 0.7890295386314392,
      "eval_loss": 0.1833878755569458,
      "eval_runtime": 30.1882,
      "eval_samples_per_second": 16.563,
      "eval_steps_per_second": 4.141,
      "step": 63
    },
    {
      "epoch": 2.0,
      "eval_f1_macro": 0.7235337495803833,
      "eval_f1_micro": 0.8264642357826233,
      "eval_loss": 0.15523777902126312,
      "eval_runtime": 30.0805,
      "eval_samples_per_second": 16.622,
      "eval_steps_per_second": 4.156,
      "step": 126
    }
  ],
  "logging_steps": 500,
  "max_steps": 1260,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 2,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4378769774870528e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}