```json
{
    "lora_name": "alpaca",
    "always_override": false,
    "save_steps": 0.0,
    "micro_batch_size": 4,
    "batch_size": 128,
    "epochs": 5.0,
    "learning_rate": "2e-5",
    "lr_scheduler_type": "linear",
    "lora_rank": 512,
    "lora_alpha": 1024,
    "lora_dropout": 0.05,
    "cutoff_len": 256,
    "dataset": "alpaca_data_cleaned_spanish",
    "eval_dataset": "None",
    "format": "alpaca-chatbot-format",
    "eval_steps": 100.0,
    "raw_text_file": "None",
    "overlap_len": 128,
    "newline_favor_len": 128,
    "higher_rank_limit": false,
    "warmup_steps": 100.0,
    "optimizer": "adamw_torch",
    "hard_cut_string": "\\n\\n\\n",
    "train_only_after": "",
    "stop_at_loss": 0,
    "add_eos_token": false,
    "min_chars": 0.0,
    "report_to": "None"
}
```
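
For orientation, here is a minimal sketch of how these hyperparameters could map onto the Hugging Face PEFT/transformers stack. The file name `training_parameters.json` and the `target_modules` choice are assumptions for illustration, not values taken from the config above; the effective batch size of 128 is reached by accumulating gradients over `batch_size / micro_batch_size = 32` micro-batches.

```python
# Sketch only: maps the JSON config above onto PEFT LoraConfig and
# transformers TrainingArguments. Paths and target_modules are assumptions.
import json

from peft import LoraConfig
from transformers import TrainingArguments

# Hypothetical path to the config shown above.
with open("training_parameters.json") as f:
    params = json.load(f)

# LoRA adapter settings: rank, scaling factor, and dropout come straight
# from the config. target_modules is an assumption (a common choice for
# LLaMA-style models); it is not stored in this config file.
lora_config = LoraConfig(
    r=params["lora_rank"],                # 512
    lora_alpha=params["lora_alpha"],      # 1024 (scaling = alpha / rank = 2.0)
    lora_dropout=params["lora_dropout"],  # 0.05
    task_type="CAUSAL_LM",
    target_modules=["q_proj", "v_proj"],  # assumption, not from the config
)

# Optimizer and schedule settings drawn from the same config.
training_args = TrainingArguments(
    output_dir=params["lora_name"],
    per_device_train_batch_size=params["micro_batch_size"],  # 4
    gradient_accumulation_steps=(
        params["batch_size"] // params["micro_batch_size"]    # 128 / 4 = 32
    ),
    num_train_epochs=params["epochs"],                        # 5.0
    learning_rate=float(params["learning_rate"]),             # stored as "2e-5"
    lr_scheduler_type=params["lr_scheduler_type"],            # "linear"
    warmup_steps=int(params["warmup_steps"]),                 # 100
    optim=params["optimizer"],                                # "adamw_torch"
    report_to="none",  # the config's "None" means no external logging
)
```

Note that several fields are stored as strings or floats (`"learning_rate": "2e-5"`, `"warmup_steps": 100.0`, `"report_to": "None"`), so a consumer of this file needs the small conversions shown above.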