```yaml
models:
  - model: TareksLab/L2-MERGE2a
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L2-MERGE4
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L-BASE-V1
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L2-MERGE3
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L2-MERGE1
    parameters:
      weight: 0.20
      density: 0.5
merge_method: dare_ties
base_model: TareksLab/L-BASE-V1
parameters:
  normalize: false
out_dtype: bfloat16
chat_template: llama3
tokenizer:
  source: base
```
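
To give a rough sense of what the `weight`, `density`, and `normalize` fields above control, here is a minimal conceptual sketch of a DARE-TIES style merge on a single flattened parameter tensor. It is not mergekit's actual implementation; the function name `dare_ties_merge`, its arguments, and the NumPy-based formulation are illustrative assumptions. The idea: each donor's task vector (its difference from the base) is randomly sparsified to the given `density` and rescaled, then the per-model `weight` values combine the surviving deltas with TIES-style sign election.

```python
import numpy as np

def dare_ties_merge(base, finetuned, weights, density, normalize=False, seed=0):
    """Conceptual sketch of a DARE-TIES merge on one flat tensor.

    base:      1-D array of base-model parameters
    finetuned: list of 1-D arrays, one per donor model
    weights:   per-model merge weights (0.20 each in the config above)
    density:   fraction of each task vector kept by DARE (0.5 above)
    """
    rng = np.random.default_rng(seed)
    deltas = []
    for ft in finetuned:
        tv = ft - base                              # task vector vs. the base model
        keep = rng.random(tv.shape) < density       # DARE: random drop to target density
        deltas.append(np.where(keep, tv / density, 0.0))  # rescale survivors by 1/density

    deltas = np.stack(deltas)                       # shape: (num_models, num_params)
    w = np.asarray(weights)[:, None]

    # TIES-style sign election: keep only contributions whose sign agrees
    # with the weight-summed majority sign for each parameter.
    elected = np.sign((w * deltas).sum(axis=0))
    agree = np.sign(deltas) == elected
    merged_delta = (w * deltas * agree).sum(axis=0)

    if normalize:                                   # the config above sets normalize: false
        denom = (w * agree).sum(axis=0)
        merged_delta = np.where(denom != 0, merged_delta / denom, 0.0)

    return base + merged_delta
```

With `normalize: false`, the weights are applied as given rather than renormalized by their sum, so the five 0.20 weights act as a plain weighted combination of the elected deltas on top of `TareksLab/L-BASE-V1`.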