Legion-V2.1-LLaMa-70B / mergekit_config.yml
models:
  - model: TareksLab/L2-MERGE2a
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L2-MERGE4
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L-BASE-V1
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L2-MERGE3
    parameters:
      weight: 0.20
      density: 0.5
  - model: TareksLab/L2-MERGE1
    parameters:
      weight: 0.20
      density: 0.5
merge_method: dare_ties
base_model: TareksLab/L-BASE-V1
parameters:
  normalize: false
out_dtype: bfloat16
chat_template: llama3
tokenizer:
  source: base
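
A minimal sketch of how a config like this is typically consumed, assuming mergekit is installed and the file above is saved locally as mergekit_config.yml; the output directory name is a placeholder, not taken from this repository:

    mergekit-yaml mergekit_config.yml ./merged-model

With merge_method: dare_ties, each listed model's difference from base_model is sparsified (keeping roughly the given density of 0.5 of its delta parameters) and the survivors are combined using the listed weights; normalize: false means the 0.20 weights are applied as given rather than rescaled to sum to one.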