Primogenitor-V1-LLaMa-70B — mergekit_config.yml (532 bytes).
Uploaded by Tarek07 via huggingface_hub; commit f8eb615 (verified).
---
# mergekit configuration: della_linear merge of three Llama-3.3-70B
# derivatives onto meta-llama/Llama-3.3-70B-Instruct.
# NOTE(review): indentation restored to mergekit's documented schema —
# the source had been flattened, which breaks the required nesting.
models:
  # Per-model weights sum to 1.0 (0.75 + 0.20 + 0.05); density is the
  # DELLA retention fraction applied to each donor's delta parameters.
  - model: Tarek07/Progenitor-V1.1-LLaMa-70B
    parameters:
      weight: 0.75
      density: 0.7
  - model: LatitudeGames/Wayfarer-Large-70B-Llama-3.3
    parameters:
      weight: 0.20
      density: 0.7
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
    parameters:
      weight: 0.05
      density: 0.7
merge_method: della_linear
base_model: meta-llama/Llama-3.3-70B-Instruct
parameters:
  # epsilon/lambda tune DELLA's magnitude-based pruning and rescaling —
  # see mergekit's della documentation for exact semantics.
  epsilon: 0.2
  lambda: 1.1
  # normalize: false keeps the raw weighted sum (weights already total 1.0).
  normalize: false
out_dtype: bfloat16
tokenizer:
  # Take the tokenizer from this donor rather than from the base model.
  source: SicariusSicariiStuff/Negative_LLAMA_70B