# AbyssSynth-12B / mergekit_config.yml
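# TIES merge of yamatazen/EtherealAurora-12B-v2 into the base model
# yamatazen/LorablatedStock-12B, computed in bfloat16.
# Global blend: 0.65 base / 0.35 donor, full density on both models,
# with weight normalization and int8 masking disabled.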
merge_method: ties
base_model: yamatazen/LorablatedStock-12B
models:
  - model: yamatazen/LorablatedStock-12B
    parameters:
      weight: 0.65
      density: 1.0
  - model: yamatazen/EtherealAurora-12B-v2
    parameters:
      weight: 0.35
      density: 1.0
parameters:
  normalize: false
  int8_mask: false
dtype: bfloat16
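# Per-tensor blend overrides: attention, MLP, embeddings, and layer norms
# each get their own source weighting (the base model is favored throughout,
# and the embedding table comes entirely from the base model).
# Note: "layer_parameters" is not a documented mergekit key; stock mergekit
# normally expresses per-tensor weights via filtered "parameters" entries,
# so these overrides may not be applied by an unmodified install.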
layer_parameters:
  - filter: "attn"
    sources:
      - model: yamatazen/LorablatedStock-12B
        weight: 0.6
      - model: yamatazen/EtherealAurora-12B-v2
        weight: 0.4
  - filter: "mlp"
    sources:
      - model: yamatazen/LorablatedStock-12B
        weight: 0.55
      - model: yamatazen/EtherealAurora-12B-v2
        weight: 0.45
  - filter: "embed_tokens"
    sources:
      - model: yamatazen/LorablatedStock-12B
        weight: 1.0
      - model: yamatazen/EtherealAurora-12B-v2
        weight: 0.0
  - filter: "layer_norm"
    sources:
      - model: yamatazen/LorablatedStock-12B
        weight: 0.7
      - model: yamatazen/EtherealAurora-12B-v2
        weight: 0.3
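# A typical way to run this config with mergekit (output path is illustrative):
#   mergekit-yaml mergekit_config.yml ./AbyssSynth-12B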