add nnsight config file
- config-nnsight.yaml +44 -0
config-nnsight.yaml
ADDED
@@ -0,0 +1,44 @@
+# Transcoder Configuration: Gemma Scope Transcoders (lowest L0)
+model_name: "google/gemma-2-2b"
+model_kind: "transcoder_set"
+feature_input_location_pattern: "model.layers[{layer}].pre_feedforward_layernorm.source.self__norm_0"
+feature_output_location_pattern: "model.layers[{layer}].post_feedforward_layernorm"
+attention_location_pattern: "model.layers[{layer}].self_attn.source.attention_interface_0.source.nn_functional_dropout_0"
+layernorm_scale_location_patterns:
+  - "model.layers[{layer}].input_layernorm.source.self__norm_0.source.torch_rsqrt_0"
+  - "model.layers[{layer}].post_attention_layernorm.source.self__norm_0.source.torch_rsqrt_0"
+  - "model.layers[{layer}].pre_feedforward_layernorm.source.self__norm_0.source.torch_rsqrt_0"
+  - "model.layers[{layer}].post_feedforward_layernorm.source.self__norm_0.source.torch_rsqrt_0"
+  - "model.norm.source.self__norm_0.source.torch_rsqrt_0"
+pre_logit_location: "model.norm"
+embed_location: "model.embed_tokens"
+embed_weight: "model.embed_tokens.weight"
+unembed_weight: "lm_head.weight"
+
+transcoders:
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_0/width_16k/average_l0_76/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_1/width_16k/average_l0_65/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_2/width_16k/average_l0_49/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_3/width_16k/average_l0_54/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_4/width_16k/average_l0_88/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_5/width_16k/average_l0_87/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_6/width_16k/average_l0_95/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_7/width_16k/average_l0_70/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_8/width_16k/average_l0_52/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_9/width_16k/average_l0_72/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_10/width_16k/average_l0_88/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_11/width_16k/average_l0_5/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_12/width_16k/average_l0_6/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_13/width_16k/average_l0_8/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_14/width_16k/average_l0_8/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_15/width_16k/average_l0_8/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_16/width_16k/average_l0_10/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_17/width_16k/average_l0_12/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_18/width_16k/average_l0_13/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_19/width_16k/average_l0_12/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_20/width_16k/average_l0_11/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_21/width_16k/average_l0_13/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_22/width_16k/average_l0_15/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_23/width_16k/average_l0_25/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_24/width_16k/average_l0_37/params.npz"
+  - "hf://google/gemma-scope-2b-pt-transcoders/layer_25/width_16k/average_l0_41/params.npz"
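
For reference, a minimal sketch of how a consumer of this config might expand the `{layer}` placeholders in the location patterns. Only the YAML keys and the placeholder syntax come from the file above; the loading code and variable names are assumptions, not part of this commit.

```python
import yaml

with open("config-nnsight.yaml") as f:
    cfg = yaml.safe_load(f)

# One transcoder entry per decoder layer; gemma-2-2b has 26 layers,
# so the list length doubles as the layer count.
n_layers = len(cfg["transcoders"])

for layer in range(n_layers):
    mlp_in = cfg["feature_input_location_pattern"].format(layer=layer)
    mlp_out = cfg["feature_output_location_pattern"].format(layer=layer)
    attn = cfg["attention_location_pattern"].format(layer=layer)
    # Patterns without a {layer} placeholder (e.g. "model.norm...") are
    # unchanged by format(), so the final-norm entry passes through as-is.
    scales = [p.format(layer=layer)
              for p in cfg["layernorm_scale_location_patterns"]]
    # e.g. layer 0: "model.layers[0].pre_feedforward_layernorm.source.self__norm_0"
```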
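The `transcoders` entries follow an `hf://<org>/<repo>/<path-in-repo>` layout (inferred from the entries themselves; this config does not define the scheme). A hedged sketch of resolving one entry via `huggingface_hub` and loading the archive — the key names stored in `params.npz` are not specified here, so the snippet just inspects them:

```python
import numpy as np
from huggingface_hub import hf_hub_download

def load_transcoder(uri: str) -> dict:
    # Split "hf://google/gemma-scope-2b-pt-transcoders/layer_0/..." into the
    # repo id (first two path components) and the file path inside the repo.
    assert uri.startswith("hf://")
    org, repo, path = uri[len("hf://"):].split("/", 2)
    local = hf_hub_download(repo_id=f"{org}/{repo}", filename=path)
    return dict(np.load(local))

params = load_transcoder(
    "hf://google/gemma-scope-2b-pt-transcoders/layer_0/width_16k/average_l0_76/params.npz"
)
print(sorted(params))  # list the parameter arrays stored in the archive
```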