SAELens
Commit ebb94ec (verified) · CallumMcDougallGDM committed · Parent: 4bab4b7

Add files using upload-large-folder tool

Files changed (39)
  1. crosscoder/layer_9_17_22_29_width_1m_l0_big/config.json +10 -0
  2. crosscoder/layer_9_17_22_29_width_1m_l0_medium/config.json +10 -0
  3. crosscoder/layer_9_17_22_29_width_262k_l0_big/config.json +10 -0
  4. crosscoder/layer_9_17_22_29_width_262k_l0_medium/config.json +10 -0
  5. crosscoder/layer_9_17_22_29_width_524k_l0_big/config.json +10 -0
  6. crosscoder/layer_9_17_22_29_width_524k_l0_medium/config.json +10 -0
  7. crosscoder/layer_9_17_22_29_width_65k_l0_big/config.json +10 -0
  8. crosscoder/layer_9_17_22_29_width_65k_l0_medium/config.json +10 -0
  9. mlp_out/layer_17_width_16k_l0_big/config.json +10 -0
  10. mlp_out/layer_17_width_16k_l0_small/config.json +10 -0
  11. mlp_out/layer_17_width_262k_l0_medium/config.json +10 -0
  12. mlp_out/layer_17_width_65k_l0_big/config.json +10 -0
  13. mlp_out/layer_17_width_65k_l0_medium/config.json +10 -0
  14. mlp_out/layer_17_width_65k_l0_small/config.json +10 -0
  15. mlp_out/layer_22_width_16k_l0_medium/config.json +10 -0
  16. mlp_out/layer_22_width_16k_l0_small/config.json +10 -0
  17. mlp_out/layer_22_width_262k_l0_medium/config.json +10 -0
  18. mlp_out/layer_22_width_262k_l0_small/config.json +10 -0
  19. mlp_out/layer_22_width_65k_l0_big/config.json +10 -0
  20. mlp_out/layer_22_width_65k_l0_medium/config.json +10 -0
  21. mlp_out/layer_22_width_65k_l0_small/config.json +10 -0
  22. mlp_out/layer_29_width_16k_l0_medium/config.json +10 -0
  23. mlp_out/layer_29_width_16k_l0_small/config.json +10 -0
  24. mlp_out/layer_29_width_262k_l0_big/config.json +10 -0
  25. mlp_out/layer_29_width_262k_l0_medium/config.json +10 -0
  26. mlp_out/layer_29_width_65k_l0_big/config.json +10 -0
  27. mlp_out/layer_29_width_65k_l0_medium/config.json +10 -0
  28. mlp_out/layer_9_width_16k_l0_big/config.json +10 -0
  29. mlp_out/layer_9_width_16k_l0_medium/config.json +10 -0
  30. mlp_out/layer_9_width_16k_l0_small/config.json +10 -0
  31. mlp_out/layer_9_width_262k_l0_medium/config.json +10 -0
  32. mlp_out/layer_9_width_65k_l0_big/config.json +10 -0
  33. mlp_out/layer_9_width_65k_l0_small/config.json +10 -0
  34. transcoder/layer_17_width_16k_l0_medium/config.json +10 -0
  35. transcoder/layer_17_width_16k_l0_small_affine/config.json +10 -0
  36. transcoder/layer_17_width_262k_l0_small/config.json +10 -0
  37. transcoder/layer_22_width_16k_l0_big_affine/config.json +10 -0
  38. transcoder/layer_29_width_262k_l0_small_affine/config.json +10 -0
  39. transcoder/layer_9_width_16k_l0_small_affine/config.json +10 -0
crosscoder/layer_9_17_22_29_width_1m_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 1048576,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
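
Each of these configs is a plain 10-line JSON file, so it can be pulled and inspected directly from the Hub. A minimal sketch, assuming huggingface_hub is installed; the repo_id below is a placeholder, since this page does not name the repository:

import json
from huggingface_hub import hf_hub_download

# Placeholder repo_id -- substitute the repository this commit belongs to.
path = hf_hub_download(
    repo_id="<org>/<repo>",
    filename="crosscoder/layer_9_17_22_29_width_1m_l0_big/config.json",
)
with open(path) as f:
    cfg = json.load(f)
print(cfg["width"], cfg["l0"], cfg["type"])  # -> 1048576 150 crosscoder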
crosscoder/layer_9_17_22_29_width_1m_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 1048576,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 50,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
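
The crosscoder configs write a brace set into both hook-point fields, so one template covers layers 9, 17, 22, and 29 at once. A minimal resolver sketch; the brace notation is taken from the configs above, but this helper is an illustrative assumption, not a documented loader API:

import re

def expand_hook_point(hook_point: str) -> list[str]:
    # 'model.layers.{9,17,22,29}.output' -> one dotted path per layer
    m = re.search(r"\{([\d,]+)\}", hook_point)
    if m is None:
        return [hook_point]
    return [hook_point.replace(m.group(0), i) for i in m.group(1).split(",")]

expand_hook_point("model.layers.{9,17,22,29}.output")
# ['model.layers.9.output', 'model.layers.17.output',
#  'model.layers.22.output', 'model.layers.29.output']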
crosscoder/layer_9_17_22_29_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
crosscoder/layer_9_17_22_29_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 50,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
crosscoder/layer_9_17_22_29_width_524k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 524288,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
crosscoder/layer_9_17_22_29_width_524k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 524288,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 50,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
crosscoder/layer_9_17_22_29_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
crosscoder/layer_9_17_22_29_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.{9,17,22,29}.output",
+   "hf_hook_point_out": "model.layers.{9,17,22,29}.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 50,
+   "affine_connection": false,
+   "type": "crosscoder"
+ }
mlp_out/layer_17_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "sae"
+ }
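
The mlp_out configs use identical hf_hook_point_in and hf_hook_point_out, so these SAEs reconstruct the activation at a single site. A sketch of capturing that activation with a standard PyTorch forward hook, reading the trailing ".output" as "the forward output of this module" (an assumption about the naming convention, not documented on this page):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("google/gemma-3-4b-pt")
tok = AutoTokenizer.from_pretrained("google/gemma-3-4b-pt")

# Hook point from the config, minus the ".output" suffix.
module = model.get_submodule("model.layers.17.post_feedforward_layernorm")
acts = {}

def grab(mod, inputs, output):
    acts["mlp_out"] = output.detach()

handle = module.register_forward_hook(grab)
with torch.no_grad():
    model(**tok("Hello world", return_tensors="pt"))
handle.remove()
print(acts["mlp_out"].shape)  # (batch, seq_len, d_model)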
mlp_out/layer_17_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_17_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_17_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_17_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_17_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "sae"
+ }
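
Every config here declares "architecture": "jump_relu", and "l0" names the target sparsity: roughly the mean number of latents active per token out of "width". A minimal JumpReLU forward pass following the published formulation (Rajamanoharan et al., 2024); the parameter names are illustrative assumptions, since the checkpoint layout is defined by the weight files, not this page:

import torch

def jumprelu_sae(x, W_enc, b_enc, W_dec, b_dec, threshold):
    pre = x @ W_enc + b_enc       # (..., width) pre-activations
    f = pre * (pre > threshold)   # zero latents below a learned per-latent threshold
    return f @ W_dec + b_dec, f   # reconstruction and sparse features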
mlp_out/layer_22_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_22_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_22_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_22_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_22_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_22_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_22_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_29_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_29_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_29_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_29_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_29_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_29_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_9_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 131,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_9_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 53,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_9_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 17,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_9_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 53,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_9_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 131,
+   "affine_connection": false,
+   "type": "sae"
+ }
mlp_out/layer_9_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.post_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 65536,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 17,
+   "affine_connection": false,
+   "type": "sae"
+ }
transcoder/layer_17_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 60,
+   "affine_connection": false,
+   "type": "transcoder"
+ }
transcoder/layer_17_width_16k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": true,
+   "type": "transcoder"
+ }
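
Unlike the mlp_out SAEs, the transcoders have distinct in/out hook points: they read the MLP input (after pre_feedforward_layernorm) and predict the MLP output (after post_feedforward_layernorm). The "_affine" variants set "affine_connection": true, which plausibly adds a learned affine skip path from input to output alongside the sparse decoder; that reading is inferred from the flag name, and W_skip/b_skip below are illustrative, not checkpoint names:

import torch

def transcoder(x, W_enc, b_enc, W_dec, b_dec, threshold,
               W_skip=None, b_skip=None):
    pre = x @ W_enc + b_enc
    f = pre * (pre > threshold)   # JumpReLU features, as above
    y = f @ W_dec + b_dec         # predicts the MLP output, not x itself
    if W_skip is not None:        # "affine_connection": true
        y = y + x @ W_skip + b_skip
    return y, f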
transcoder/layer_17_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": false,
+   "type": "transcoder"
+ }
transcoder/layer_22_width_16k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 150,
+   "affine_connection": true,
+   "type": "transcoder"
+ }
transcoder/layer_29_width_262k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+   "width": 262144,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 20,
+   "affine_connection": true,
+   "type": "transcoder"
+ }
transcoder/layer_9_width_16k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "hf_hook_point_in": "model.layers.9.pre_feedforward_layernorm.output",
+   "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+   "width": 16384,
+   "model_name": "google/gemma-3-4b-pt",
+   "architecture": "jump_relu",
+   "l0": 17,
+   "affine_connection": true,
+   "type": "transcoder"
+ }