hijohnnylin committed
Commit 03f827c · verified · 1 Parent(s): 144c64b

Upload 1822 files

This view is limited to 50 files because it contains too many changes.

Files changed (50)
  1. results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_12_width_16k_canonical_eval_results.json +297 -0
  2. results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_12_width_65k_canonical_eval_results.json +297 -0
  3. results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_19_width_16k_canonical_eval_results.json +297 -0
  4. results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_19_width_65k_canonical_eval_results.json +297 -0
  5. results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_5_width_16k_canonical_eval_results.json +297 -0
  6. results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_5_width_65k_canonical_eval_results.json +297 -0
  7. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_176_eval_results.json +297 -0
  8. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_22_eval_results.json +297 -0
  9. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_41_eval_results.json +297 -0
  10. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_445_eval_results.json +297 -0
  11. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_82_eval_results.json +297 -0
  12. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_141_eval_results.json +297 -0
  13. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_21_eval_results.json +297 -0
  14. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_297_eval_results.json +297 -0
  15. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_38_eval_results.json +297 -0
  16. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_72_eval_results.json +297 -0
  17. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_137_eval_results.json +297 -0
  18. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_23_eval_results.json +297 -0
  19. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_279_eval_results.json +297 -0
  20. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_40_eval_results.json +297 -0
  21. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_73_eval_results.json +297 -0
  22. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_115_eval_results.json +297 -0
  23. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_216_eval_results.json +297 -0
  24. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_21_eval_results.json +297 -0
  25. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_35_eval_results.json +297 -0
  26. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_63_eval_results.json +297 -0
  27. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_143_eval_results.json +297 -0
  28. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_18_eval_results.json +297 -0
  29. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_309_eval_results.json +297 -0
  30. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_34_eval_results.json +297 -0
  31. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_68_eval_results.json +297 -0
  32. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_105_eval_results.json +297 -0
  33. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_17_eval_results.json +297 -0
  34. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_211_eval_results.json +297 -0
  35. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_29_eval_results.json +297 -0
  36. results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_53_eval_results.json +297 -0
  37. results_scr/gemma-scope-9b-pt-res-canonical/scr/gemma-scope-9b-pt-res-canonical_layer_9_width_16k_canonical_eval_results.json +297 -0
  38. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_0_eval_results.json +297 -0
  39. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_1_eval_results.json +297 -0
  40. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_2_eval_results.json +297 -0
  41. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_3_eval_results.json +297 -0
  42. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_4_eval_results.json +297 -0
  43. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_5_eval_results.json +297 -0
  44. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_0_eval_results.json +297 -0
  45. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_1_eval_results.json +297 -0
  46. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_2_eval_results.json +297 -0
  47. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_3_eval_results.json +297 -0
  48. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_4_eval_results.json +297 -0
  49. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_5_eval_results.json +297 -0
  50. results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.5.hook_resid_post__trainer_0_eval_results.json +297 -0
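Each filename in the listing above encodes which SAE was evaluated: the SAE Lens release, the layer of the residual stream it was trained on, the dictionary width, and either the canonical checkpoint or an average-L0 variant. Below is a minimal sketch of how such a name could be split into its parts; the regex and helper function are illustrative assumptions and not part of this repository, and the sae_bench_* files follow a different, trainer-based naming scheme that the sketch does not handle.

```python
import re

# Hypothetical helper: split a gemma-scope result filename from this commit into its parts.
# Pattern inferred from the listing above; sae_bench_* files use a different scheme
# (blocks.<layer>.hook_resid_post__trainer_<n>) and are not handled here.
GEMMA_SCOPE_RE = re.compile(
    r"(?P<release>.+)_layer_(?P<layer>\d+)_width_(?P<width>\d+k)"
    r"_(?P<sparsity>canonical|average_l0_\d+)_eval_results\.json$"
)

def parse_result_name(filename: str) -> dict:
    """Return release, layer, width, and sparsity tag for a gemma-scope result file."""
    match = GEMMA_SCOPE_RE.match(filename)
    if match is None:
        raise ValueError(f"unrecognized filename: {filename}")
    return match.groupdict()

print(parse_result_name(
    "gemma-scope-2b-pt-res-canonical_layer_12_width_16k_canonical_eval_results.json"
))
# {'release': 'gemma-scope-2b-pt-res-canonical', 'layer': '12', 'width': '16k', 'sparsity': 'canonical'}
```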
results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_12_width_16k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
+ {
+   "eval_type_id": "scr",
+   "eval_config": {
+     "random_seed": 42,
+     "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
+     "perform_scr": true,
+     "early_stopping_patience": 20,
+     "train_set_size": 4000,
+     "test_set_size": 1000,
+     "context_length": 128,
+     "probe_train_batch_size": 16,
+     "probe_test_batch_size": 500,
+     "probe_epochs": 20,
+     "probe_lr": 0.001,
+     "probe_l1_penalty": 0.001,
+     "sae_batch_size": 125,
+     "llm_batch_size": 32,
+     "llm_dtype": "bfloat16",
+     "model_name": "gemma-2-2b",
+     "n_values": [2, 5, 10, 20, 50, 100, 500],
+     "column1_vals_lookup": {
+       "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
+       "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
+     }
+   },
+   "eval_id": "341c427b-e726-4a96-a136-d7b22d90a964",
+   "datetime_epoch_millis": 1732140271999,
+   "eval_result_metrics": {
+     "scr_metrics": {
+       "scr_dir1_threshold_2": 0.29538624167885136, "scr_metric_threshold_2": 0.13466911203574558, "scr_dir2_threshold_2": 0.12493838720251739,
+       "scr_dir1_threshold_5": 0.33162325188553093, "scr_metric_threshold_5": 0.21467081912678176, "scr_dir2_threshold_5": 0.20058179176740487,
+       "scr_dir1_threshold_10": 0.31609306995184094, "scr_metric_threshold_10": 0.2892841392955365, "scr_dir2_threshold_10": 0.27756841175214314,
+       "scr_dir1_threshold_20": 0.3097389277918645, "scr_metric_threshold_20": 0.3698075389973075, "scr_dir2_threshold_20": 0.3547477671222778,
+       "scr_dir1_threshold_50": 0.2686493734151709, "scr_metric_threshold_50": 0.41033936450364406, "scr_dir2_threshold_50": 0.39330224687604975,
+       "scr_dir1_threshold_100": 0.20669912797818638, "scr_metric_threshold_100": 0.32932748806542533, "scr_dir2_threshold_100": 0.3192569673478806,
+       "scr_dir1_threshold_500": 0.07118524901628968, "scr_metric_threshold_500": 0.34228784301110815, "scr_dir2_threshold_500": 0.3238686774102922
+     }
+   },
+   "eval_result_details": [
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+       "scr_dir1_threshold_2": 0.43749988358469727, "scr_metric_threshold_2": 0.019704325740837254, "scr_dir2_threshold_2": 0.019704325740837254,
+       "scr_dir1_threshold_5": 0.5, "scr_metric_threshold_5": 0.03694577592471611, "scr_dir2_threshold_5": 0.03694577592471611,
+       "scr_dir1_threshold_10": 0.4531256111803394, "scr_metric_threshold_10": 0.051724056932709886, "scr_dir2_threshold_10": 0.051724056932709886,
+       "scr_dir1_threshold_20": 0.42187508731147705, "scr_metric_threshold_20": 0.08374378812458251, "scr_dir2_threshold_20": 0.08374378812458251,
+       "scr_dir1_threshold_50": 0.43749988358469727, "scr_metric_threshold_50": 0.1502462728747495, "scr_dir2_threshold_50": 0.1502462728747495,
+       "scr_dir1_threshold_100": 0.3593749708961743, "scr_metric_threshold_100": 0.17733981252431533, "scr_dir2_threshold_100": 0.17733981252431533,
+       "scr_dir1_threshold_500": 0.21874994179234863, "scr_metric_threshold_500": 0.3620688389573592, "scr_dir2_threshold_500": 0.3620688389573592
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+       "scr_dir1_threshold_2": 0.21782190488161987, "scr_metric_threshold_2": 0.21652430893001515, "scr_dir2_threshold_2": 0.21652430893001515,
+       "scr_dir1_threshold_5": 0.2376236279867973, "scr_metric_threshold_5": 0.3105413782733095, "scr_dir2_threshold_5": 0.3105413782733095,
+       "scr_dir1_threshold_10": 0.26732680278949705, "scr_metric_threshold_10": 0.34188034478314183, "scr_dir2_threshold_10": 0.34188034478314183,
+       "scr_dir1_threshold_20": 0.20792104332903116, "scr_metric_threshold_20": 0.41310551567101156, "scr_dir2_threshold_20": 0.41310551567101156,
+       "scr_dir1_threshold_50": -0.6930691608552144, "scr_metric_threshold_50": 0.48433051674508387, "scr_dir2_threshold_50": 0.48433051674508387,
+       "scr_dir1_threshold_100": -0.6930691608552144, "scr_metric_threshold_100": 0.1111112054521097, "scr_dir2_threshold_100": 0.1111112054521097,
+       "scr_dir1_threshold_500": -0.9405936503946004, "scr_metric_threshold_500": 0.051282068698851026, "scr_dir2_threshold_500": 0.051282068698851026
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+       "scr_dir1_threshold_2": 0.5714285714285714, "scr_metric_threshold_2": 0.037974794330044616, "scr_dir2_threshold_2": 0.037974794330044616,
+       "scr_dir1_threshold_5": 0.5714285714285714, "scr_metric_threshold_5": 0.09873425400116885, "scr_dir2_threshold_5": 0.09873425400116885,
+       "scr_dir1_threshold_10": 0.5238092084411499, "scr_metric_threshold_10": 0.13670889743339407, "scr_dir2_threshold_10": 0.13670889743339407,
+       "scr_dir1_threshold_20": 0.5238092084411499, "scr_metric_threshold_20": 0.22531654967307232, "scr_dir2_threshold_20": 0.22531654967307232,
+       "scr_dir1_threshold_50": 0.4444442341988618, "scr_metric_threshold_50": 0.23037977510490792, "scr_dir2_threshold_50": 0.23037977510490792,
+       "scr_dir1_threshold_100": 0.0634921686148548, "scr_metric_threshold_100": -0.0025316127159178037, "scr_dir2_threshold_100": -0.0025316127159178037,
+       "scr_dir1_threshold_500": -0.30158709134171896, "scr_metric_threshold_500": 0.022784816238899015, "scr_dir2_threshold_500": 0.022784816238899015
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+       "scr_dir1_threshold_2": 0.24409421102676765, "scr_metric_threshold_2": 0.15680483129152156, "scr_dir2_threshold_2": 0.15680483129152156,
+       "scr_dir1_threshold_5": 0.17322802864402692, "scr_metric_threshold_5": 0.233727799694535, "scr_dir2_threshold_5": 0.233727799694535,
+       "scr_dir1_threshold_10": -0.007874385297643128, "scr_metric_threshold_10": 0.3017751427116833, "scr_dir2_threshold_10": 0.3017751427116833,
+       "scr_dir1_threshold_20": -0.08661448365002022, "scr_metric_threshold_20": 0.43786982874598, "scr_dir2_threshold_20": 0.43786982874598,
+       "scr_dir1_threshold_50": 0.41732270899880136, "scr_metric_threshold_50": 0.44970417227223664, "scr_dir2_threshold_50": 0.44970417227223664,
+       "scr_dir1_threshold_100": 0.3779526598226128, "scr_metric_threshold_100": 0.16568045667738668, "scr_dir2_threshold_100": 0.16568045667738668,
+       "scr_dir1_threshold_500": 0.1889763299113064, "scr_metric_threshold_500": 0.18934914372989997, "scr_dir2_threshold_500": 0.18934914372989997
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+       "scr_dir1_threshold_2": 0.06557389329988525, "scr_metric_threshold_2": 0.125, "scr_dir2_threshold_2": 0.125,
+       "scr_dir1_threshold_5": 0.07650271266233713, "scr_metric_threshold_5": 0.28125005820765137, "scr_dir2_threshold_5": 0.28125005820765137,
+       "scr_dir1_threshold_10": 0.07650271266233713, "scr_metric_threshold_10": 0.542968888243172, "scr_dir2_threshold_10": 0.542968888243172,
+       "scr_dir1_threshold_20": 0.021857964433295084, "scr_metric_threshold_20": 0.671875087311477, "scr_dir2_threshold_20": 0.671875087311477,
+       "scr_dir1_threshold_50": -0.027322374114521025, "scr_metric_threshold_50": 0.7304687718278693, "scr_dir2_threshold_50": 0.7304687718278693,
+       "scr_dir1_threshold_100": -0.021857964433295084, "scr_metric_threshold_100": 0.7890624563442614, "scr_dir2_threshold_100": 0.7890624563442614,
+       "scr_dir1_threshold_500": 0.027322374114521025, "scr_metric_threshold_500": 0.8320313445874334, "scr_dir2_threshold_500": 0.8320313445874334
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+       "scr_dir1_threshold_2": 0.29230760766203984, "scr_metric_threshold_2": 0.04838718980952953, "scr_dir2_threshold_2": 0.04838718980952953,
+       "scr_dir1_threshold_5": 0.369230614047073, "scr_metric_threshold_5": 0.11290320254761761, "scr_dir2_threshold_5": 0.11290320254761761,
+       "scr_dir1_threshold_10": 0.38461533759002214, "scr_metric_threshold_10": 0.1733872498948507, "scr_dir2_threshold_10": 0.1733872498948507,
+       "scr_dir1_threshold_20": 0.4615383439750553, "scr_metric_threshold_20": 0.22983885116865949, "scr_dir2_threshold_20": 0.22983885116865949,
+       "scr_dir1_threshold_50": 0.4974359303536512, "scr_metric_threshold_50": 0.33870960764285285, "scr_dir2_threshold_50": 0.33870960764285285,
+       "scr_dir1_threshold_100": 0.5076922089390464, "scr_metric_threshold_100": 0.463709667728174, "scr_dir2_threshold_100": 0.463709667728174,
+       "scr_dir1_threshold_500": 0.4615383439750553, "scr_metric_threshold_500": 0.5080646518055639, "scr_dir2_threshold_500": 0.5080646518055639
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+       "scr_dir1_threshold_2": 0.375565603537441, "scr_metric_threshold_2": 0.31415918817422805, "scr_dir2_threshold_2": 0.31415918817422805,
+       "scr_dir1_threshold_5": 0.49321275755070776, "scr_metric_threshold_5": 0.4115043816005218, "scr_dir2_threshold_5": 0.4115043816005218,
+       "scr_dir1_threshold_10": 0.5565611628568693, "scr_metric_threshold_10": 0.4911504249731845, "scr_dir2_threshold_10": 0.4911504249731845,
+       "scr_dir1_threshold_20": 0.5927603286616061, "scr_metric_threshold_20": 0.561946761450355, "scr_dir2_threshold_20": 0.561946761450355,
+       "scr_dir1_threshold_50": 0.6651583905668239, "scr_metric_threshold_50": 0.4911504249731845, "scr_dir2_threshold_50": 0.4911504249731845,
+       "scr_dir1_threshold_100": 0.6651583905668239, "scr_metric_threshold_100": 0.5353981682385852, "scr_dir2_threshold_100": 0.5353981682385852,
+       "scr_dir1_threshold_500": 0.4343891805440744, "scr_metric_threshold_500": 0.2920353165415277, "scr_dir2_threshold_500": 0.2920353165415277
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
+       "scr_dir1_threshold_2": 0.15879825800978847, "scr_metric_threshold_2": 0.15879825800978847, "scr_dir2_threshold_2": 0.08095245934396302,
+       "scr_dir1_threshold_5": 0.23175970276473412, "scr_metric_threshold_5": 0.23175970276473412, "scr_dir2_threshold_5": 0.11904748388971893,
+       "scr_dir1_threshold_10": 0.27467810939215553, "scr_metric_threshold_10": 0.27467810939215553, "scr_dir2_threshold_10": 0.18095228904500887,
+       "scr_dir1_threshold_20": 0.3347639298333219, "scr_metric_threshold_20": 0.3347639298333219, "scr_dir2_threshold_20": 0.21428575483308432,
+       "scr_dir1_threshold_50": 0.4077253745882676, "scr_metric_threshold_50": 0.4077253745882676, "scr_dir2_threshold_50": 0.27142843356751334,
+       "scr_dir1_threshold_100": 0.3948497502744883, "scr_metric_threshold_100": 0.3948497502744883, "scr_dir2_threshold_100": 0.3142855845341302,
+       "scr_dir1_threshold_500": 0.4806865635293311, "scr_metric_threshold_500": 0.4806865635293311, "scr_dir2_threshold_500": 0.33333323872280324
+     }
+   ],
+   "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
+   "sae_lens_id": "layer_12/width_16k/canonical",
+   "sae_lens_release_id": "gemma-scope-2b-pt-res-canonical",
+   "sae_lens_version": "4.4.1",
+   "eval_result_unstructured": null
+ }
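Each of these result files has the same shape: an eval_config block recording the probe and dataset settings, a top-level eval_result_metrics.scr_metrics summary with one scr_dir1/scr_metric/scr_dir2 score per threshold in n_values, an eval_result_details list repeating those metrics for each dataset pair, and provenance fields (sae_bench_commit_hash, sae_lens_id, sae_lens_release_id, sae_lens_version). A minimal sketch for reading one of them is below; the path comes from this commit, and the loops assume only the structure visible in the JSON above.

```python
import json

# Illustrative only: load one of the uploaded SCR result files and print its scores.
path = (
    "results_scr/gemma-scope-2b-pt-res-canonical/scr/"
    "gemma-scope-2b-pt-res-canonical_layer_12_width_16k_canonical_eval_results.json"
)

with open(path) as f:
    result = json.load(f)

print(result["sae_lens_id"], result["eval_config"]["model_name"])

# One aggregate score per threshold n listed in eval_config["n_values"].
for n in result["eval_config"]["n_values"]:
    print(n, result["eval_result_metrics"]["scr_metrics"][f"scr_metric_threshold_{n}"])

# Per dataset-pair breakdown (e.g. professor vs. nurse, Books vs. CDs_and_Vinyl).
for detail in result["eval_result_details"]:
    print(detail["dataset_name"], detail["scr_metric_threshold_20"])
```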
results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_12_width_65k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
+ {
+   "eval_type_id": "scr",
+   "eval_config": {
+     "random_seed": 42,
+     "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
+     "perform_scr": true,
+     "early_stopping_patience": 20,
+     "train_set_size": 4000,
+     "test_set_size": 1000,
+     "context_length": 128,
+     "probe_train_batch_size": 16,
+     "probe_test_batch_size": 500,
+     "probe_epochs": 20,
+     "probe_lr": 0.001,
+     "probe_l1_penalty": 0.001,
+     "sae_batch_size": 125,
+     "llm_batch_size": 32,
+     "llm_dtype": "bfloat16",
+     "model_name": "gemma-2-2b",
+     "n_values": [2, 5, 10, 20, 50, 100, 500],
+     "column1_vals_lookup": {
+       "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
+       "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
+     }
+   },
+   "eval_id": "dc0c6a5a-b9b4-48bc-b43c-b57dff428ad8",
+   "datetime_epoch_millis": 1732179367388,
+   "eval_result_metrics": {
+     "scr_metrics": {
+       "scr_dir1_threshold_2": 0.21782108635654443, "scr_metric_threshold_2": 0.05198406220746681, "scr_dir2_threshold_2": 0.05274790898422797,
+       "scr_dir1_threshold_5": 0.2736353482899159, "scr_metric_threshold_5": 0.11364522800610186, "scr_dir2_threshold_5": 0.10903661699683478,
+       "scr_dir1_threshold_10": 0.2548046623405194, "scr_metric_threshold_10": 0.17602022856464683, "scr_dir2_threshold_10": 0.16747230742113575,
+       "scr_dir1_threshold_20": 0.27007297281939036, "scr_metric_threshold_20": 0.27074226421465636, "scr_dir2_threshold_20": 0.2624217161604107,
+       "scr_dir1_threshold_50": 0.27526224550358364, "scr_metric_threshold_50": 0.36654492749632883, "scr_dir2_threshold_50": 0.36286108728037586,
+       "scr_dir1_threshold_100": 0.3201452494141222, "scr_metric_threshold_100": 0.378308324992422, "scr_dir2_threshold_100": 0.37462448477646904,
+       "scr_dir1_threshold_500": -0.0764709120658077, "scr_metric_threshold_500": 0.36028058623533077, "scr_dir2_threshold_500": 0.3566912564421671
+     }
+   },
+   "eval_result_details": [
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+       "scr_dir1_threshold_2": 0.4062502910382569, "scr_metric_threshold_2": 0.009852089465686957, "scr_dir2_threshold_2": 0.009852089465686957,
+       "scr_dir1_threshold_5": 0.42187508731147705, "scr_metric_threshold_5": 0.024630517283144072, "scr_dir2_threshold_5": 0.024630517283144072,
+       "scr_dir1_threshold_10": 0.4062502910382569, "scr_metric_threshold_10": 0.04187182065755959, "scr_dir2_threshold_10": 0.04187182065755959,
+       "scr_dir1_threshold_20": 0.42187508731147705, "scr_metric_threshold_20": 0.0566502484750167, "scr_dir2_threshold_20": 0.0566502484750167,
+       "scr_dir1_threshold_50": 0.3593749708961743, "scr_metric_threshold_50": 0.08374378812458251, "scr_dir2_threshold_50": 0.08374378812458251,
+       "scr_dir1_threshold_100": 0.43749988358469727, "scr_metric_threshold_100": 0.1133004969500334, "scr_dir2_threshold_100": 0.1133004969500334,
+       "scr_dir1_threshold_500": -0.32812444702731197, "scr_metric_threshold_500": 0.044334989833444666, "scr_dir2_threshold_500": 0.044334989833444666
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+       "scr_dir1_threshold_2": 0.19801959163150878, "scr_metric_threshold_2": 0.08547017110268419, "scr_dir2_threshold_2": 0.08547017110268419,
+       "scr_dir1_threshold_5": 0.2376236279867973, "scr_metric_threshold_5": 0.15669517217675646, "scr_dir2_threshold_5": 0.15669517217675646,
+       "scr_dir1_threshold_10": 0.09900979581575439, "scr_metric_threshold_10": 0.24216534327944067, "scr_dir2_threshold_10": 0.24216534327944067,
+       "scr_dir1_threshold_20": 0.10891124751327678, "scr_metric_threshold_20": 0.2792024117634772, "scr_dir2_threshold_20": 0.2792024117634772,
+       "scr_dir1_threshold_50": 0.1386138321710429, "scr_metric_threshold_50": 0.41595448175121497, "scr_dir2_threshold_50": 0.41595448175121497,
+       "scr_dir1_threshold_100": 0.14851469372363157, "scr_metric_threshold_100": 0.13390327372133182, "scr_dir2_threshold_100": 0.13390327372133182,
+       "scr_dir1_threshold_500": -0.59405936503946, "scr_metric_threshold_500": 0.14814827393614627, "scr_dir2_threshold_500": 0.14814827393614627
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+       "scr_dir1_threshold_2": 0.5714285714285714, "scr_metric_threshold_2": 0.037974794330044616, "scr_dir2_threshold_2": 0.037974794330044616,
+       "scr_dir1_threshold_5": 0.5714285714285714, "scr_metric_threshold_5": 0.08354442680784264, "scr_dir2_threshold_5": 0.08354442680784264,
+       "scr_dir1_threshold_10": 0.380952065584007, "scr_metric_threshold_10": 0.1265822956719035, "scr_dir2_threshold_10": 0.1265822956719035,
+       "scr_dir1_threshold_20": 0.36507925995657375, "scr_metric_threshold_20": 0.20759495886600893, "scr_dir2_threshold_20": 0.20759495886600893,
+       "scr_dir1_threshold_50": 0.2857142857142857, "scr_metric_threshold_50": 0.32151904006050397, "scr_dir2_threshold_50": 0.32151904006050397,
+       "scr_dir1_threshold_100": 0.26984148008685244, "scr_metric_threshold_100": 0.3392406308675674, "scr_dir2_threshold_100": 0.3392406308675674,
+       "scr_dir1_threshold_500": -1.3492055082240189, "scr_metric_threshold_500": 0.030379805284471813, "scr_dir2_threshold_500": 0.030379805284471813
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+       "scr_dir1_threshold_2": 0.2362202950571313, "scr_metric_threshold_2": 0.0591717176312832, "scr_dir2_threshold_2": 0.0591717176312832,
+       "scr_dir1_threshold_5": 0.24409421102676765, "scr_metric_threshold_5": 0.1301776024437199, "scr_dir2_threshold_5": 0.1301776024437199,
+       "scr_dir1_threshold_10": 0.19685024588094274, "scr_metric_threshold_10": 0.23668651783492653, "scr_dir2_threshold_10": 0.23668651783492653,
+       "scr_dir1_threshold_20": 0.14960628073511784, "scr_metric_threshold_20": 0.3254438297641966, "scr_dir2_threshold_20": 0.3254438297641966,
+       "scr_dir1_threshold_50": 0.17322802864402692, "scr_metric_threshold_50": 0.34023671508574166, "scr_dir2_threshold_50": 0.34023671508574166,
+       "scr_dir1_threshold_100": 0.5511811577946465, "scr_metric_threshold_100": 0.42899420336011485, "scr_dir2_threshold_100": 0.42899420336011485,
+       "scr_dir1_threshold_500": 0.29133864550059935, "scr_metric_threshold_500": 0.17751480020364332, "scr_dir2_threshold_500": 0.17751480020364332
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+       "scr_dir1_threshold_2": 0.05464474822904205, "scr_metric_threshold_2": 0.05078128637978211, "scr_dir2_threshold_2": 0.05078128637978211,
+       "scr_dir1_threshold_5": 0.07650271266233713, "scr_metric_threshold_5": 0.12890643189891055, "scr_dir2_threshold_5": 0.12890643189891055,
+       "scr_dir1_threshold_10": 0.06557389329988525, "scr_metric_threshold_10": 0.21093754365573852, "scr_dir2_threshold_10": 0.21093754365573852,
+       "scr_dir1_threshold_20": 0.08196712234356307, "scr_metric_threshold_20": 0.4140624563442615, "scr_dir2_threshold_20": 0.4140624563442615,
+       "scr_dir1_threshold_50": 0.04918033854781611, "scr_metric_threshold_50": 0.5742187136202179, "scr_dir2_threshold_50": 0.5742187136202179,
+       "scr_dir1_threshold_100": 0.01092881936245188, "scr_metric_threshold_100": 0.6757812863797821, "scr_dir2_threshold_100": 0.6757812863797821,
+       "scr_dir1_threshold_500": -0.00546440968122594, "scr_metric_threshold_500": 0.8007812863797821, "scr_dir2_threshold_500": 0.8007812863797821
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+       "scr_dir1_threshold_2": 0.10256400851337757, "scr_metric_threshold_2": 0.07258066454365199, "scr_dir2_threshold_2": 0.07258066454365199,
+       "scr_dir1_threshold_5": 0.21538460127700665, "scr_metric_threshold_5": 0.08870972781349516, "scr_dir2_threshold_5": 0.08870972781349516,
+       "scr_dir1_threshold_10": 0.27179474482639304, "scr_metric_threshold_10": 0.10483879108333834, "scr_dir2_threshold_10": 0.10483879108333834,
+       "scr_dir1_threshold_20": 0.28717946836934216, "scr_metric_threshold_20": 0.20967734182539205, "scr_dir2_threshold_20": 0.20967734182539205,
+       "scr_dir1_threshold_50": 0.37948719829732447, "scr_metric_threshold_50": 0.2943548639067476, "scr_dir2_threshold_50": 0.2943548639067476,
+       "scr_dir1_threshold_100": 0.35384589050412385, "scr_metric_threshold_100": 0.3709677341825392, "scr_dir2_threshold_100": 0.3709677341825392,
+       "scr_dir1_threshold_500": 0.4923074853960973, "scr_metric_threshold_500": 0.5927419335456348, "scr_dir2_threshold_500": 0.5927419335456348
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+       "scr_dir1_threshold_2": 0.11764715401326675, "scr_metric_threshold_2": 0.04424774326540074, "scr_dir2_threshold_2": 0.04424774326540074,
+       "scr_dir1_threshold_5": 0.28054313043032664, "scr_metric_threshold_5": 0.15486710142890261, "scr_dir2_threshold_5": 0.15486710142890261,
+       "scr_dir1_threshold_10": 0.416289597641706, "scr_metric_threshold_10": 0.24336285169705757, "scr_dir2_threshold_10": 0.24336285169705757,
+       "scr_dir1_threshold_20": 0.4841628312473957, "scr_metric_threshold_20": 0.4115043816005218, "scr_dir2_threshold_20": 0.4115043816005218,
+       "scr_dir1_threshold_50": 0.5203619970521325, "scr_metric_threshold_50": 0.6061945047157558, "scr_dir2_threshold_50": 0.6061945047157558,
+       "scr_dir1_threshold_100": 0.49321275755070776, "scr_metric_threshold_100": 0.668141661772141, "scr_dir2_threshold_100": 0.668141661772141,
+       "scr_dir1_threshold_500": 0.4479639351469146, "scr_metric_threshold_500": 0.6548672332975793, "scr_dir2_threshold_500": 0.6548672332975793
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
+       "scr_dir1_threshold_2": 0.05579403094120067, "scr_metric_threshold_2": 0.05579403094120067, "scr_dir2_threshold_2": 0.06190480515528994,
+       "scr_dir1_threshold_5": 0.1416308441960435, "scr_metric_threshold_5": 0.1416308441960435, "scr_dir2_threshold_5": 0.10476195612190681,
+       "scr_dir1_threshold_10": 0.20171666463720986, "scr_metric_threshold_10": 0.20171666463720986, "scr_dir2_threshold_10": 0.1333332954891213,
+       "scr_dir1_threshold_20": 0.2618024850783763, "scr_metric_threshold_20": 0.2618024850783763, "scr_dir2_threshold_20": 0.19523810064441124,
+       "scr_dir1_threshold_50": 0.29613731270586624, "scr_metric_threshold_50": 0.29613731270586624, "scr_dir2_threshold_50": 0.2666665909782426,
+       "scr_dir1_threshold_100": 0.29613731270586624, "scr_metric_threshold_100": 0.29613731270586624, "scr_dir2_threshold_100": 0.2666665909782426,
+       "scr_dir1_threshold_500": 0.433476367401944, "scr_metric_threshold_500": 0.433476367401944, "scr_dir2_threshold_500": 0.4047617290566346
+     }
+   ],
+   "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
+   "sae_lens_id": "layer_12/width_65k/canonical",
+   "sae_lens_release_id": "gemma-scope-2b-pt-res-canonical",
+   "sae_lens_version": "4.4.1",
+   "eval_result_unstructured": null
+ }
results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_19_width_16k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
+ {
+   "eval_type_id": "scr",
+   "eval_config": {
+     "random_seed": 42,
+     "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
+     "perform_scr": true,
+     "early_stopping_patience": 20,
+     "train_set_size": 4000,
+     "test_set_size": 1000,
+     "context_length": 128,
+     "probe_train_batch_size": 16,
+     "probe_test_batch_size": 500,
+     "probe_epochs": 20,
+     "probe_lr": 0.001,
+     "probe_l1_penalty": 0.001,
+     "sae_batch_size": 125,
+     "llm_batch_size": 32,
+     "llm_dtype": "bfloat16",
+     "model_name": "gemma-2-2b",
+     "n_values": [2, 5, 10, 20, 50, 100, 500],
+     "column1_vals_lookup": {
+       "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
+       "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
+     }
+   },
+   "eval_id": "341c427b-e726-4a96-a136-d7b22d90a964",
+   "datetime_epoch_millis": 1732143073788,
+   "eval_result_metrics": {
+     "scr_metrics": {
+       "scr_dir1_threshold_2": 0.29413948593556816, "scr_metric_threshold_2": 0.2044518234593106, "scr_dir2_threshold_2": 0.2044518234593106,
+       "scr_dir1_threshold_5": 0.35460829340921973, "scr_metric_threshold_5": 0.25725384227387843, "scr_dir2_threshold_5": 0.25725384227387843,
+       "scr_dir1_threshold_10": 0.32818072868003084, "scr_metric_threshold_10": 0.33237747277750745, "scr_dir2_threshold_10": 0.33237747277750745,
+       "scr_dir1_threshold_20": 0.17427830458714275, "scr_metric_threshold_20": 0.411792574033676, "scr_dir2_threshold_20": 0.411792574033676,
+       "scr_dir1_threshold_50": 0.18657935917538992, "scr_metric_threshold_50": 0.49355999028482206, "scr_dir2_threshold_50": 0.49355999028482206,
+       "scr_dir1_threshold_100": 0.008516299851019031, "scr_metric_threshold_100": 0.495451665386671, "scr_dir2_threshold_100": 0.495451665386671,
+       "scr_dir1_threshold_500": -1.3263589317627682, "scr_metric_threshold_500": 0.4131513813373891, "scr_dir2_threshold_500": 0.4131513813373891
+     }
+   },
+   "eval_result_details": [
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+       "scr_dir1_threshold_2": 0.27941247367097016, "scr_metric_threshold_2": 0.06835444871669703, "scr_dir2_threshold_2": 0.06835444871669703,
+       "scr_dir1_threshold_5": 0.39705886220023473, "scr_metric_threshold_5": 0.08101266319410545, "scr_dir2_threshold_5": 0.08101266319410545,
+       "scr_dir1_threshold_10": 0.3676474842025821, "scr_metric_threshold_10": 0.08860765223967824, "scr_dir2_threshold_10": 0.08860765223967824,
+       "scr_dir1_threshold_20": 0.3235295406674491, "scr_metric_threshold_20": 0.11139246847857727, "scr_dir2_threshold_20": 0.11139246847857727,
+       "scr_dir1_threshold_50": 0.39705886220023473, "scr_metric_threshold_50": 0.2050633461500911, "scr_dir2_threshold_50": 0.2050633461500911,
+       "scr_dir1_threshold_100": 0.4117654277377151, "scr_metric_threshold_100": 0.37215189797013765, "scr_dir2_threshold_100": 0.37215189797013765,
+       "scr_dir1_threshold_500": -3.4264693636592334, "scr_metric_threshold_500": 0.40759492868644503, "scr_dir2_threshold_500": 0.40759492868644503
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+       "scr_dir1_threshold_2": 0.3513511191443136, "scr_metric_threshold_2": 0.26176464091535895, "scr_dir2_threshold_2": 0.26176464091535895,
+       "scr_dir1_threshold_5": 0.3783783348395588, "scr_metric_threshold_5": 0.36470588441538543, "scr_dir2_threshold_5": 0.36470588441538543,
+       "scr_dir1_threshold_10": 0.34234222623882343, "scr_metric_threshold_10": 0.4970588616846304, "scr_dir2_threshold_10": 0.4970588616846304,
+       "scr_dir1_threshold_20": 0.3513511191443136, "scr_metric_threshold_20": 0.6029410681922649, "scr_dir2_threshold_20": 0.6029410681922649,
+       "scr_dir1_threshold_50": 0.297297224732598, "scr_metric_threshold_50": 0.7029411733769219, "scr_dir2_threshold_50": 0.7029411733769219,
+       "scr_dir1_threshold_100": 0.2792794389216177, "scr_metric_threshold_100": 0.6235293870153749, "scr_dir2_threshold_100": 0.6235293870153749,
+       "scr_dir1_threshold_500": -0.7747749924688727, "scr_metric_threshold_500": 0.4441176707769324, "scr_dir2_threshold_500": 0.4441176707769324
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+       "scr_dir1_threshold_2": 0.3518516065653388, "scr_metric_threshold_2": 0.1102940145248341, "scr_dir2_threshold_2": 0.1102940145248341,
+       "scr_dir1_threshold_5": 0.4814806229786858, "scr_metric_threshold_5": 0.18382350363120853, "scr_dir2_threshold_5": 0.18382350363120853,
+       "scr_dir1_threshold_10": 0.40740753005066394, "scr_metric_threshold_10": 0.2965686131284492, "scr_dir2_threshold_10": 0.2965686131284492,
+       "scr_dir1_threshold_20": -0.07407419671733059, "scr_metric_threshold_20": 0.38970583938534753, "scr_dir2_threshold_20": 0.38970583938534753,
+       "scr_dir1_threshold_50": -0.05555592348532512, "scr_metric_threshold_50": 0.48774510949724065, "scr_dir2_threshold_50": 0.48774510949724065,
+       "scr_dir1_threshold_100": -0.9259258032826694, "scr_metric_threshold_100": 0.24509795614500524, "scr_dir2_threshold_100": 0.24509795614500524,
+       "scr_dir1_threshold_500": -5.1111096393920326, "scr_metric_threshold_500": -0.05637255474862033, "scr_dir2_threshold_500": -0.05637255474862033
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+       "scr_dir1_threshold_2": 0.27343766007104126, "scr_metric_threshold_2": 0.21791046422583005, "scr_dir2_threshold_2": 0.21791046422583005,
+       "scr_dir1_threshold_5": 0.32812491268852295, "scr_metric_threshold_5": 0.31343285706558716, "scr_dir2_threshold_5": 0.31343285706558716,
+       "scr_dir1_threshold_10": 0.28125005820765137, "scr_metric_threshold_10": 0.4447760360175704, "scr_dir2_threshold_10": 0.4447760360175704,
+       "scr_dir1_threshold_20": 0.046874854480871565, "scr_metric_threshold_20": 0.5432836426369234, "scr_dir2_threshold_20": 0.5432836426369234,
+       "scr_dir1_threshold_50": 0.07031251455191284, "scr_metric_threshold_50": 0.5910447500946557, "scr_dir2_threshold_50": 0.5910447500946557,
+       "scr_dir1_threshold_100": 0.10937497089617432, "scr_metric_threshold_100": 0.5253730716565179, "scr_dir2_threshold_100": 0.5253730716565179,
+       "scr_dir1_threshold_500": -0.2968748544808716, "scr_metric_threshold_500": 0.5283581075118213, "scr_dir2_threshold_500": 0.5283581075118213
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+       "scr_dir1_threshold_2": 0.07142869813909379, "scr_metric_threshold_2": 0.4317342134585075, "scr_dir2_threshold_2": 0.4317342134585075,
+       "scr_dir1_threshold_5": 0.11904783023182298, "scr_metric_threshold_5": 0.509225082511735, "scr_dir2_threshold_5": 0.509225082511735,
+       "scr_dir1_threshold_10": 0.13095243586027397, "scr_metric_threshold_10": 0.5276752475352052, "scr_dir2_threshold_10": 0.5276752475352052,
+       "scr_dir1_threshold_20": 0.15476200190663858, "scr_metric_threshold_20": 0.5719557755575233, "scr_dir2_threshold_20": 0.5719557755575233,
+       "scr_dir1_threshold_50": -0.2678571745347734, "scr_metric_threshold_50": 0.7084871286971922, "scr_dir2_threshold_50": 0.7084871286971922,
+       "scr_dir1_threshold_100": -0.18452387076722868, "scr_metric_threshold_100": 0.7306272827366931, "scr_dir2_threshold_100": 0.7306272827366931,
+       "scr_dir1_threshold_500": -0.24999991130263435, "scr_metric_threshold_500": 0.6605166116588432, "scr_dir2_threshold_500": 0.6605166116588432
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+       "scr_dir1_threshold_2": 0.2923977240088898, "scr_metric_threshold_2": 0.056391140868614346, "scr_dir2_threshold_2": 0.056391140868614346,
+       "scr_dir1_threshold_5": 0.32163746155326217, "scr_metric_threshold_5": 0.0864661620439461, "scr_dir2_threshold_5": 0.0864661620439461,
+       "scr_dir1_threshold_10": 0.32163746155326217, "scr_metric_threshold_10": 0.1691729744506738, "scr_dir2_threshold_10": 0.1691729744506738,
+       "scr_dir1_threshold_20": 0.38011693664200685, "scr_metric_threshold_20": 0.23684216423094331, "scr_dir2_threshold_20": 0.23684216423094331,
+       "scr_dir1_threshold_50": 0.39766091859469677, "scr_metric_threshold_50": 0.30075200437399446, "scr_dir2_threshold_50": 0.30075200437399446,
+       "scr_dir1_threshold_100": 0.49707623538466256, "scr_metric_threshold_100": 0.38721816641794055, "scr_dir2_threshold_100": 0.38721816641794055,
+       "scr_dir1_threshold_500": 0.4853801312278138, "scr_metric_threshold_500": 0.5037593496372184, "scr_dir2_threshold_500": 0.5037593496372184
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+       "scr_dir1_threshold_2": 0.4159289934835792, "scr_metric_threshold_2": 0.3647416600600322, "scr_dir2_threshold_2": 0.3647416600600322,
+       "scr_dir1_threshold_5": 0.4601767250792003, "scr_metric_threshold_5": 0.4133737968557734, "scr_dir2_threshold_5": 0.4133737968557734,
+       "scr_dir1_threshold_10": 0.442477843430779, "scr_metric_threshold_10": 0.5015196467056929, "scr_dir2_threshold_10": 0.5015196467056929,
+       "scr_dir1_threshold_20": -0.1681414855582738, "scr_metric_threshold_20": 0.6170213112877068, "scr_dir2_threshold_20": 0.6170213112877068,
+       "scr_dir1_threshold_50": 0.1681414855582738, "scr_metric_threshold_50": 0.7082066357181472, "scr_dir2_threshold_50": 0.7082066357181472,
+       "scr_dir1_threshold_100": -0.6814159105318737, "scr_metric_threshold_100": 0.7386017438616274, "scr_dir2_threshold_100": 0.7386017438616274,
+       "scr_dir1_threshold_500": -1.7610614054243374, "scr_metric_threshold_500": 0.28267475937115466, "scr_dir2_threshold_500": 0.28267475937115466
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
+       "scr_dir1_threshold_2": 0.3173076124013192, "scr_metric_threshold_2": 0.1244240049046106, "scr_dir2_threshold_2": 0.1244240049046106,
+       "scr_dir1_threshold_5": 0.35096159770247026, "scr_metric_threshold_5": 0.10599078847328613, "scr_dir2_threshold_5": 0.10599078847328613,
+       "scr_dir1_threshold_10": 0.3317307898962106, "scr_metric_threshold_10": 0.13364075045815918, "scr_dir2_threshold_10": 0.13364075045815918,
+       "scr_dir1_threshold_20": 0.37980766613146666, "scr_metric_threshold_20": 0.22119832250012086, "scr_dir2_threshold_20": 0.22119832250012086,
+       "scr_dir1_threshold_50": 0.4855769657855018, "scr_metric_threshold_50": 0.24423977437033326, "scr_dir2_threshold_50": 0.24423977437033326,
+       "scr_dir1_threshold_100": 0.5624999104497543, "scr_metric_threshold_100": 0.3410138172900708, "scr_dir2_threshold_100": 0.3410138172900708,
+       "scr_dir1_threshold_500": 0.5240385813980212, "scr_metric_threshold_500": 0.5345621778053186, "scr_dir2_threshold_500": 0.5345621778053186
+     }
+   ],
+   "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
+   "sae_lens_id": "layer_19/width_16k/canonical",
+   "sae_lens_release_id": "gemma-scope-2b-pt-res-canonical",
+   "sae_lens_version": "4.4.1",
+   "eval_result_unstructured": null
+ }
results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_19_width_65k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
+ {
+   "eval_type_id": "scr",
+   "eval_config": {
+     "random_seed": 42,
+     "dataset_names": ["LabHC/bias_in_bios_class_set1", "canrager/amazon_reviews_mcauley_1and5"],
+     "perform_scr": true,
+     "early_stopping_patience": 20,
+     "train_set_size": 4000,
+     "test_set_size": 1000,
+     "context_length": 128,
+     "probe_train_batch_size": 16,
+     "probe_test_batch_size": 500,
+     "probe_epochs": 20,
+     "probe_lr": 0.001,
+     "probe_l1_penalty": 0.001,
+     "sae_batch_size": 125,
+     "llm_batch_size": 32,
+     "llm_dtype": "bfloat16",
+     "model_name": "gemma-2-2b",
+     "n_values": [2, 5, 10, 20, 50, 100, 500],
+     "column1_vals_lookup": {
+       "LabHC/bias_in_bios_class_set1": [["professor", "nurse"], ["architect", "journalist"], ["surgeon", "psychologist"], ["attorney", "teacher"]],
+       "canrager/amazon_reviews_mcauley_1and5": [["Books", "CDs_and_Vinyl"], ["Software", "Electronics"], ["Pet_Supplies", "Office_Products"], ["Industrial_and_Scientific", "Toys_and_Games"]]
+     }
+   },
+   "eval_id": "dc0c6a5a-b9b4-48bc-b43c-b57dff428ad8",
+   "datetime_epoch_millis": 1732180292296,
+   "eval_result_metrics": {
+     "scr_metrics": {
+       "scr_dir1_threshold_2": 0.26540917872531267, "scr_metric_threshold_2": 0.17676805731304376, "scr_dir2_threshold_2": 0.17676805731304376,
+       "scr_dir1_threshold_5": 0.31976397682531105, "scr_metric_threshold_5": 0.23798548405632222, "scr_dir2_threshold_5": 0.23798548405632222,
+       "scr_dir1_threshold_10": 0.29888250848627496, "scr_metric_threshold_10": 0.2933723593138509, "scr_dir2_threshold_10": 0.2933723593138509,
+       "scr_dir1_threshold_20": 0.2286730228944795, "scr_metric_threshold_20": 0.36328865319113113, "scr_dir2_threshold_20": 0.36328865319113113,
+       "scr_dir1_threshold_50": 0.23066294403101006, "scr_metric_threshold_50": 0.4482671808256601, "scr_dir2_threshold_50": 0.4482671808256601,
+       "scr_dir1_threshold_100": 0.15686999236588467, "scr_metric_threshold_100": 0.4633773116392915, "scr_dir2_threshold_100": 0.4633773116392915,
+       "scr_dir1_threshold_500": -0.9192007624881388, "scr_metric_threshold_500": 0.45229140201993157, "scr_dir2_threshold_500": 0.45229140201993157
+     }
+   },
+   "eval_result_details": [
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
+       "scr_dir1_threshold_2": 0.3235295406674491, "scr_metric_threshold_2": 0.05063300880745302, "scr_dir2_threshold_2": 0.05063300880745302,
+       "scr_dir1_threshold_5": 0.4411768057353677, "scr_metric_threshold_5": 0.10379747943300446, "scr_dir2_threshold_5": 0.10379747943300446,
+       "scr_dir1_threshold_10": 0.38235317320140844, "scr_metric_threshold_10": 0.09620264128525105, "scr_dir2_threshold_10": 0.09620264128525105,
+       "scr_dir1_threshold_20": 0.4411768057353677, "scr_metric_threshold_20": 0.1265822956719035, "scr_dir2_threshold_20": 0.1265822956719035,
+       "scr_dir1_threshold_50": 0.455882494734194, "scr_metric_threshold_50": 0.13417728471747628, "scr_dir2_threshold_50": 0.13417728471747628,
+       "scr_dir1_threshold_100": 0.39705886220023473, "scr_metric_threshold_100": 0.21772156062749953, "scr_dir2_threshold_100": 0.21772156062749953,
+       "scr_dir1_threshold_500": -2.323528664128795, "scr_metric_threshold_500": 0.42784813220942625, "scr_dir2_threshold_500": 0.42784813220942625
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
+       "scr_dir1_threshold_2": 0.36036054902857856, "scr_metric_threshold_2": 0.20294117337692186, "scr_dir2_threshold_2": 0.20294117337692186,
+       "scr_dir1_threshold_5": 0.44144165913553934, "scr_metric_threshold_5": 0.2941176883077086, "scr_dir2_threshold_5": 0.2941176883077086,
+       "scr_dir1_threshold_10": 0.41441444344029416, "scr_metric_threshold_10": 0.3999998948153431, "scr_dir2_threshold_10": 0.3999998948153431,
+       "scr_dir1_threshold_20": 0.4594594449465196, "scr_metric_threshold_20": 0.5323528720845881, "scr_dir2_threshold_20": 0.5323528720845881,
+       "scr_dir1_threshold_50": 0.4684683378520097, "scr_metric_threshold_50": 0.6647058493538331, "scr_dir2_threshold_50": 0.6647058493538331,
+       "scr_dir1_threshold_100": 0.4864866606417648, "scr_metric_threshold_100": 0.526470595453849, "scr_dir2_threshold_100": 0.526470595453849,
+       "scr_dir1_threshold_500": -0.07207221720147068, "scr_metric_threshold_500": 0.4117646233845828, "scr_dir2_threshold_500": 0.4117646233845828
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
+       "scr_dir1_threshold_2": 0.4444440765146749, "scr_metric_threshold_2": 0.029411678770695084, "scr_dir2_threshold_2": 0.029411678770695084,
+       "scr_dir1_threshold_5": 0.5, "scr_metric_threshold_5": 0.09558817513948656, "scr_dir2_threshold_5": 0.09558817513948656,
+       "scr_dir1_threshold_10": 0.4444440765146749, "scr_metric_threshold_10": 0.14215678826793574, "scr_dir2_threshold_10": 0.14215678826793574,
+       "scr_dir1_threshold_20": 0.4629623497466804, "scr_metric_threshold_20": 0.21813722625689835, "scr_dir2_threshold_20": 0.21813722625689835,
+       "scr_dir1_threshold_50": 0.4814806229786858, "scr_metric_threshold_50": 0.3382351824019036, "scr_dir2_threshold_50": 0.3382351824019036,
+       "scr_dir1_threshold_100": 0.3518516065653388, "scr_metric_threshold_100": 0.26960773715052394, "scr_dir2_threshold_100": 0.26960773715052394,
+       "scr_dir1_threshold_500": -4.537035442674703, "scr_metric_threshold_500": 0.0024509488825881975, "scr_dir2_threshold_500": 0.0024509488825881975
+     },
+     {
+       "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
+       "scr_dir1_threshold_2": 0.3437501746229541, "scr_metric_threshold_2": 0.17910446433400803, "scr_dir2_threshold_2": 0.17910446433400803,
+       "scr_dir1_threshold_5": 0.32031251455191284, "scr_metric_threshold_5": 0.2925372502298782, "scr_dir2_threshold_5": 0.2925372502298782,
+       "scr_dir1_threshold_10": 0.2656247962732202, "scr_metric_threshold_10": 0.3910446789249387, "scr_dir2_threshold_10": 0.3910446789249387,
+       "scr_dir1_threshold_20": 0.19531274738251833, "scr_metric_threshold_20": 0.45970139321838, "scr_dir2_threshold_20": 0.45970139321838,
+       "scr_dir1_threshold_50": 0.046874854480871565, "scr_metric_threshold_50": 0.5671641074036433, "scr_dir2_threshold_50": 0.5671641074036433,
+       "scr_dir1_threshold_100": 0.21093754365573852, "scr_metric_threshold_100": 0.5313433212914173, "scr_dir2_threshold_100": 0.5313433212914173,
+       "scr_dir1_threshold_500": -0.05468725261748167, "scr_metric_threshold_500": 0.3373134997565996, "scr_dir2_threshold_500": 0.3373134997565996
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
+       "scr_dir1_threshold_2": 0.053571434906954686, "scr_metric_threshold_2": 0.468634763448764, "scr_dir2_threshold_2": 0.468634763448764,
+       "scr_dir1_threshold_5": 0.06547639532486829, "scr_metric_threshold_5": 0.4833949394562033, "scr_dir2_threshold_5": 0.4833949394562033,
+       "scr_dir1_threshold_10": 0.029761868860590093, "scr_metric_threshold_10": 0.5276752475352052, "scr_dir2_threshold_10": 0.5276752475352052,
+       "scr_dir1_threshold_20": 0.029761868860590093, "scr_metric_threshold_20": 0.5719557755575233, "scr_dir2_threshold_20": 0.5719557755575233,
+       "scr_dir1_threshold_50": 0.10714286981390937, "scr_metric_threshold_50": 0.6715867986502518, "scr_dir2_threshold_50": 0.6715867986502518,
+       "scr_dir1_threshold_100": -0.25595221411685987, "scr_metric_threshold_100": 0.7084871286971922, "scr_dir2_threshold_100": 0.7084871286971922,
+       "scr_dir1_threshold_500": -0.3630950839307692, "scr_metric_threshold_500": 0.7749078107590112, "scr_dir2_threshold_500": 0.7749078107590112
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
+       "scr_dir1_threshold_2": 0.1871345294230828, "scr_metric_threshold_2": 0.026315895615697978, "scr_dir2_threshold_2": 0.026315895615697978,
+       "scr_dir1_threshold_5": 0.21052638917161398, "scr_metric_threshold_5": 0.03007524525291637, "scr_dir2_threshold_5": 0.03007524525291637,
+       "scr_dir1_threshold_10": 0.26315798646451743, "scr_metric_threshold_10": 0.07894746276950931, "scr_dir2_threshold_10": 0.07894746276950931,
+       "scr_dir1_threshold_20": 0.3333335657101109, "scr_metric_threshold_20": 0.13533837956053904, "scr_dir2_threshold_20": 0.13533837956053904,
+       "scr_dir1_threshold_50": 0.31578958375742094, "scr_metric_threshold_50": 0.22556389124170353, "scr_dir2_threshold_50": 0.22556389124170353,
+       "scr_dir1_threshold_100": 0.3742690588461656, "scr_metric_threshold_100": 0.3308270255493262, "scr_dir2_threshold_100": 0.3308270255493262,
+       "scr_dir1_threshold_500": 0.403508796390538, "scr_metric_threshold_500": 0.40225578904439874, "scr_dir2_threshold_500": 0.40225578904439874
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
+       "scr_dir1_threshold_2": 0.25663694874951604, "scr_metric_threshold_2": 0.34650445023863596, "scr_dir2_threshold_2": 0.34650445023863596,
+       "scr_dir1_threshold_5": 0.3628318210637474, "scr_metric_threshold_5": 0.5075987770358701, "scr_dir2_threshold_5": 0.5075987770358701,
+       "scr_dir1_threshold_10": 0.3893806710109472, "scr_metric_threshold_10": 0.5957446268857896, "scr_dir2_threshold_10": 0.5957446268857896,
+       "scr_dir1_threshold_20": -0.3185840894681263, "scr_metric_threshold_20": 0.668692922663969, "scr_dir2_threshold_20": 0.668692922663969,
+       "scr_dir1_threshold_50": -0.3716812618879581, "scr_metric_threshold_50": 0.7082066357181472, "scr_dir2_threshold_50": 0.7082066357181472,
+       "scr_dir1_threshold_100": -0.699114792180295, "scr_metric_threshold_100": 0.7446808741918045, "scr_dir2_threshold_100": 0.7446808741918045,
+       "scr_dir1_threshold_500": -0.91150400933419, "scr_metric_threshold_500": 0.6534953685922289, "scr_dir2_threshold_500": 0.6534953685922289
+     },
+     {
+       "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
+       "scr_dir1_threshold_2": 0.15384617588929125, "scr_metric_threshold_2": 0.11059902391217406, "scr_dir2_threshold_2": 0.11059902391217406,
+       "scr_dir1_threshold_5": 0.21634622961943867, "scr_metric_threshold_5": 0.09677431759551026, "scr_dir2_threshold_5": 0.09677431759551026,
+       "scr_dir1_threshold_10": 0.20192305212454734, "scr_metric_threshold_10": 0.11520753402683473, "scr_dir2_threshold_10": 0.11520753402683473,
+       "scr_dir1_threshold_20": 0.22596149024217538, "scr_metric_threshold_20": 0.1935483605152478, "scr_dir2_threshold_20": 0.1935483605152478,
+       "scr_dir1_threshold_50": 0.34134605051894723, "scr_metric_threshold_50": 0.2764976971183215, "scr_dir2_threshold_50": 0.2764976971183215,
+       "scr_dir1_threshold_100": 0.38942321331498964, "scr_metric_threshold_100": 0.37788025015271975, "scr_dir2_threshold_100": 0.37788025015271975,
+       "scr_dir1_threshold_500": 0.5048077735917615, "scr_metric_threshold_500": 0.6082950435306165, "scr_dir2_threshold_500": 0.6082950435306165
+     }
+   ],
+   "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
+   "sae_lens_id": "layer_19/width_65k/canonical",
+   "sae_lens_release_id": "gemma-scope-2b-pt-res-canonical",
+   "sae_lens_version": "4.4.1",
+   "eval_result_unstructured": null
+ }
results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_5_width_16k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "341c427b-e726-4a96-a136-d7b22d90a964",
72
+ "datetime_epoch_millis": 1732137842890,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.16928244116508742,
76
+ "scr_metric_threshold_2": 0.06267078848432106,
77
+ "scr_dir2_threshold_2": 0.06267078848432106,
78
+ "scr_dir1_threshold_5": 0.2870248620647946,
79
+ "scr_metric_threshold_5": 0.11518038713162128,
80
+ "scr_dir2_threshold_5": 0.11518038713162128,
81
+ "scr_dir1_threshold_10": 0.3287743105577888,
82
+ "scr_metric_threshold_10": 0.15454135593783394,
83
+ "scr_dir2_threshold_10": 0.15454135593783394,
84
+ "scr_dir1_threshold_20": 0.3626534857288454,
85
+ "scr_metric_threshold_20": 0.19504998571844387,
86
+ "scr_dir2_threshold_20": 0.19504998571844387,
87
+ "scr_dir1_threshold_50": 0.3149465830447075,
88
+ "scr_metric_threshold_50": 0.2532404087435781,
89
+ "scr_dir2_threshold_50": 0.2532404087435781,
90
+ "scr_dir1_threshold_100": 0.25099087778196677,
91
+ "scr_metric_threshold_100": 0.29652032888511043,
92
+ "scr_dir2_threshold_100": 0.29652032888511043,
93
+ "scr_dir1_threshold_500": 0.057968281987360454,
94
+ "scr_metric_threshold_500": 0.34336264749848006,
95
+ "scr_dir2_threshold_500": 0.34336264749848006
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.32142872348125223,
102
+ "scr_metric_threshold_2": 0.01643199319891413,
103
+ "scr_dir2_threshold_2": 0.01643199319891413,
104
+ "scr_dir1_threshold_5": 0.5357138295562434,
105
+ "scr_metric_threshold_5": 0.028169071233707016,
106
+ "scr_dir2_threshold_5": 0.028169071233707016,
107
+ "scr_dir1_threshold_10": 0.46428617044375664,
108
+ "scr_metric_threshold_10": 0.04694831213917153,
109
+ "scr_dir2_threshold_10": 0.04694831213917153,
110
+ "scr_dir1_threshold_20": 0.5357138295562434,
111
+ "scr_metric_threshold_20": 0.05164322730329278,
112
+ "scr_dir2_threshold_20": 0.05164322730329278,
113
+ "scr_dir1_threshold_50": 0.46428617044375664,
114
+ "scr_metric_threshold_50": 0.09389676419534988,
115
+ "scr_dir2_threshold_50": 0.09389676419534988,
116
+ "scr_dir1_threshold_100": 0.607143617406261,
117
+ "scr_metric_threshold_100": 0.1267606106761713,
118
+ "scr_dir2_threshold_100": 0.1267606106761713,
119
+ "scr_dir1_threshold_500": -0.2857148939250088,
120
+ "scr_metric_threshold_500": 0.24178400340054293,
121
+ "scr_dir2_threshold_500": 0.24178400340054293
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3538461961689801,
126
+ "scr_metric_threshold_2": 0.05154649469395781,
127
+ "scr_dir2_threshold_2": 0.05154649469395781,
128
+ "scr_dir1_threshold_5": 0.5230766268171394,
129
+ "scr_metric_threshold_5": 0.0902062504992657,
130
+ "scr_dir2_threshold_5": 0.0902062504992657,
131
+ "scr_dir1_threshold_10": 0.5846152153240797,
132
+ "scr_metric_threshold_10": 0.11597942103613762,
133
+ "scr_dir2_threshold_10": 0.11597942103613762,
134
+ "scr_dir1_threshold_20": 0.6153849680748341,
135
+ "scr_metric_threshold_20": 0.13917530524336513,
136
+ "scr_dir2_threshold_20": 0.13917530524336513,
137
+ "scr_dir1_threshold_50": 0.5692307974459867,
138
+ "scr_metric_threshold_50": 0.21907225680383927,
139
+ "scr_dir2_threshold_50": 0.21907225680383927,
140
+ "scr_dir1_threshold_100": 0.5230766268171394,
141
+ "scr_metric_threshold_100": 0.296391768414455,
142
+ "scr_dir2_threshold_100": 0.296391768414455,
143
+ "scr_dir1_threshold_500": 0.3230773604127943,
144
+ "scr_metric_threshold_500": 0.2474227136703556,
145
+ "scr_dir2_threshold_500": 0.2474227136703556
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.29545460702955034,
150
+ "scr_metric_threshold_2": 0.02290077146305589,
151
+ "scr_dir2_threshold_2": 0.02290077146305589,
152
+ "scr_dir1_threshold_5": 0.3636368562364027,
153
+ "scr_metric_threshold_5": 0.04325689480129207,
154
+ "scr_dir2_threshold_5": 0.04325689480129207,
155
+ "scr_dir1_threshold_10": 0.5,
156
+ "scr_metric_threshold_10": 0.06870231438916767,
157
+ "scr_dir2_threshold_10": 0.06870231438916767,
158
+ "scr_dir1_threshold_20": 0.5681822492068523,
159
+ "scr_metric_threshold_20": 0.09669207877034255,
160
+ "scr_dir2_threshold_20": 0.09669207877034255,
161
+ "scr_dir1_threshold_50": 0.3636368562364027,
162
+ "scr_metric_threshold_50": 0.12213734669245793,
163
+ "scr_dir2_threshold_50": 0.12213734669245793,
164
+ "scr_dir1_threshold_100": 0.15909146326595303,
165
+ "scr_metric_threshold_100": 0.15267175919845252,
166
+ "scr_dir2_threshold_100": 0.15267175919845252,
167
+ "scr_dir1_threshold_500": -0.340908536734047,
168
+ "scr_metric_threshold_500": 0.24681934150973558,
169
+ "scr_dir2_threshold_500": 0.24681934150973558
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.06172831329955738,
174
+ "scr_metric_threshold_2": 0.01612914338360472,
175
+ "scr_dir2_threshold_2": 0.01612914338360472,
176
+ "scr_dir1_threshold_5": 0.27160502003377596,
177
+ "scr_metric_threshold_5": 0.03763440084920587,
178
+ "scr_dir2_threshold_5": 0.03763440084920587,
179
+ "scr_dir1_threshold_10": 0.2839505355217796,
180
+ "scr_metric_threshold_10": 0.07258066454365199,
181
+ "scr_dir2_threshold_10": 0.07258066454365199,
182
+ "scr_dir1_threshold_20": 0.2839505355217796,
183
+ "scr_metric_threshold_20": 0.032258126539686356,
184
+ "scr_dir2_threshold_20": 0.032258126539686356,
185
+ "scr_dir1_threshold_50": 0.1728394244106685,
186
+ "scr_metric_threshold_50": 0.07795709908069459,
187
+ "scr_dir2_threshold_50": 0.07795709908069459,
188
+ "scr_dir1_threshold_100": -0.1358021420871184,
189
+ "scr_metric_threshold_100": 0.11021506539285786,
190
+ "scr_dir2_threshold_100": 0.11021506539285786,
191
+ "scr_dir1_threshold_500": -0.23456773771022588,
192
+ "scr_metric_threshold_500": 0.16397860962566846,
193
+ "scr_dir2_threshold_500": 0.16397860962566846
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.051136382878565693,
198
+ "scr_metric_threshold_2": 0.1689497418629469,
199
+ "scr_dir2_threshold_2": 0.1689497418629469,
200
+ "scr_dir1_threshold_5": 0.06250021166422265,
201
+ "scr_metric_threshold_5": 0.2648402609430977,
202
+ "scr_dir2_threshold_5": 0.2648402609430977,
203
+ "scr_dir1_threshold_10": 0.09659102069568104,
204
+ "scr_metric_threshold_10": 0.3333333333333333,
205
+ "scr_dir2_threshold_10": 0.3333333333333333,
206
+ "scr_dir1_threshold_20": 0.051136382878565693,
207
+ "scr_metric_threshold_20": 0.46118717732868364,
208
+ "scr_dir2_threshold_20": 0.46118717732868364,
209
+ "scr_dir1_threshold_50": 0.005681745061450357,
210
+ "scr_metric_threshold_50": 0.5388128226713164,
211
+ "scr_dir2_threshold_50": 0.5388128226713164,
212
+ "scr_dir1_threshold_100": 0.02272731890855767,
213
+ "scr_metric_threshold_100": 0.5433789730638768,
214
+ "scr_dir2_threshold_100": 0.5433789730638768,
215
+ "scr_dir1_threshold_500": 0.10795451081858175,
216
+ "scr_metric_threshold_500": 0.5342464001114797,
217
+ "scr_dir2_threshold_500": 0.5342464001114797
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.13178308900821112,
222
+ "scr_metric_threshold_2": 0.056451601273808806,
223
+ "scr_dir2_threshold_2": 0.056451601273808806,
224
+ "scr_dir1_threshold_5": 0.1860464149194575,
225
+ "scr_metric_threshold_5": 0.07661287027579163,
226
+ "scr_dir2_threshold_5": 0.07661287027579163,
227
+ "scr_dir1_threshold_10": 0.23255790313645167,
228
+ "scr_metric_threshold_10": 0.11693564862104187,
229
+ "scr_dir2_threshold_10": 0.11693564862104187,
230
+ "scr_dir1_threshold_20": 0.2635657159649414,
231
+ "scr_metric_threshold_20": 0.16935480382142643,
232
+ "scr_dir2_threshold_20": 0.16935480382142643,
233
+ "scr_dir1_threshold_50": 0.23255790313645167,
234
+ "scr_metric_threshold_50": 0.26209673736706124,
235
+ "scr_dir2_threshold_50": 0.26209673736706124,
236
+ "scr_dir1_threshold_100": 0.2868216910991789,
237
+ "scr_metric_threshold_100": 0.3750001802559635,
238
+ "scr_dir2_threshold_100": 0.3750001802559635,
239
+ "scr_dir1_threshold_500": 0.42635661780164225,
240
+ "scr_metric_threshold_500": 0.4838709367301568,
241
+ "scr_dir2_threshold_500": 0.4838709367301568
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.10795447425842195,
246
+ "scr_metric_threshold_2": 0.12875547569614632,
247
+ "scr_dir2_threshold_2": 0.12875547569614632,
248
+ "scr_dir1_threshold_5": 0.25568182780291593,
249
+ "scr_metric_threshold_5": 0.3304721403333562,
250
+ "scr_dir2_threshold_5": 0.3304721403333562,
251
+ "scr_dir1_threshold_10": 0.3238637614379074,
252
+ "scr_metric_threshold_10": 0.35622313314703263,
253
+ "scr_dir2_threshold_10": 0.35622313314703263,
254
+ "scr_dir1_threshold_20": 0.3977274382101544,
255
+ "scr_metric_threshold_20": 0.4291845779019783,
256
+ "scr_dir2_threshold_20": 0.4291845779019783,
257
+ "scr_dir1_threshold_50": 0.4999998306686792,
258
+ "scr_metric_threshold_50": 0.450643781215689,
259
+ "scr_dir2_threshold_50": 0.450643781215689,
260
+ "scr_dir1_threshold_100": 0.26136357094017154,
261
+ "scr_metric_threshold_100": 0.5064378121568897,
262
+ "scr_dir2_threshold_100": 0.5064378121568897,
263
+ "scr_dir1_threshold_500": 0.26136357094017154,
264
+ "scr_metric_threshold_500": 0.5021460226569239,
265
+ "scr_dir2_threshold_500": 0.5021460226569239
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.030927743196160724,
270
+ "scr_metric_threshold_2": 0.04020108630213385,
271
+ "scr_dir2_threshold_2": 0.04020108630213385,
272
+ "scr_dir1_threshold_5": 0.0979381094881989,
273
+ "scr_metric_threshold_5": 0.05025120811725402,
274
+ "scr_dir2_threshold_5": 0.05025120811725402,
275
+ "scr_dir1_threshold_10": 0.14432987790265395,
276
+ "scr_metric_threshold_10": 0.12562802029313505,
277
+ "scr_dir2_threshold_10": 0.12562802029313505,
278
+ "scr_dir1_threshold_20": 0.18556676641739225,
279
+ "scr_metric_threshold_20": 0.18090458883877572,
280
+ "scr_dir2_threshold_20": 0.18090458883877572,
281
+ "scr_dir1_threshold_50": 0.2113399369542642,
282
+ "scr_metric_threshold_50": 0.26130646192221685,
283
+ "scr_dir2_threshold_50": 0.26130646192221685,
284
+ "scr_dir1_threshold_100": 0.28350487590559115,
285
+ "scr_metric_threshold_100": 0.26130646192221685,
286
+ "scr_dir2_threshold_100": 0.26130646192221685,
287
+ "scr_dir1_threshold_500": 0.2061853642949754,
288
+ "scr_metric_threshold_500": 0.3266331522829777,
289
+ "scr_dir2_threshold_500": 0.3266331522829777
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_16k/canonical",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res-canonical",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res-canonical/scr/gemma-scope-2b-pt-res-canonical_layer_5_width_65k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "dc0c6a5a-b9b4-48bc-b43c-b57dff428ad8",
72
+ "datetime_epoch_millis": 1732178382389,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.17054927434795833,
76
+ "scr_metric_threshold_2": 0.03513970463569734,
77
+ "scr_dir2_threshold_2": 0.03513970463569734,
78
+ "scr_dir1_threshold_5": 0.26385538968135,
79
+ "scr_metric_threshold_5": 0.06124801734514245,
80
+ "scr_dir2_threshold_5": 0.06124801734514245,
81
+ "scr_dir1_threshold_10": 0.32588406598056496,
82
+ "scr_metric_threshold_10": 0.10303706088162391,
83
+ "scr_dir2_threshold_10": 0.10303706088162391,
84
+ "scr_dir1_threshold_20": 0.3943029178328679,
85
+ "scr_metric_threshold_20": 0.13371773341360824,
86
+ "scr_dir2_threshold_20": 0.13371773341360824,
87
+ "scr_dir1_threshold_50": 0.35571585777011394,
88
+ "scr_metric_threshold_50": 0.19406306100937196,
89
+ "scr_dir2_threshold_50": 0.19406306100937196,
90
+ "scr_dir1_threshold_100": 0.3685569080790775,
91
+ "scr_metric_threshold_100": 0.249219545805795,
92
+ "scr_dir2_threshold_100": 0.249219545805795,
93
+ "scr_dir1_threshold_500": 0.27959893209126135,
94
+ "scr_metric_threshold_500": 0.3602012624266308,
95
+ "scr_dir2_threshold_500": 0.3602012624266308
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2857148939250088,
102
+ "scr_metric_threshold_2": 0.014084465658350092,
103
+ "scr_dir2_threshold_2": 0.014084465658350092,
104
+ "scr_dir1_threshold_5": 0.4285723408875132,
105
+ "scr_metric_threshold_5": 0.014084465658350092,
106
+ "scr_dir2_threshold_5": 0.014084465658350092,
107
+ "scr_dir1_threshold_10": 0.46428617044375664,
108
+ "scr_metric_threshold_10": 0.028169071233707016,
109
+ "scr_dir2_threshold_10": 0.028169071233707016,
110
+ "scr_dir1_threshold_20": 0.607143617406261,
111
+ "scr_metric_threshold_20": 0.05164322730329278,
112
+ "scr_dir2_threshold_20": 0.05164322730329278,
113
+ "scr_dir1_threshold_50": 0.6428574469625045,
114
+ "scr_metric_threshold_50": 0.07042260812576412,
115
+ "scr_dir2_threshold_50": 0.07042260812576412,
116
+ "scr_dir1_threshold_100": 0.7499989356312345,
117
+ "scr_metric_threshold_100": 0.09389676419534988,
118
+ "scr_dir2_threshold_100": 0.09389676419534988,
119
+ "scr_dir1_threshold_500": 0.32142872348125223,
120
+ "scr_metric_threshold_500": 0.22300476249507842,
121
+ "scr_dir2_threshold_500": 0.22300476249507842
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3384617782908872,
126
+ "scr_metric_threshold_2": 0.03608246947566349,
127
+ "scr_dir2_threshold_2": 0.03608246947566349,
128
+ "scr_dir1_threshold_5": 0.44615362043210616,
129
+ "scr_metric_threshold_5": 0.07474237890118533,
130
+ "scr_dir2_threshold_5": 0.07474237890118533,
131
+ "scr_dir1_threshold_10": 0.5076922089390464,
132
+ "scr_metric_threshold_10": 0.10309283576770166,
133
+ "scr_dir2_threshold_10": 0.10309283576770166,
134
+ "scr_dir1_threshold_20": 0.6000005501967411,
135
+ "scr_metric_threshold_20": 0.11855670736578201,
136
+ "scr_dir2_threshold_20": 0.11855670736578201,
137
+ "scr_dir1_threshold_50": 0.5692307974459867,
138
+ "scr_metric_threshold_50": 0.15721661679130386,
139
+ "scr_dir2_threshold_50": 0.15721661679130386,
140
+ "scr_dir1_threshold_100": 0.5692307974459867,
141
+ "scr_metric_threshold_100": 0.20103094525590054,
142
+ "scr_dir2_threshold_100": 0.20103094525590054,
143
+ "scr_dir1_threshold_500": 0.6153849680748341,
144
+ "scr_metric_threshold_500": 0.3195876526216826,
145
+ "scr_dir2_threshold_500": 0.3195876526216826
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.18181842811820134,
150
+ "scr_metric_threshold_2": 0.012722633961057692,
151
+ "scr_dir2_threshold_2": 0.012722633961057692,
152
+ "scr_dir1_threshold_5": 0.3409098913841544,
153
+ "scr_metric_threshold_5": 0.017811626879176687,
154
+ "scr_dir2_threshold_5": 0.017811626879176687,
155
+ "scr_dir1_threshold_10": 0.45454607029550337,
156
+ "scr_metric_threshold_10": 0.04071239834223257,
157
+ "scr_dir2_threshold_10": 0.04071239834223257,
158
+ "scr_dir1_threshold_20": 0.545455284354604,
159
+ "scr_metric_threshold_20": 0.06361316980528846,
160
+ "scr_dir2_threshold_20": 0.06361316980528846,
161
+ "scr_dir1_threshold_50": 0.4772730351477517,
162
+ "scr_metric_threshold_50": 0.10941471273140024,
163
+ "scr_dir2_threshold_50": 0.10941471273140024,
164
+ "scr_dir1_threshold_100": 0.386363821088651,
165
+ "scr_metric_threshold_100": 0.13231548419445613,
166
+ "scr_dir2_threshold_100": 0.13231548419445613,
167
+ "scr_dir1_threshold_500": -0.022726964852248312,
168
+ "scr_metric_threshold_500": 0.20101779858362379,
169
+ "scr_dir2_threshold_500": 0.20101779858362379
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.23456773771022588,
174
+ "scr_metric_threshold_2": 0.013440846001321878,
175
+ "scr_dir2_threshold_2": 0.013440846001321878,
176
+ "scr_dir1_threshold_5": 0.345678848821337,
177
+ "scr_metric_threshold_5": 0.01612914338360472,
178
+ "scr_dir2_threshold_5": 0.01612914338360472,
179
+ "scr_dir1_threshold_10": 0.38271613114488706,
180
+ "scr_metric_threshold_10": 0.043010835386248475,
181
+ "scr_dir2_threshold_10": 0.043010835386248475,
182
+ "scr_dir1_threshold_20": 0.4567899599324481,
183
+ "scr_metric_threshold_20": 0.010752708846562119,
184
+ "scr_dir2_threshold_20": 0.010752708846562119,
185
+ "scr_dir1_threshold_50": 0.4567899599324481,
186
+ "scr_metric_threshold_50": 0.07526880169841174,
187
+ "scr_dir2_threshold_50": 0.07526880169841174,
188
+ "scr_dir1_threshold_100": 0.27160502003377596,
189
+ "scr_metric_threshold_100": 0.08602151054497387,
190
+ "scr_dir2_threshold_100": 0.08602151054497387,
191
+ "scr_dir1_threshold_500": 0.14814839343466119,
192
+ "scr_metric_threshold_500": 0.11021506539285786,
193
+ "scr_dir2_threshold_500": 0.11021506539285786
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.028409063970008027,
198
+ "scr_metric_threshold_2": 0.12328769360278982,
199
+ "scr_dir2_threshold_2": 0.12328769360278982,
200
+ "scr_dir1_threshold_5": 0.056818127940016054,
201
+ "scr_metric_threshold_5": 0.16438359147038642,
202
+ "scr_dir2_threshold_5": 0.16438359147038642,
203
+ "scr_dir1_threshold_10": 0.051136382878565693,
204
+ "scr_metric_threshold_10": 0.21917794051566444,
205
+ "scr_dir2_threshold_10": 0.21917794051566444,
206
+ "scr_dir1_threshold_20": 0.051136382878565693,
207
+ "scr_metric_threshold_20": 0.31506845959581525,
208
+ "scr_dir2_threshold_20": 0.31506845959581525,
209
+ "scr_dir1_threshold_50": -0.07954544684857372,
210
+ "scr_metric_threshold_50": 0.41552512906852657,
211
+ "scr_dir2_threshold_50": 0.41552512906852657,
212
+ "scr_dir1_threshold_100": -0.056818127940016054,
213
+ "scr_metric_threshold_100": 0.5479451234564373,
214
+ "scr_dir2_threshold_100": 0.5479451234564373,
215
+ "scr_dir1_threshold_500": -0.051136382878565693,
216
+ "scr_metric_threshold_500": 0.62557076879907,
217
+ "scr_dir2_threshold_500": 0.62557076879907
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.12403078926247804,
222
+ "scr_metric_threshold_2": 0.012096857537703539,
223
+ "scr_dir2_threshold_2": 0.012096857537703539,
224
+ "scr_dir1_threshold_5": 0.21705422774794722,
225
+ "scr_metric_threshold_5": 0.028225920807546715,
226
+ "scr_dir2_threshold_5": 0.028225920807546715,
227
+ "scr_dir1_threshold_10": 0.2635657159649414,
228
+ "scr_metric_threshold_10": 0.07661287027579163,
229
+ "scr_dir2_threshold_10": 0.07661287027579163,
230
+ "scr_dir1_threshold_20": 0.2868216910991789,
231
+ "scr_metric_threshold_20": 0.0927419335456348,
232
+ "scr_dir2_threshold_20": 0.0927419335456348,
233
+ "scr_dir1_threshold_50": 0.16279043978521998,
234
+ "scr_metric_threshold_50": 0.2056451360932524,
235
+ "scr_dir2_threshold_50": 0.2056451360932524,
236
+ "scr_dir1_threshold_100": 0.3023253664876833,
237
+ "scr_metric_threshold_100": 0.29032265817460795,
238
+ "scr_dir2_threshold_100": 0.29032265817460795,
239
+ "scr_dir1_threshold_500": 0.36434099214466276,
240
+ "scr_metric_threshold_500": 0.5040322057321397,
241
+ "scr_dir2_threshold_500": 0.5040322057321397
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.12500004233283019,
246
+ "scr_metric_threshold_2": 0.0643776099411321,
247
+ "scr_dir2_threshold_2": 0.0643776099411321,
248
+ "scr_dir1_threshold_5": 0.19318197596782163,
249
+ "scr_metric_threshold_5": 0.15450646850982275,
250
+ "scr_dir2_threshold_5": 0.15450646850982275,
251
+ "scr_dir1_threshold_10": 0.31818167963801025,
252
+ "scr_metric_threshold_10": 0.2532189060784448,
253
+ "scr_dir2_threshold_10": 0.2532189060784448,
254
+ "scr_dir1_threshold_20": 0.4318182356963293,
255
+ "scr_metric_threshold_20": 0.3218883055195427,
256
+ "scr_dir2_threshold_20": 0.3218883055195427,
257
+ "scr_dir1_threshold_50": 0.42045441075917667,
258
+ "scr_metric_threshold_50": 0.40343358508830185,
259
+ "scr_dir2_threshold_50": 0.40343358508830185,
260
+ "scr_dir1_threshold_100": 0.4886363443941681,
261
+ "scr_metric_threshold_100": 0.47639477402936536,
262
+ "scr_dir2_threshold_100": 0.47639477402936536,
263
+ "scr_dir1_threshold_500": 0.5056819124685763,
264
+ "scr_metric_threshold_500": 0.566523632598056,
265
+ "scr_dir2_threshold_500": 0.566523632598056
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.04639146117402712,
270
+ "scr_metric_threshold_2": 0.005025060907560086,
271
+ "scr_dir2_threshold_2": 0.005025060907560086,
272
+ "scr_dir1_threshold_5": 0.08247408426990457,
273
+ "scr_metric_threshold_5": 0.020100543151066925,
274
+ "scr_dir2_threshold_5": 0.020100543151066925,
275
+ "scr_dir1_threshold_10": 0.16494816853980915,
276
+ "scr_metric_threshold_10": 0.06030162945320077,
277
+ "scr_dir2_threshold_10": 0.06030162945320077,
278
+ "scr_dir1_threshold_20": 0.17525762109881465,
279
+ "scr_metric_threshold_20": 0.09547735532694795,
280
+ "scr_dir2_threshold_20": 0.09547735532694795,
281
+ "scr_dir1_threshold_50": 0.1958762189763978,
282
+ "scr_metric_threshold_50": 0.11557789847801488,
283
+ "scr_dir2_threshold_50": 0.11557789847801488,
284
+ "scr_dir1_threshold_100": 0.23711310749113612,
285
+ "scr_metric_threshold_100": 0.1658291065952689,
286
+ "scr_dir2_threshold_100": 0.1658291065952689,
287
+ "scr_dir1_threshold_500": 0.3556698148569181,
288
+ "scr_metric_threshold_500": 0.3316582131905378,
289
+ "scr_dir2_threshold_500": 0.3316582131905378
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_65k/canonical",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res-canonical",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_176_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732146404887,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.30498750349097253,
76
+ "scr_metric_threshold_2": 0.11502722722305693,
77
+ "scr_dir2_threshold_2": 0.1057154593027847,
78
+ "scr_dir1_threshold_5": 0.3288186887790282,
79
+ "scr_metric_threshold_5": 0.18764826169175228,
80
+ "scr_dir2_threshold_5": 0.17713834418982655,
81
+ "scr_dir1_threshold_10": 0.371848611644396,
82
+ "scr_metric_threshold_10": 0.25495008746646297,
83
+ "scr_dir2_threshold_10": 0.25080384411437856,
84
+ "scr_dir1_threshold_20": 0.4381292992767215,
85
+ "scr_metric_threshold_20": 0.3524437884732579,
86
+ "scr_dir2_threshold_20": 0.345548731113196,
87
+ "scr_dir1_threshold_50": 0.4547059461924292,
88
+ "scr_metric_threshold_50": 0.4215835297690564,
89
+ "scr_dir2_threshold_50": 0.421353593701679,
90
+ "scr_dir1_threshold_100": 0.36461451418218305,
91
+ "scr_metric_threshold_100": 0.4446856155257439,
92
+ "scr_dir2_threshold_100": 0.4455133448948921,
93
+ "scr_dir1_threshold_500": 0.3704307650626263,
94
+ "scr_metric_threshold_500": 0.30332584073844554,
95
+ "scr_dir2_threshold_500": 0.3011620371184143
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.5156247962732202,
102
+ "scr_metric_threshold_2": 0.024630517283144072,
103
+ "scr_dir2_threshold_2": 0.024630517283144072,
104
+ "scr_dir1_threshold_5": 0.5468753201420825,
105
+ "scr_metric_threshold_5": 0.05418707929913162,
106
+ "scr_dir2_threshold_5": 0.05418707929913162,
107
+ "scr_dir1_threshold_10": 0.5937506402841651,
108
+ "scr_metric_threshold_10": 0.06403931557428191,
109
+ "scr_dir2_threshold_10": 0.06403931557428191,
110
+ "scr_dir1_threshold_20": 0.5625001164153027,
111
+ "scr_metric_threshold_20": 0.17980283489073706,
112
+ "scr_dir2_threshold_20": 0.17980283489073706,
113
+ "scr_dir1_threshold_50": 0.5,
114
+ "scr_metric_threshold_50": 0.23152703863291027,
115
+ "scr_dir2_threshold_50": 0.23152703863291027,
116
+ "scr_dir1_threshold_100": 0.5468753201420825,
117
+ "scr_metric_threshold_100": 0.28817728710792695,
118
+ "scr_dir2_threshold_100": 0.28817728710792695,
119
+ "scr_dir1_threshold_500": 0.39062549476503666,
120
+ "scr_metric_threshold_500": 0.32019701829979963,
121
+ "scr_dir2_threshold_500": 0.32019701829979963
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.29702997759219685,
126
+ "scr_metric_threshold_2": 0.15954413825695987,
127
+ "scr_dir2_threshold_2": 0.15954413825695987,
128
+ "scr_dir1_threshold_5": 0.2772276643420858,
129
+ "scr_metric_threshold_5": 0.24501430935964405,
130
+ "scr_dir2_threshold_5": 0.24501430935964405,
131
+ "scr_dir1_threshold_10": 0.32673256224996294,
132
+ "scr_metric_threshold_10": 0.29629637805849507,
133
+ "scr_dir2_threshold_10": 0.29629637805849507,
134
+ "scr_dir1_threshold_20": 0.32673256224996294,
135
+ "scr_metric_threshold_20": 0.3532763789177529,
136
+ "scr_dir2_threshold_20": 0.3532763789177529,
137
+ "scr_dir1_threshold_50": 0.40594063496053995,
138
+ "scr_metric_threshold_50": 0.5042736189341026,
139
+ "scr_dir2_threshold_50": 0.5042736189341026,
140
+ "scr_dir1_threshold_100": -0.039604036355288495,
141
+ "scr_metric_threshold_100": 0.4729344826104728,
142
+ "scr_dir2_threshold_100": 0.4729344826104728,
143
+ "scr_dir1_threshold_500": 0.43564380976323974,
144
+ "scr_metric_threshold_500": 0.2535613774140517,
145
+ "scr_dir2_threshold_500": 0.2535613774140517
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5555557658011382,
150
+ "scr_metric_threshold_2": 0.03544318161412681,
151
+ "scr_dir2_threshold_2": 0.03544318161412681,
152
+ "scr_dir1_threshold_5": 0.5079364028137167,
153
+ "scr_metric_threshold_5": 0.06075961056894363,
154
+ "scr_dir2_threshold_5": 0.06075961056894363,
155
+ "scr_dir1_threshold_10": 0.5079364028137167,
156
+ "scr_metric_threshold_10": 0.08607603952376044,
157
+ "scr_dir2_threshold_10": 0.08607603952376044,
158
+ "scr_dir1_threshold_20": 0.5238092084411499,
159
+ "scr_metric_threshold_20": 0.22025317334341732,
160
+ "scr_dir2_threshold_20": 0.22025317334341732,
161
+ "scr_dir1_threshold_50": 0.4761907915588501,
162
+ "scr_metric_threshold_50": 0.26582280582121537,
163
+ "scr_dir2_threshold_50": 0.26582280582121537,
164
+ "scr_dir1_threshold_100": 0.33333364870170723,
165
+ "scr_metric_threshold_100": 0.04810139609153522,
166
+ "scr_dir2_threshold_100": 0.04810139609153522,
167
+ "scr_dir1_threshold_500": 0.4126986229439953,
168
+ "scr_metric_threshold_500": 0.03291141800038962,
169
+ "scr_dir2_threshold_500": 0.03291141800038962
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.32283430937914476,
174
+ "scr_metric_threshold_2": 0.11242599898178338,
175
+ "scr_dir2_threshold_2": 0.11242599898178338,
176
+ "scr_dir1_threshold_5": 0.28346426020295623,
177
+ "scr_metric_threshold_5": 0.204142029051445,
178
+ "scr_dir2_threshold_5": 0.204142029051445,
179
+ "scr_dir1_threshold_10": 0.33858261064642425,
180
+ "scr_metric_threshold_10": 0.26627228847801654,
181
+ "scr_dir2_threshold_10": 0.26627228847801654,
182
+ "scr_dir1_threshold_20": 0.7244091864386734,
183
+ "scr_metric_threshold_20": 0.40236697451231324,
184
+ "scr_dir2_threshold_20": 0.40236697451231324,
185
+ "scr_dir1_threshold_50": 0.692913522560128,
186
+ "scr_metric_threshold_50": 0.29881660091639495,
187
+ "scr_dir2_threshold_50": 0.29881660091639495,
188
+ "scr_dir1_threshold_100": 0.32283430937914476,
189
+ "scr_metric_threshold_100": 0.3727812038692232,
190
+ "scr_dir2_threshold_100": 0.3727812038692232,
191
+ "scr_dir1_threshold_500": 0.015747831939272712,
192
+ "scr_metric_threshold_500": -0.22189345616827838,
193
+ "scr_dir2_threshold_500": -0.22189345616827838
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.08196712234356307,
198
+ "scr_metric_threshold_2": 0.10546877182786926,
199
+ "scr_dir2_threshold_2": 0.10546877182786926,
200
+ "scr_dir1_threshold_5": 0.13661187057260513,
201
+ "scr_metric_threshold_5": 0.22265637369125918,
202
+ "scr_dir2_threshold_5": 0.22265637369125918,
203
+ "scr_dir1_threshold_10": 0.15300542532467426,
204
+ "scr_metric_threshold_10": 0.43750011641530273,
205
+ "scr_dir2_threshold_10": 0.43750011641530273,
206
+ "scr_dir1_threshold_20": 0.1311474608913792,
207
+ "scr_metric_threshold_20": 0.5742187136202179,
208
+ "scr_dir2_threshold_20": 0.5742187136202179,
209
+ "scr_dir1_threshold_50": 0.1803277994391953,
210
+ "scr_metric_threshold_50": 0.7226561408606537,
211
+ "scr_dir2_threshold_50": 0.7226561408606537,
212
+ "scr_dir1_threshold_100": 0.16393424468712614,
213
+ "scr_metric_threshold_100": 0.7617188300355207,
214
+ "scr_dir2_threshold_100": 0.7617188300355207,
215
+ "scr_dir1_threshold_500": 0.16939898007674342,
216
+ "scr_metric_threshold_500": 0.7617188300355207,
217
+ "scr_dir2_threshold_500": 0.7617188300355207
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.2358974641126534,
222
+ "scr_metric_threshold_2": 0.04032253800396563,
223
+ "scr_dir2_threshold_2": 0.04032253800396563,
224
+ "scr_dir1_threshold_5": 0.32307674908308187,
225
+ "scr_metric_threshold_5": 0.1008065853511987,
226
+ "scr_dir2_threshold_5": 0.1008065853511987,
227
+ "scr_dir1_threshold_10": 0.37435875333977064,
228
+ "scr_metric_threshold_10": 0.1572581866250075,
229
+ "scr_dir2_threshold_10": 0.1572581866250075,
230
+ "scr_dir1_threshold_20": 0.44615362043210616,
231
+ "scr_metric_threshold_20": 0.21774199363095595,
232
+ "scr_dir2_threshold_20": 0.21774199363095595,
233
+ "scr_dir1_threshold_50": 0.4974359303536512,
234
+ "scr_metric_threshold_50": 0.3629033227182599,
235
+ "scr_dir2_threshold_50": 0.3629033227182599,
236
+ "scr_dir1_threshold_100": 0.5641023524884329,
237
+ "scr_metric_threshold_100": 0.48790314246229644,
238
+ "scr_dir2_threshold_100": 0.48790314246229644,
239
+ "scr_dir1_threshold_500": 0.44615362043210616,
240
+ "scr_metric_threshold_500": 0.34274205371627714,
241
+ "scr_dir2_threshold_500": 0.34274205371627714
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.2850679587298548,
246
+ "scr_metric_threshold_2": 0.29646003812059707,
247
+ "scr_dir2_threshold_2": 0.29646003812059707,
248
+ "scr_dir1_threshold_5": 0.36651594693838474,
249
+ "scr_metric_threshold_5": 0.4247788100750834,
250
+ "scr_dir2_threshold_5": 0.4247788100750834,
251
+ "scr_dir1_threshold_10": 0.4615384200454991,
252
+ "scr_metric_threshold_10": 0.5132742966058849,
253
+ "scr_dir2_threshold_10": 0.5132742966058849,
254
+ "scr_dir1_threshold_20": 0.5113123404530762,
255
+ "scr_metric_threshold_20": 0.5929203399785477,
256
+ "scr_dir2_threshold_20": 0.5929203399785477,
257
+ "scr_dir1_threshold_50": 0.5972851569611343,
258
+ "scr_metric_threshold_50": 0.69911497656298,
259
+ "scr_dir2_threshold_50": 0.69911497656298,
260
+ "scr_dir1_threshold_100": 0.6606335622672957,
261
+ "scr_metric_threshold_100": 0.7610618698820119,
262
+ "scr_dir2_threshold_100": 0.7610618698820119,
263
+ "scr_dir1_threshold_500": 0.6425339793649274,
264
+ "scr_metric_threshold_500": 0.48672570339411514,
265
+ "scr_dir2_threshold_500": 0.48672570339411514
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.1459226336960092,
270
+ "scr_metric_threshold_2": 0.1459226336960092,
271
+ "scr_dir2_threshold_2": 0.07142849033383136,
272
+ "scr_dir1_threshold_5": 0.1888412961373127,
273
+ "scr_metric_threshold_5": 0.1888412961373127,
274
+ "scr_dir2_threshold_5": 0.10476195612190681,
275
+ "scr_dir1_threshold_10": 0.21888407845095484,
276
+ "scr_metric_threshold_10": 0.21888407845095484,
277
+ "scr_dir2_threshold_10": 0.18571413163427958,
278
+ "scr_dir1_threshold_20": 0.27896989889212126,
279
+ "scr_metric_threshold_20": 0.27896989889212126,
280
+ "scr_dir2_threshold_20": 0.22380944001162575,
281
+ "scr_dir1_threshold_50": 0.2875537337059348,
282
+ "scr_metric_threshold_50": 0.2875537337059348,
283
+ "scr_dir2_threshold_50": 0.2857142451669157,
284
+ "scr_dir1_threshold_100": 0.36480671214696403,
285
+ "scr_metric_threshold_100": 0.36480671214696403,
286
+ "scr_dir2_threshold_100": 0.3714285471001494,
287
+ "scr_dir1_threshold_500": 0.450643781215689,
288
+ "scr_metric_threshold_500": 0.450643781215689,
289
+ "scr_dir2_threshold_500": 0.43333335225543934
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_16k/average_l0_176",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_22_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732146820487,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.23782367701278928,
76
+ "scr_metric_threshold_2": 0.13647513271581077,
77
+ "scr_dir2_threshold_2": 0.12066682797922106,
78
+ "scr_dir1_threshold_5": 0.24179696526567407,
79
+ "scr_metric_threshold_5": 0.2100827980920951,
80
+ "scr_dir2_threshold_5": 0.19152567934752784,
81
+ "scr_dir1_threshold_10": 0.2367459411399631,
82
+ "scr_metric_threshold_10": 0.267597439470097,
83
+ "scr_dir2_threshold_10": 0.24849619008043344,
84
+ "scr_dir1_threshold_20": 0.19729908852531847,
85
+ "scr_metric_threshold_20": 0.302687896337182,
86
+ "scr_dir2_threshold_20": 0.2861438416528567,
87
+ "scr_dir1_threshold_50": 0.16717979296656588,
88
+ "scr_metric_threshold_50": 0.3816382471218651,
89
+ "scr_dir2_threshold_50": 0.34448056340955513,
90
+ "scr_dir1_threshold_100": -0.0071625390427317165,
91
+ "scr_metric_threshold_100": 0.3350628674277345,
92
+ "scr_dir2_threshold_100": 0.30242441025812616,
93
+ "scr_dir1_threshold_500": -0.15837955830059927,
94
+ "scr_metric_threshold_500": 0.3185108076084894,
95
+ "scr_dir2_threshold_500": 0.2599603376784459
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.3749997671693945,
102
+ "scr_metric_threshold_2": 0.02955656201598755,
103
+ "scr_dir2_threshold_2": 0.02955656201598755,
104
+ "scr_dir1_threshold_5": 0.3125005820765137,
105
+ "scr_metric_threshold_5": 0.051724056932709886,
106
+ "scr_dir2_threshold_5": 0.051724056932709886,
107
+ "scr_dir1_threshold_10": 0.2968748544808716,
108
+ "scr_metric_threshold_10": 0.06157629320786018,
109
+ "scr_dir2_threshold_10": 0.06157629320786018,
110
+ "scr_dir1_threshold_20": 0.2968748544808716,
111
+ "scr_metric_threshold_20": 0.08620681049100425,
112
+ "scr_dir2_threshold_20": 0.08620681049100425,
113
+ "scr_dir1_threshold_50": 0.250000465661211,
114
+ "scr_metric_threshold_50": 0.10591128304130484,
115
+ "scr_dir2_threshold_50": 0.10591128304130484,
116
+ "scr_dir1_threshold_100": 0.0,
117
+ "scr_metric_threshold_100": 0.1699507454250501,
118
+ "scr_dir2_threshold_100": 0.1699507454250501,
119
+ "scr_dir1_threshold_500": -0.14062502910382568,
120
+ "scr_metric_threshold_500": 0.14285705896602094,
121
+ "scr_dir2_threshold_500": 0.14285705896602094
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.1386138321710429,
126
+ "scr_metric_threshold_2": 0.20797724087560748,
127
+ "scr_dir2_threshold_2": 0.20797724087560748,
128
+ "scr_dir1_threshold_5": 0.18811873007892008,
129
+ "scr_metric_threshold_5": 0.3105413782733095,
130
+ "scr_dir2_threshold_5": 0.3105413782733095,
131
+ "scr_dir1_threshold_10": 0.19801959163150878,
132
+ "scr_metric_threshold_10": 0.3532763789177529,
133
+ "scr_dir2_threshold_10": 0.3532763789177529,
134
+ "scr_dir1_threshold_20": -0.32673256224996294,
135
+ "scr_metric_threshold_20": 0.39601137956219623,
136
+ "scr_dir2_threshold_20": 0.39601137956219623,
137
+ "scr_dir1_threshold_50": -0.4752472559735946,
138
+ "scr_metric_threshold_50": 0.4586894823956583,
139
+ "scr_dir2_threshold_50": 0.4586894823956583,
140
+ "scr_dir1_threshold_100": -0.6732674377500371,
141
+ "scr_metric_threshold_100": 0.25071224152005084,
142
+ "scr_dir2_threshold_100": 0.25071224152005084,
143
+ "scr_dir1_threshold_500": -0.7128708839603919,
144
+ "scr_metric_threshold_500": 0.3190884463277172,
145
+ "scr_dir2_threshold_500": 0.3190884463277172
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.4444442341988618,
150
+ "scr_metric_threshold_2": 0.05063300880745302,
151
+ "scr_dir2_threshold_2": 0.05063300880745302,
152
+ "scr_dir1_threshold_5": 0.2380958688319859,
153
+ "scr_metric_threshold_5": 0.06329122328486143,
154
+ "scr_dir2_threshold_5": 0.06329122328486143,
155
+ "scr_dir1_threshold_10": 0.0634921686148548,
156
+ "scr_metric_threshold_10": 0.10886085576265946,
157
+ "scr_dir2_threshold_10": 0.10886085576265946,
158
+ "scr_dir1_threshold_20": 0.11111153160227633,
159
+ "scr_metric_threshold_20": 0.1544304882404575,
160
+ "scr_dir2_threshold_20": 0.1544304882404575,
161
+ "scr_dir1_threshold_50": 0.0634921686148548,
162
+ "scr_metric_threshold_50": 0.26835441853713315,
163
+ "scr_dir2_threshold_50": 0.26835441853713315,
164
+ "scr_dir1_threshold_100": -0.9523806370125785,
165
+ "scr_metric_threshold_100": 0.3088608255830956,
166
+ "scr_dir2_threshold_100": 0.3088608255830956,
167
+ "scr_dir1_threshold_500": -1.5714276253234496,
168
+ "scr_metric_threshold_500": 0.19240513167268272,
169
+ "scr_dir2_threshold_500": 0.19240513167268272
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.13385797946783837,
174
+ "scr_metric_threshold_2": 0.16568045667738668,
175
+ "scr_dir2_threshold_2": 0.16568045667738668,
176
+ "scr_dir1_threshold_5": 0.1574801967047542,
177
+ "scr_metric_threshold_5": 0.23668651783492653,
178
+ "scr_dir2_threshold_5": 0.23668651783492653,
179
+ "scr_dir1_threshold_10": 0.14173189543747472,
180
+ "scr_metric_threshold_10": 0.3106509444426516,
181
+ "scr_dir2_threshold_10": 0.3106509444426516,
182
+ "scr_dir1_threshold_20": 0.16535411267439054,
183
+ "scr_metric_threshold_20": 0.2544379449517599,
184
+ "scr_dir2_threshold_20": 0.2544379449517599,
185
+ "scr_dir1_threshold_50": 0.04724396514582491,
186
+ "scr_metric_threshold_50": 0.33136091335477336,
187
+ "scr_dir2_threshold_50": 0.33136091335477336,
188
+ "scr_dir1_threshold_100": 0.3307086946767879,
189
+ "scr_metric_threshold_100": 0.28698225739013833,
190
+ "scr_dir2_threshold_100": 0.28698225739013833,
191
+ "scr_dir1_threshold_500": -0.18897679923931315,
192
+ "scr_metric_threshold_500": 0.14201194596997654,
193
+ "scr_dir2_threshold_500": 0.14201194596997654
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.05464474822904205,
198
+ "scr_metric_threshold_2": 0.10546877182786926,
199
+ "scr_dir2_threshold_2": 0.10546877182786926,
200
+ "scr_dir1_threshold_5": 0.05464474822904205,
201
+ "scr_metric_threshold_5": 0.3164063154836078,
202
+ "scr_dir2_threshold_5": 0.3164063154836078,
203
+ "scr_dir1_threshold_10": 0.03278678379574697,
204
+ "scr_metric_threshold_10": 0.40625005820765137,
205
+ "scr_dir2_threshold_10": 0.40625005820765137,
206
+ "scr_dir1_threshold_20": 0.06010915791026799,
207
+ "scr_metric_threshold_20": 0.5195312281721307,
208
+ "scr_dir2_threshold_20": 0.5195312281721307,
209
+ "scr_dir1_threshold_50": 0.05464474822904205,
210
+ "scr_metric_threshold_50": 0.6484374272404357,
211
+ "scr_dir2_threshold_50": 0.6484374272404357,
212
+ "scr_dir1_threshold_100": 0.016393554752069144,
213
+ "scr_metric_threshold_100": 0.6796874854480871,
214
+ "scr_dir2_threshold_100": 0.6796874854480871,
215
+ "scr_dir1_threshold_500": -0.06010915791026799,
216
+ "scr_metric_threshold_500": 0.6679686554125666,
217
+ "scr_dir2_threshold_500": 0.6679686554125666
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.24615374269804866,
222
+ "scr_metric_threshold_2": 0.10483879108333834,
223
+ "scr_dir2_threshold_2": 0.10483879108333834,
224
+ "scr_dir1_threshold_5": 0.32307674908308187,
225
+ "scr_metric_threshold_5": 0.11693564862104187,
226
+ "scr_dir2_threshold_5": 0.11693564862104187,
227
+ "scr_dir1_threshold_10": 0.35384589050412385,
228
+ "scr_metric_threshold_10": 0.1572581866250075,
229
+ "scr_dir2_threshold_10": 0.1572581866250075,
230
+ "scr_dir1_threshold_20": 0.4153844790110642,
231
+ "scr_metric_threshold_20": 0.19758072462897314,
232
+ "scr_dir2_threshold_20": 0.19758072462897314,
233
+ "scr_dir1_threshold_50": 0.4153844790110642,
234
+ "scr_metric_threshold_50": 0.26209673736706124,
235
+ "scr_dir2_threshold_50": 0.26209673736706124,
236
+ "scr_dir1_threshold_100": 0.4307692025540133,
237
+ "scr_metric_threshold_100": 0.33870960764285285,
238
+ "scr_dir2_threshold_100": 0.33870960764285285,
239
+ "scr_dir1_threshold_500": 0.48205120681070207,
240
+ "scr_metric_threshold_500": 0.41935492399206875,
241
+ "scr_dir2_threshold_500": 0.41935492399206875
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.31674202653080763,
246
+ "scr_metric_threshold_2": 0.23451314480156532,
247
+ "scr_dir2_threshold_2": 0.23451314480156532,
248
+ "scr_dir1_threshold_5": 0.40723994104264966,
249
+ "scr_metric_threshold_5": 0.33185833822785904,
250
+ "scr_dir2_threshold_5": 0.33185833822785904,
251
+ "scr_dir1_threshold_10": 0.5067872424492922,
252
+ "scr_metric_threshold_10": 0.4424776963913609,
253
+ "scr_dir2_threshold_10": 0.4424776963913609,
254
+ "scr_dir1_threshold_20": 0.542986408254029,
255
+ "scr_metric_threshold_20": 0.49999986813132324,
256
+ "scr_dir2_threshold_20": 0.49999986813132324,
257
+ "scr_dir1_threshold_50": 0.5656108194559256,
258
+ "scr_metric_threshold_50": 0.561946761450355,
259
+ "scr_dir2_threshold_50": 0.561946761450355,
260
+ "scr_dir1_threshold_100": 0.3484163640360163,
261
+ "scr_metric_threshold_100": 0.2035398300107262,
262
+ "scr_dir2_threshold_100": 0.2035398300107262,
263
+ "scr_dir1_threshold_500": 0.375565603537441,
264
+ "scr_metric_threshold_500": 0.11504407974257125,
265
+ "scr_dir2_threshold_500": 0.11504407974257125
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.19313308563727843,
270
+ "scr_metric_threshold_2": 0.19313308563727843,
271
+ "scr_dir2_threshold_2": 0.06666664774456064,
272
+ "scr_dir1_threshold_5": 0.2532189060784448,
273
+ "scr_metric_threshold_5": 0.2532189060784448,
274
+ "scr_dir2_threshold_5": 0.10476195612190681,
275
+ "scr_dir1_threshold_10": 0.30042910220583197,
276
+ "scr_metric_threshold_10": 0.30042910220583197,
277
+ "scr_dir2_threshold_10": 0.14761910708852366,
278
+ "scr_dir1_threshold_20": 0.3133047265196112,
279
+ "scr_metric_threshold_20": 0.3133047265196112,
280
+ "scr_dir2_threshold_20": 0.18095228904500887,
281
+ "scr_dir1_threshold_50": 0.41630895358819897,
282
+ "scr_metric_threshold_50": 0.41630895358819897,
283
+ "scr_dir2_threshold_50": 0.11904748388971893,
284
+ "scr_dir1_threshold_100": 0.4420599464018754,
285
+ "scr_metric_threshold_100": 0.4420599464018754,
286
+ "scr_dir2_threshold_100": 0.18095228904500887,
287
+ "scr_dir1_threshold_500": 0.5493562187843111,
288
+ "scr_metric_threshold_500": 0.5493562187843111,
289
+ "scr_dir2_threshold_500": 0.08095245934396302
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_16k/average_l0_22",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_41_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732147237088,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.2530565871084578,
76
+ "scr_metric_threshold_2": 0.14727577546704618,
77
+ "scr_dir2_threshold_2": 0.13909571155792852,
78
+ "scr_dir1_threshold_5": 0.2945412885257296,
79
+ "scr_metric_threshold_5": 0.21888442787705337,
80
+ "scr_dir2_threshold_5": 0.20395748318955104,
81
+ "scr_dir1_threshold_10": 0.2710701286341929,
82
+ "scr_metric_threshold_10": 0.28102383769439593,
83
+ "scr_dir2_threshold_10": 0.268293887435439,
84
+ "scr_dir1_threshold_20": 0.25448353382146016,
85
+ "scr_metric_threshold_20": 0.34337309457554227,
86
+ "scr_dir2_threshold_20": 0.32444042616003227,
87
+ "scr_dir1_threshold_50": 0.1282095560144154,
88
+ "scr_metric_threshold_50": 0.3888649100162823,
89
+ "scr_dir2_threshold_50": 0.37934620887816967,
90
+ "scr_dir1_threshold_100": 0.14850694552323107,
91
+ "scr_metric_threshold_100": 0.35903131578661956,
92
+ "scr_dir2_threshold_100": 0.347476547738728,
93
+ "scr_dir1_threshold_500": -0.1558261102394828,
94
+ "scr_metric_threshold_500": 0.30739649232984406,
95
+ "scr_dir2_threshold_500": 0.3036819923067535
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4062502910382569,
102
+ "scr_metric_threshold_2": 0.014778281007993775,
103
+ "scr_dir2_threshold_2": 0.014778281007993775,
104
+ "scr_dir1_threshold_5": 0.42187508731147705,
105
+ "scr_metric_threshold_5": 0.022167494916722333,
106
+ "scr_dir2_threshold_5": 0.022167494916722333,
107
+ "scr_dir1_threshold_10": 0.42187508731147705,
108
+ "scr_metric_threshold_10": 0.044334989833444666,
109
+ "scr_dir2_threshold_10": 0.044334989833444666,
110
+ "scr_dir1_threshold_20": 0.4062502910382569,
111
+ "scr_metric_threshold_20": 0.06403931557428191,
112
+ "scr_dir2_threshold_20": 0.06403931557428191,
113
+ "scr_dir1_threshold_50": 0.3749997671693945,
114
+ "scr_metric_threshold_50": 0.11822654168287688,
115
+ "scr_dir2_threshold_50": 0.11822654168287688,
116
+ "scr_dir1_threshold_100": 0.42187508731147705,
117
+ "scr_metric_threshold_100": 0.1403940365995992,
118
+ "scr_dir2_threshold_100": 0.1403940365995992,
119
+ "scr_dir1_threshold_500": 0.14062502910382568,
120
+ "scr_metric_threshold_500": 0.0566502484750167,
121
+ "scr_dir2_threshold_500": 0.0566502484750167
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.18811873007892008,
126
+ "scr_metric_threshold_2": 0.2364672413052364,
127
+ "scr_dir2_threshold_2": 0.2364672413052364,
128
+ "scr_dir1_threshold_5": 0.19801959163150878,
129
+ "scr_metric_threshold_5": 0.2820513778436806,
130
+ "scr_dir2_threshold_5": 0.2820513778436806,
131
+ "scr_dir1_threshold_10": 0.17821786852633137,
132
+ "scr_metric_threshold_10": 0.3475784467573461,
133
+ "scr_dir2_threshold_10": 0.3475784467573461,
134
+ "scr_dir1_threshold_20": 0.0594057594604659,
135
+ "scr_metric_threshold_20": 0.4045584476166039,
136
+ "scr_dir2_threshold_20": 0.4045584476166039,
137
+ "scr_dir1_threshold_50": -0.5445544671315828,
138
+ "scr_metric_threshold_50": 0.48433051674508387,
139
+ "scr_dir2_threshold_50": 0.48433051674508387,
140
+ "scr_dir1_threshold_100": -0.7722772335657915,
141
+ "scr_metric_threshold_100": 0.13390327372133182,
142
+ "scr_dir2_threshold_100": 0.13390327372133182,
143
+ "scr_dir1_threshold_500": -1.0990097958157543,
144
+ "scr_metric_threshold_500": 0.18803430850038624,
145
+ "scr_dir2_threshold_500": 0.18803430850038624
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5714285714285714,
150
+ "scr_metric_threshold_2": 0.055696234239288635,
151
+ "scr_dir2_threshold_2": 0.055696234239288635,
152
+ "scr_dir1_threshold_5": 0.5873013770560047,
153
+ "scr_metric_threshold_5": 0.10126586671708666,
154
+ "scr_dir2_threshold_5": 0.10126586671708666,
155
+ "scr_dir1_threshold_10": 0.2857142857142857,
156
+ "scr_metric_threshold_10": 0.1620253263882109,
157
+ "scr_dir2_threshold_10": 0.1620253263882109,
158
+ "scr_dir1_threshold_20": 0.33333364870170723,
159
+ "scr_metric_threshold_20": 0.21518994791158172,
160
+ "scr_dir2_threshold_20": 0.21518994791158172,
161
+ "scr_dir1_threshold_50": -0.03174561125486653,
162
+ "scr_metric_threshold_50": 0.29367099838976934,
163
+ "scr_dir2_threshold_50": 0.29367099838976934,
164
+ "scr_dir1_threshold_100": -0.2698405339817307,
165
+ "scr_metric_threshold_100": 0.3341772545379124,
166
+ "scr_dir2_threshold_100": 0.3341772545379124,
167
+ "scr_dir1_threshold_500": -1.5238092084411499,
168
+ "scr_metric_threshold_500": 0.08607603952376044,
169
+ "scr_dir2_threshold_500": 0.08607603952376044
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.19685024588094274,
174
+ "scr_metric_threshold_2": 0.1301776024437199,
175
+ "scr_dir2_threshold_2": 0.1301776024437199,
176
+ "scr_dir1_threshold_5": -0.007874385297643128,
177
+ "scr_metric_threshold_5": 0.24260360142550327,
178
+ "scr_dir2_threshold_5": 0.24260360142550327,
179
+ "scr_dir1_threshold_10": -0.015748301267279483,
180
+ "scr_metric_threshold_10": 0.3106509444426516,
181
+ "scr_dir2_threshold_10": 0.3106509444426516,
182
+ "scr_dir1_threshold_20": -0.07086618238274074,
183
+ "scr_metric_threshold_20": 0.3994082563719217,
184
+ "scr_dir2_threshold_20": 0.3994082563719217,
185
+ "scr_dir1_threshold_50": -0.031496133206552195,
186
+ "scr_metric_threshold_50": 0.357988142202575,
187
+ "scr_dir2_threshold_50": 0.357988142202575,
188
+ "scr_dir1_threshold_100": 0.35433044258569696,
189
+ "scr_metric_threshold_100": 0.12721906064843153,
190
+ "scr_dir2_threshold_100": 0.12721906064843153,
191
+ "scr_dir1_threshold_500": 0.1102362315589293,
192
+ "scr_metric_threshold_500": 0.1301776024437199,
193
+ "scr_dir2_threshold_500": 0.1301776024437199
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.06557389329988525,
198
+ "scr_metric_threshold_2": 0.16796888824317202,
199
+ "scr_dir2_threshold_2": 0.16796888824317202,
200
+ "scr_dir1_threshold_5": 0.08743153202478901,
201
+ "scr_metric_threshold_5": 0.29687508731147705,
202
+ "scr_dir2_threshold_5": 0.29687508731147705,
203
+ "scr_dir1_threshold_10": 0.1092894964580841,
204
+ "scr_metric_threshold_10": 0.42187508731147705,
205
+ "scr_dir2_threshold_10": 0.42187508731147705,
206
+ "scr_dir1_threshold_20": 0.1092894964580841,
207
+ "scr_metric_threshold_20": 0.5351562572759564,
208
+ "scr_dir2_threshold_20": 0.5351562572759564,
209
+ "scr_dir1_threshold_50": 0.03278678379574697,
210
+ "scr_metric_threshold_50": 0.6562500582076514,
211
+ "scr_dir2_threshold_50": 0.6562500582076514,
212
+ "scr_dir1_threshold_100": 0.00546440968122594,
213
+ "scr_metric_threshold_100": 0.6914063154836078,
214
+ "scr_dir2_threshold_100": 0.6914063154836078,
215
+ "scr_dir1_threshold_500": -0.06010915791026799,
216
+ "scr_metric_threshold_500": 0.6796874854480871,
217
+ "scr_dir2_threshold_500": 0.6796874854480871
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.2102561563194528,
222
+ "scr_metric_threshold_2": 0.11290320254761761,
223
+ "scr_dir2_threshold_2": 0.11290320254761761,
224
+ "scr_dir1_threshold_5": 0.36410247475437535,
225
+ "scr_metric_threshold_5": 0.1370969176230247,
226
+ "scr_dir2_threshold_5": 0.1370969176230247,
227
+ "scr_dir1_threshold_10": 0.3948716161754174,
228
+ "scr_metric_threshold_10": 0.1653225980892868,
229
+ "scr_dir2_threshold_10": 0.1653225980892868,
230
+ "scr_dir1_threshold_20": 0.3128204704976866,
231
+ "scr_metric_threshold_20": 0.20161293036111277,
232
+ "scr_dir2_threshold_20": 0.20161293036111277,
233
+ "scr_dir1_threshold_50": 0.3282048883757795,
234
+ "scr_metric_threshold_50": 0.2983870696388872,
235
+ "scr_dir2_threshold_50": 0.2983870696388872,
236
+ "scr_dir1_threshold_100": 0.44615362043210616,
237
+ "scr_metric_threshold_100": 0.3991936549900859,
238
+ "scr_dir2_threshold_100": 0.3991936549900859,
239
+ "scr_dir1_threshold_500": 0.44615362043210616,
240
+ "scr_metric_threshold_500": 0.48790314246229644,
241
+ "scr_dir2_threshold_500": 0.48790314246229644
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.2443439646255898,
246
+ "scr_metric_threshold_2": 0.3185839097532974,
247
+ "scr_dir2_threshold_2": 0.3185839097532974,
248
+ "scr_dir1_threshold_5": 0.4479639351469146,
249
+ "scr_metric_threshold_5": 0.4115043816005218,
250
+ "scr_dir2_threshold_5": 0.4115043816005218,
251
+ "scr_dir1_threshold_10": 0.5067872424492922,
252
+ "scr_metric_threshold_10": 0.5088495750268155,
253
+ "scr_dir2_threshold_10": 0.5088495750268155,
254
+ "scr_dir1_threshold_20": 0.5294116536511888,
255
+ "scr_metric_threshold_20": 0.5707964683458472,
256
+ "scr_dir2_threshold_20": 0.5707964683458472,
257
+ "scr_dir1_threshold_50": 0.5927603286616061,
258
+ "scr_metric_threshold_50": 0.597345061557617,
259
+ "scr_dir2_threshold_50": 0.597345061557617,
260
+ "scr_dir1_threshold_100": 0.6289592247620871,
261
+ "scr_metric_threshold_100": 0.6725663833512103,
262
+ "scr_dir2_threshold_100": 0.6725663833512103,
263
+ "scr_dir1_threshold_500": 0.2714932041270145,
264
+ "scr_metric_threshold_500": 0.36283191675605164,
265
+ "scr_dir2_threshold_500": 0.36283191675605164
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.1416308441960435,
270
+ "scr_metric_threshold_2": 0.1416308441960435,
271
+ "scr_dir2_threshold_2": 0.07619033292310208,
272
+ "scr_dir1_threshold_5": 0.25751069557841055,
273
+ "scr_metric_threshold_5": 0.25751069557841055,
274
+ "scr_dir2_threshold_5": 0.13809513807839202,
275
+ "scr_dir1_threshold_10": 0.2875537337059348,
276
+ "scr_metric_threshold_10": 0.2875537337059348,
277
+ "scr_dir2_threshold_10": 0.18571413163427958,
278
+ "scr_dir1_threshold_20": 0.35622313314703263,
279
+ "scr_metric_threshold_20": 0.35622313314703263,
280
+ "scr_dir2_threshold_20": 0.20476178582295265,
281
+ "scr_dir1_threshold_50": 0.30472089170579764,
282
+ "scr_metric_threshold_50": 0.30472089170579764,
283
+ "scr_dir2_threshold_50": 0.22857128260089646,
284
+ "scr_dir1_threshold_100": 0.3733905469607776,
285
+ "scr_metric_threshold_100": 0.3733905469607776,
286
+ "scr_dir2_threshold_100": 0.280952402577645,
287
+ "scr_dir1_threshold_500": 0.46781119502943397,
288
+ "scr_metric_threshold_500": 0.46781119502943397,
289
+ "scr_dir2_threshold_500": 0.43809519484471005
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_16k/average_l0_41",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_445_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732147663090,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.3201838705748857,
76
+ "scr_metric_threshold_2": 0.17065033800250384,
77
+ "scr_dir2_threshold_2": 0.1538942561208495,
78
+ "scr_dir1_threshold_5": 0.32446017406782196,
79
+ "scr_metric_threshold_5": 0.24184525836060444,
80
+ "scr_dir2_threshold_5": 0.22894672712749398,
81
+ "scr_dir1_threshold_10": 0.21231734827310839,
82
+ "scr_metric_threshold_10": 0.2965076134387769,
83
+ "scr_dir2_threshold_10": 0.29897542367754976,
84
+ "scr_dir1_threshold_20": 0.28421142973722197,
85
+ "scr_metric_threshold_20": 0.3531390786590271,
86
+ "scr_dir2_threshold_20": 0.35820756180304897,
87
+ "scr_dir1_threshold_50": 0.3196611049548321,
88
+ "scr_metric_threshold_50": 0.37776587349204155,
89
+ "scr_dir2_threshold_50": 0.33140115572017464,
90
+ "scr_dir1_threshold_100": 0.23137037472317903,
91
+ "scr_metric_threshold_100": 0.4033984232667557,
92
+ "scr_dir2_threshold_100": 0.3598183052662277,
93
+ "scr_dir1_threshold_500": 0.06981882940685671,
94
+ "scr_metric_threshold_500": 0.385184134641965,
95
+ "scr_dir2_threshold_500": 0.36884701317548013
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.5468753201420825,
102
+ "scr_metric_threshold_2": 0.03201958438240929,
103
+ "scr_dir2_threshold_2": 0.03201958438240929,
104
+ "scr_dir1_threshold_5": 0.5468753201420825,
105
+ "scr_metric_threshold_5": 0.049261034566288144,
106
+ "scr_dir2_threshold_5": 0.049261034566288144,
107
+ "scr_dir1_threshold_10": 0.5937506402841651,
108
+ "scr_metric_threshold_10": 0.07389155184943222,
109
+ "scr_dir2_threshold_10": 0.07389155184943222,
110
+ "scr_dir1_threshold_20": 0.3749997671693945,
111
+ "scr_metric_threshold_20": 0.12807877795802716,
112
+ "scr_dir2_threshold_20": 0.12807877795802716,
113
+ "scr_dir1_threshold_50": 0.39062549476503666,
114
+ "scr_metric_threshold_50": 0.16502455388274329,
115
+ "scr_dir2_threshold_50": 0.16502455388274329,
116
+ "scr_dir1_threshold_100": 0.4687504074535596,
117
+ "scr_metric_threshold_100": 0.2709358369240481,
118
+ "scr_dir2_threshold_100": 0.2709358369240481,
119
+ "scr_dir1_threshold_500": 0.2968748544808716,
120
+ "scr_metric_threshold_500": 0.22413782472418173,
121
+ "scr_dir2_threshold_500": 0.22413782472418173
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.31683170069737426,
126
+ "scr_metric_threshold_2": 0.17663827436577517,
127
+ "scr_dir2_threshold_2": 0.17663827436577517,
128
+ "scr_dir1_threshold_5": 0.29702997759219685,
129
+ "scr_metric_threshold_5": 0.2108262069558109,
130
+ "scr_dir2_threshold_5": 0.2108262069558109,
131
+ "scr_dir1_threshold_10": 0.2772276643420858,
132
+ "scr_metric_threshold_10": 0.26780637762886617,
133
+ "scr_dir2_threshold_10": 0.26780637762886617,
134
+ "scr_dir1_threshold_20": 0.39603977340795127,
135
+ "scr_metric_threshold_20": 0.23931637719923726,
136
+ "scr_dir2_threshold_20": 0.23931637719923726,
137
+ "scr_dir1_threshold_50": 0.4455446713158285,
138
+ "scr_metric_threshold_50": 0.10541310347790545,
139
+ "scr_dir2_threshold_50": 0.10541310347790545,
140
+ "scr_dir1_threshold_100": 0.5742576419342826,
141
+ "scr_metric_threshold_100": 0.051282068698851026,
142
+ "scr_dir2_threshold_100": 0.051282068698851026,
143
+ "scr_dir1_threshold_500": -0.6534651244999259,
144
+ "scr_metric_threshold_500": 0.6809117234860803,
145
+ "scr_dir2_threshold_500": 0.6809117234860803
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5396829601737049,
150
+ "scr_metric_threshold_2": 0.05063300880745302,
151
+ "scr_dir2_threshold_2": 0.05063300880745302,
152
+ "scr_dir1_threshold_5": 0.5396829601737049,
153
+ "scr_metric_threshold_5": 0.09367087767151386,
154
+ "scr_dir2_threshold_5": 0.09367087767151386,
155
+ "scr_dir1_threshold_10": 0.6031751287885597,
156
+ "scr_metric_threshold_10": 0.1544304882404575,
157
+ "scr_dir2_threshold_10": 0.1544304882404575,
158
+ "scr_dir1_threshold_20": 0.1269843372297096,
159
+ "scr_metric_threshold_20": 0.16708870271786588,
160
+ "scr_dir2_threshold_20": 0.16708870271786588,
161
+ "scr_dir1_threshold_50": 0.30158709134171896,
162
+ "scr_metric_threshold_50": 0.1696203154337837,
163
+ "scr_dir2_threshold_50": 0.1696203154337837,
164
+ "scr_dir1_threshold_100": 0.0,
165
+ "scr_metric_threshold_100": 0.26075958038937974,
166
+ "scr_dir2_threshold_100": 0.26075958038937974,
167
+ "scr_dir1_threshold_500": -0.34920550822401875,
168
+ "scr_metric_threshold_500": -0.015189827193326212,
169
+ "scr_dir2_threshold_500": -0.015189827193326212
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3622048278833401,
174
+ "scr_metric_threshold_2": 0.06508880122185995,
175
+ "scr_dir2_threshold_2": 0.06508880122185995,
176
+ "scr_dir1_threshold_5": 0.4881888913815421,
177
+ "scr_metric_threshold_5": 0.1834320601393232,
178
+ "scr_dir2_threshold_5": 0.1834320601393232,
179
+ "scr_dir1_threshold_10": -0.8897642377690774,
180
+ "scr_metric_threshold_10": 0.24260360142550327,
181
+ "scr_dir2_threshold_10": 0.24260360142550327,
182
+ "scr_dir1_threshold_20": -0.2283468484155017,
183
+ "scr_metric_threshold_20": 0.2899407991854267,
184
+ "scr_dir2_threshold_20": 0.2899407991854267,
185
+ "scr_dir1_threshold_50": -0.1102362315589293,
186
+ "scr_metric_threshold_50": 0.4201184016291466,
187
+ "scr_dir2_threshold_50": 0.4201184016291466,
188
+ "scr_dir1_threshold_100": -0.6850396065904916,
189
+ "scr_metric_threshold_100": 0.4881657446462949,
190
+ "scr_dir2_threshold_100": 0.4881657446462949,
191
+ "scr_dir1_threshold_500": 0.2047241618505791,
192
+ "scr_metric_threshold_500": 0.13313614423900827,
193
+ "scr_dir2_threshold_500": 0.13313614423900827
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.021857964433295084,
198
+ "scr_metric_threshold_2": 0.4609375436557385,
199
+ "scr_dir2_threshold_2": 0.4609375436557385,
200
+ "scr_dir1_threshold_5": 0.021857964433295084,
201
+ "scr_metric_threshold_5": 0.5976563736912591,
202
+ "scr_dir2_threshold_5": 0.5976563736912591,
203
+ "scr_dir1_threshold_10": 0.27868847653482753,
204
+ "scr_metric_threshold_10": 0.6406250291038257,
205
+ "scr_dir2_threshold_10": 0.6406250291038257,
206
+ "scr_dir1_threshold_20": 0.442623046930345,
207
+ "scr_metric_threshold_20": 0.6679686554125666,
208
+ "scr_dir2_threshold_20": 0.6679686554125666,
209
+ "scr_dir1_threshold_50": 0.03825119347697291,
210
+ "scr_metric_threshold_50": 0.5976563736912591,
211
+ "scr_dir2_threshold_50": 0.5976563736912591,
212
+ "scr_dir1_threshold_100": 0.04918033854781611,
213
+ "scr_metric_threshold_100": 0.496093800931695,
214
+ "scr_dir2_threshold_100": 0.496093800931695,
215
+ "scr_dir1_threshold_500": 0.35519118919716464,
216
+ "scr_metric_threshold_500": 0.6406250291038257,
217
+ "scr_dir2_threshold_500": 0.6406250291038257
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.27179474482639304,
222
+ "scr_metric_threshold_2": 0.08064531634921589,
223
+ "scr_dir2_threshold_2": 0.08064531634921589,
224
+ "scr_dir1_threshold_5": 0.2666666055336954,
225
+ "scr_metric_threshold_5": 0.07258066454365199,
226
+ "scr_dir2_threshold_5": 0.07258066454365199,
227
+ "scr_dir1_threshold_10": 0.2615384662409978,
228
+ "scr_metric_threshold_10": 0.29032265817460795,
229
+ "scr_dir2_threshold_10": 0.29032265817460795,
230
+ "scr_dir1_threshold_20": 0.5025640696463488,
231
+ "scr_metric_threshold_20": 0.4919355885357207,
232
+ "scr_dir2_threshold_20": 0.4919355885357207,
233
+ "scr_dir1_threshold_50": 0.5897433546167773,
234
+ "scr_metric_threshold_50": 0.6048387910833384,
235
+ "scr_dir2_threshold_50": 0.6048387910833384,
236
+ "scr_dir1_threshold_100": 0.6410256645383223,
237
+ "scr_metric_threshold_100": 0.5080646518055639,
238
+ "scr_dir2_threshold_100": 0.5080646518055639,
239
+ "scr_dir1_threshold_500": 0.4051282004256689,
240
+ "scr_metric_threshold_500": 0.6129032025476177,
241
+ "scr_dir2_threshold_500": 0.6129032025476177
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.33484160943317604,
246
+ "scr_metric_threshold_2": 0.33185833822785904,
247
+ "scr_dir2_threshold_2": 0.33185833822785904,
248
+ "scr_dir1_threshold_5": 0.20361997052132483,
249
+ "scr_metric_threshold_5": 0.4955751465522539,
250
+ "scr_dir2_threshold_5": 0.4955751465522539,
251
+ "scr_dir1_threshold_10": 0.3936651864398094,
252
+ "scr_metric_threshold_10": 0.5221237397640236,
253
+ "scr_dir2_threshold_10": 0.5221237397640236,
254
+ "scr_dir1_threshold_20": 0.375565603537441,
255
+ "scr_metric_threshold_20": 0.5575220398712856,
256
+ "scr_dir2_threshold_20": 0.5575220398712856,
257
+ "scr_dir1_threshold_50": 0.5927603286616061,
258
+ "scr_metric_threshold_50": 0.65044251171851,
259
+ "scr_dir2_threshold_50": 0.65044251171851,
260
+ "scr_dir1_threshold_100": 0.42081442594123414,
261
+ "scr_metric_threshold_100": 0.7699115767775041,
262
+ "scr_dir2_threshold_100": 0.7699115767775041,
263
+ "scr_dir1_threshold_500": 0.14479639351469145,
264
+ "scr_metric_threshold_500": 0.65044251171851,
265
+ "scr_dir2_threshold_500": 0.65044251171851
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.1673818370097199,
270
+ "scr_metric_threshold_2": 0.1673818370097199,
271
+ "scr_dir2_threshold_2": 0.033333181956485214,
272
+ "scr_dir1_threshold_5": 0.23175970276473412,
273
+ "scr_metric_threshold_5": 0.23175970276473412,
274
+ "scr_dir2_threshold_5": 0.12857145289985059,
275
+ "scr_dir1_threshold_10": 0.18025746132349915,
276
+ "scr_metric_threshold_10": 0.18025746132349915,
277
+ "scr_dir2_threshold_10": 0.19999994323368195,
278
+ "scr_dir1_threshold_20": 0.283261688392087,
279
+ "scr_metric_threshold_20": 0.283261688392087,
280
+ "scr_dir2_threshold_20": 0.32380955354426183,
281
+ "scr_dir1_threshold_50": 0.3090129370196455,
282
+ "scr_metric_threshold_50": 0.3090129370196455,
283
+ "scr_dir2_threshold_50": -0.06190480515528994,
284
+ "scr_dir1_threshold_100": 0.381974125960709,
285
+ "scr_metric_threshold_100": 0.381974125960709,
286
+ "scr_dir2_threshold_100": 0.033333181956485214,
287
+ "scr_dir1_threshold_500": 0.15450646850982275,
288
+ "scr_metric_threshold_500": 0.15450646850982275,
289
+ "scr_dir2_threshold_500": 0.02380949677794379
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_16k/average_l0_445",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_16k_average_l0_82_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732148064391,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.29538624167885136,
76
+ "scr_metric_threshold_2": 0.13466911203574558,
77
+ "scr_dir2_threshold_2": 0.12493838720251739,
78
+ "scr_dir1_threshold_5": 0.33162325188553093,
79
+ "scr_metric_threshold_5": 0.21467081912678176,
80
+ "scr_dir2_threshold_5": 0.20058179176740487,
81
+ "scr_dir1_threshold_10": 0.31609306995184094,
82
+ "scr_metric_threshold_10": 0.2892841392955365,
83
+ "scr_dir2_threshold_10": 0.27756841175214314,
84
+ "scr_dir1_threshold_20": 0.3097389277918645,
85
+ "scr_metric_threshold_20": 0.37036066216186037,
86
+ "scr_dir2_threshold_20": 0.35530089028683065,
87
+ "scr_dir1_threshold_50": 0.2547891291187052,
88
+ "scr_metric_threshold_50": 0.4098353387871266,
89
+ "scr_dir2_threshold_50": 0.3927982211595323,
90
+ "scr_dir1_threshold_100": 0.21102541578113232,
91
+ "scr_metric_threshold_100": 0.3290507056464713,
92
+ "scr_dir2_threshold_100": 0.3189801849289266,
93
+ "scr_dir1_threshold_500": -0.01311671941284441,
94
+ "scr_metric_threshold_500": 0.3406873895704157,
95
+ "scr_dir2_threshold_500": 0.32280469765709546
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.43749988358469727,
102
+ "scr_metric_threshold_2": 0.019704325740837254,
103
+ "scr_dir2_threshold_2": 0.019704325740837254,
104
+ "scr_dir1_threshold_5": 0.5,
105
+ "scr_metric_threshold_5": 0.03694577592471611,
106
+ "scr_dir2_threshold_5": 0.03694577592471611,
107
+ "scr_dir1_threshold_10": 0.4531256111803394,
108
+ "scr_metric_threshold_10": 0.051724056932709886,
109
+ "scr_dir2_threshold_10": 0.051724056932709886,
110
+ "scr_dir1_threshold_20": 0.42187508731147705,
111
+ "scr_metric_threshold_20": 0.08374378812458251,
112
+ "scr_dir2_threshold_20": 0.08374378812458251,
113
+ "scr_dir1_threshold_50": 0.43749988358469727,
114
+ "scr_metric_threshold_50": 0.1502462728747495,
115
+ "scr_dir2_threshold_50": 0.1502462728747495,
116
+ "scr_dir1_threshold_100": 0.39062549476503666,
117
+ "scr_metric_threshold_100": 0.17733981252431533,
118
+ "scr_dir2_threshold_100": 0.17733981252431533,
119
+ "scr_dir1_threshold_500": -0.6874994179234862,
120
+ "scr_metric_threshold_500": 0.36945805286608774,
121
+ "scr_dir2_threshold_500": 0.36945805286608774
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.21782190488161987,
126
+ "scr_metric_threshold_2": 0.21652430893001515,
127
+ "scr_dir2_threshold_2": 0.21652430893001515,
128
+ "scr_dir1_threshold_5": 0.2376236279867973,
129
+ "scr_metric_threshold_5": 0.3105413782733095,
130
+ "scr_dir2_threshold_5": 0.3105413782733095,
131
+ "scr_dir1_threshold_10": 0.26732680278949705,
132
+ "scr_metric_threshold_10": 0.34188034478314183,
133
+ "scr_dir2_threshold_10": 0.34188034478314183,
134
+ "scr_dir1_threshold_20": 0.20792104332903116,
135
+ "scr_metric_threshold_20": 0.41310551567101156,
136
+ "scr_dir2_threshold_20": 0.41310551567101156,
137
+ "scr_dir1_threshold_50": -0.6930691608552144,
138
+ "scr_metric_threshold_50": 0.48433051674508387,
139
+ "scr_dir2_threshold_50": 0.48433051674508387,
140
+ "scr_dir1_threshold_100": -0.6930691608552144,
141
+ "scr_metric_threshold_100": 0.11396017153231311,
142
+ "scr_dir2_threshold_100": 0.11396017153231311,
143
+ "scr_dir1_threshold_500": -0.8316829930262574,
144
+ "scr_metric_threshold_500": 0.051282068698851026,
145
+ "scr_dir2_threshold_500": 0.051282068698851026
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5714285714285714,
150
+ "scr_metric_threshold_2": 0.037974794330044616,
151
+ "scr_dir2_threshold_2": 0.037974794330044616,
152
+ "scr_dir1_threshold_5": 0.5714285714285714,
153
+ "scr_metric_threshold_5": 0.09873425400116885,
154
+ "scr_dir2_threshold_5": 0.09873425400116885,
155
+ "scr_dir1_threshold_10": 0.5238092084411499,
156
+ "scr_metric_threshold_10": 0.13670889743339407,
157
+ "scr_dir2_threshold_10": 0.13670889743339407,
158
+ "scr_dir1_threshold_20": 0.5238092084411499,
159
+ "scr_metric_threshold_20": 0.22531654967307232,
160
+ "scr_dir2_threshold_20": 0.22531654967307232,
161
+ "scr_dir1_threshold_50": 0.4444442341988618,
162
+ "scr_metric_threshold_50": 0.23037977510490792,
163
+ "scr_dir2_threshold_50": 0.23037977510490792,
164
+ "scr_dir1_threshold_100": 0.09523872597484306,
165
+ "scr_metric_threshold_100": -0.007594838147753411,
166
+ "scr_dir2_threshold_100": -0.007594838147753411,
167
+ "scr_dir1_threshold_500": -0.25396772835429743,
168
+ "scr_metric_threshold_500": 0.025316579852636207,
169
+ "scr_dir2_threshold_500": 0.025316579852636207
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.24409421102676765,
174
+ "scr_metric_threshold_2": 0.15680483129152156,
175
+ "scr_dir2_threshold_2": 0.15680483129152156,
176
+ "scr_dir1_threshold_5": 0.17322802864402692,
177
+ "scr_metric_threshold_5": 0.233727799694535,
178
+ "scr_dir2_threshold_5": 0.233727799694535,
179
+ "scr_dir1_threshold_10": -0.007874385297643128,
180
+ "scr_metric_threshold_10": 0.3017751427116833,
181
+ "scr_dir2_threshold_10": 0.3017751427116833,
182
+ "scr_dir1_threshold_20": -0.08661448365002022,
183
+ "scr_metric_threshold_20": 0.43786982874598,
184
+ "scr_dir2_threshold_20": 0.43786982874598,
185
+ "scr_dir1_threshold_50": 0.32283430937914476,
186
+ "scr_metric_threshold_50": 0.44970417227223664,
187
+ "scr_dir2_threshold_50": 0.44970417227223664,
188
+ "scr_dir1_threshold_100": 0.37007874385297646,
189
+ "scr_metric_threshold_100": 0.16568045667738668,
190
+ "scr_dir2_threshold_100": 0.16568045667738668,
191
+ "scr_dir1_threshold_500": 0.29921256147023567,
192
+ "scr_metric_threshold_500": 0.17455625840835495,
193
+ "scr_dir2_threshold_500": 0.17455625840835495
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.06557389329988525,
198
+ "scr_metric_threshold_2": 0.125,
199
+ "scr_dir2_threshold_2": 0.125,
200
+ "scr_dir1_threshold_5": 0.07650271266233713,
201
+ "scr_metric_threshold_5": 0.28125005820765137,
202
+ "scr_dir2_threshold_5": 0.28125005820765137,
203
+ "scr_dir1_threshold_10": 0.07650271266233713,
204
+ "scr_metric_threshold_10": 0.542968888243172,
205
+ "scr_dir2_threshold_10": 0.542968888243172,
206
+ "scr_dir1_threshold_20": 0.021857964433295084,
207
+ "scr_metric_threshold_20": 0.671875087311477,
208
+ "scr_dir2_threshold_20": 0.671875087311477,
209
+ "scr_dir1_threshold_50": -0.04371592886659017,
210
+ "scr_metric_threshold_50": 0.7304687718278693,
211
+ "scr_dir2_threshold_50": 0.7304687718278693,
212
+ "scr_dir1_threshold_100": -0.021857964433295084,
213
+ "scr_metric_threshold_100": 0.7890624563442614,
214
+ "scr_dir2_threshold_100": 0.7890624563442614,
215
+ "scr_dir1_threshold_500": 0.0,
216
+ "scr_metric_threshold_500": 0.8320313445874334,
217
+ "scr_dir2_threshold_500": 0.8320313445874334
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.29230760766203984,
222
+ "scr_metric_threshold_2": 0.04838718980952953,
223
+ "scr_dir2_threshold_2": 0.04838718980952953,
224
+ "scr_dir1_threshold_5": 0.369230614047073,
225
+ "scr_metric_threshold_5": 0.11290320254761761,
226
+ "scr_dir2_threshold_5": 0.11290320254761761,
227
+ "scr_dir1_threshold_10": 0.38461533759002214,
228
+ "scr_metric_threshold_10": 0.1733872498948507,
229
+ "scr_dir2_threshold_10": 0.1733872498948507,
230
+ "scr_dir1_threshold_20": 0.4615383439750553,
231
+ "scr_metric_threshold_20": 0.22983885116865949,
232
+ "scr_dir2_threshold_20": 0.22983885116865949,
233
+ "scr_dir1_threshold_50": 0.4974359303536512,
234
+ "scr_metric_threshold_50": 0.3346774019107132,
235
+ "scr_dir2_threshold_50": 0.3346774019107132,
236
+ "scr_dir1_threshold_100": 0.4871793461033997,
237
+ "scr_metric_threshold_100": 0.463709667728174,
238
+ "scr_dir2_threshold_100": 0.463709667728174,
239
+ "scr_dir1_threshold_500": 0.47179462256045057,
240
+ "scr_metric_threshold_500": 0.5,
241
+ "scr_dir2_threshold_500": 0.5
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.375565603537441,
246
+ "scr_metric_threshold_2": 0.31415918817422805,
247
+ "scr_dir2_threshold_2": 0.31415918817422805,
248
+ "scr_dir1_threshold_5": 0.49321275755070776,
249
+ "scr_metric_threshold_5": 0.4115043816005218,
250
+ "scr_dir2_threshold_5": 0.4115043816005218,
251
+ "scr_dir1_threshold_10": 0.5565611628568693,
252
+ "scr_metric_threshold_10": 0.4911504249731845,
253
+ "scr_dir2_threshold_10": 0.4911504249731845,
254
+ "scr_dir1_threshold_20": 0.5927603286616061,
255
+ "scr_metric_threshold_20": 0.5663717467667778,
256
+ "scr_dir2_threshold_20": 0.5663717467667778,
257
+ "scr_dir1_threshold_50": 0.6651583905668239,
258
+ "scr_metric_threshold_50": 0.4911504249731845,
259
+ "scr_dir2_threshold_50": 0.4911504249731845,
260
+ "scr_dir1_threshold_100": 0.6651583905668239,
261
+ "scr_metric_threshold_100": 0.5353981682385852,
262
+ "scr_dir2_threshold_100": 0.5353981682385852,
263
+ "scr_dir1_threshold_500": 0.42081442594123414,
264
+ "scr_metric_threshold_500": 0.29646003812059707,
265
+ "scr_dir2_threshold_500": 0.29646003812059707
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.15879825800978847,
270
+ "scr_metric_threshold_2": 0.15879825800978847,
271
+ "scr_dir2_threshold_2": 0.08095245934396302,
272
+ "scr_dir1_threshold_5": 0.23175970276473412,
273
+ "scr_metric_threshold_5": 0.23175970276473412,
274
+ "scr_dir2_threshold_5": 0.11904748388971893,
275
+ "scr_dir1_threshold_10": 0.27467810939215553,
276
+ "scr_metric_threshold_10": 0.27467810939215553,
277
+ "scr_dir2_threshold_10": 0.18095228904500887,
278
+ "scr_dir1_threshold_20": 0.3347639298333219,
279
+ "scr_metric_threshold_20": 0.3347639298333219,
280
+ "scr_dir2_threshold_20": 0.21428575483308432,
281
+ "scr_dir1_threshold_50": 0.4077253745882676,
282
+ "scr_metric_threshold_50": 0.4077253745882676,
283
+ "scr_dir2_threshold_50": 0.27142843356751334,
284
+ "scr_dir1_threshold_100": 0.3948497502744883,
285
+ "scr_metric_threshold_100": 0.3948497502744883,
286
+ "scr_dir2_threshold_100": 0.3142855845341302,
287
+ "scr_dir1_threshold_500": 0.47639477402936536,
288
+ "scr_metric_threshold_500": 0.47639477402936536,
289
+ "scr_dir2_threshold_500": 0.33333323872280324
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_16k/average_l0_82",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_141_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732185588197,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.23232346198202863,
76
+ "scr_metric_threshold_2": 0.059125367686381894,
77
+ "scr_dir2_threshold_2": 0.05762577096188516,
78
+ "scr_dir1_threshold_5": 0.2746924694425543,
79
+ "scr_metric_threshold_5": 0.09918917661772152,
80
+ "scr_dir2_threshold_5": 0.09440426022101628,
81
+ "scr_dir1_threshold_10": 0.3128291501076911,
82
+ "scr_metric_threshold_10": 0.1706267241106558,
83
+ "scr_dir2_threshold_10": 0.1618437729202787,
84
+ "scr_dir1_threshold_20": 0.30428358315117054,
85
+ "scr_metric_threshold_20": 0.2567338288563896,
86
+ "scr_dir2_threshold_20": 0.25217628554894983,
87
+ "scr_dir1_threshold_50": 0.3516593797484374,
88
+ "scr_metric_threshold_50": 0.33743051590679035,
89
+ "scr_dir2_threshold_50": 0.33172595467299487,
90
+ "scr_dir1_threshold_100": 0.28862343129029655,
91
+ "scr_metric_threshold_100": 0.4104462650542652,
92
+ "scr_dir2_threshold_100": 0.40146403760364824,
93
+ "scr_dir1_threshold_500": 0.0010242043347789484,
94
+ "scr_metric_threshold_500": 0.3577786731975153,
95
+ "scr_dir2_threshold_500": 0.3457308422063547
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4843752037267798,
102
+ "scr_metric_threshold_2": 0.0,
103
+ "scr_dir2_threshold_2": 0.0,
104
+ "scr_dir1_threshold_5": 0.43749988358469727,
105
+ "scr_metric_threshold_5": 0.019704325740837254,
106
+ "scr_dir2_threshold_5": 0.019704325740837254,
107
+ "scr_dir1_threshold_10": 0.4687504074535596,
108
+ "scr_metric_threshold_10": 0.039408798291137845,
109
+ "scr_dir2_threshold_10": 0.039408798291137845,
110
+ "scr_dir1_threshold_20": 0.42187508731147705,
111
+ "scr_metric_threshold_20": 0.05911327084143844,
112
+ "scr_dir2_threshold_20": 0.05911327084143844,
113
+ "scr_dir1_threshold_50": 0.4531256111803394,
114
+ "scr_metric_threshold_50": 0.0935960243997328,
115
+ "scr_dir2_threshold_50": 0.0935960243997328,
116
+ "scr_dir1_threshold_100": 0.4531256111803394,
117
+ "scr_metric_threshold_100": 0.1502462728747495,
118
+ "scr_dir2_threshold_100": 0.1502462728747495,
119
+ "scr_dir1_threshold_500": -0.31249965075409175,
120
+ "scr_metric_threshold_500": 0.27586202846635494,
121
+ "scr_dir2_threshold_500": 0.27586202846635494
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.2574259412369084,
126
+ "scr_metric_threshold_2": 0.08831913718288759,
127
+ "scr_dir2_threshold_2": 0.08831913718288759,
128
+ "scr_dir1_threshold_5": 0.2772276643420858,
129
+ "scr_metric_threshold_5": 0.15954413825695987,
130
+ "scr_dir2_threshold_5": 0.15954413825695987,
131
+ "scr_dir1_threshold_10": 0.2772276643420858,
132
+ "scr_metric_threshold_10": 0.18803430850038624,
133
+ "scr_dir2_threshold_10": 0.18803430850038624,
134
+ "scr_dir1_threshold_20": 0.3069308391447856,
135
+ "scr_metric_threshold_20": 0.24501430935964405,
136
+ "scr_dir2_threshold_20": 0.24501430935964405,
137
+ "scr_dir1_threshold_50": 0.10891124751327678,
138
+ "scr_metric_threshold_50": 0.4045584476166039,
139
+ "scr_dir2_threshold_50": 0.4045584476166039,
140
+ "scr_dir1_threshold_100": -0.09900979581575439,
141
+ "scr_metric_threshold_100": 0.4871794828252872,
142
+ "scr_dir2_threshold_100": 0.4871794828252872,
143
+ "scr_dir1_threshold_500": 0.009900861552588701,
144
+ "scr_metric_threshold_500": 0.09686620523729525,
145
+ "scr_dir2_threshold_500": 0.09686620523729525
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5714285714285714,
150
+ "scr_metric_threshold_2": 0.02025320352298121,
151
+ "scr_dir2_threshold_2": 0.02025320352298121,
152
+ "scr_dir1_threshold_5": 0.5555557658011382,
153
+ "scr_metric_threshold_5": 0.05316462152337083,
154
+ "scr_dir2_threshold_5": 0.05316462152337083,
155
+ "scr_dir1_threshold_10": 0.5555557658011382,
156
+ "scr_metric_threshold_10": 0.09620264128525105,
157
+ "scr_dir2_threshold_10": 0.09620264128525105,
158
+ "scr_dir1_threshold_20": 0.5079364028137167,
159
+ "scr_metric_threshold_20": 0.1848101426271099,
160
+ "scr_dir2_threshold_20": 0.1848101426271099,
161
+ "scr_dir1_threshold_50": 0.396825817316562,
162
+ "scr_metric_threshold_50": 0.25822796767346196,
163
+ "scr_dir2_threshold_50": 0.25822796767346196,
164
+ "scr_dir1_threshold_100": 0.2380958688319859,
165
+ "scr_metric_threshold_100": 0.3189874273445862,
166
+ "scr_dir2_threshold_100": 0.3189874273445862,
167
+ "scr_dir1_threshold_500": -1.460317039826295,
168
+ "scr_metric_threshold_500": -0.010126450863671215,
169
+ "scr_dir2_threshold_500": -0.010126450863671215
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.30708647743987205,
174
+ "scr_metric_threshold_2": 0.05029591590031494,
175
+ "scr_dir2_threshold_2": 0.05029591590031494,
176
+ "scr_dir1_threshold_5": 0.32283430937914476,
177
+ "scr_metric_threshold_5": 0.11242599898178338,
178
+ "scr_dir2_threshold_5": 0.11242599898178338,
179
+ "scr_dir1_threshold_10": 0.28346426020295623,
180
+ "scr_metric_threshold_10": 0.23076925789924663,
181
+ "scr_dir2_threshold_10": 0.23076925789924663,
182
+ "scr_dir1_threshold_20": 0.2362202950571313,
183
+ "scr_metric_threshold_20": 0.34615397502142153,
184
+ "scr_dir2_threshold_20": 0.34615397502142153,
185
+ "scr_dir1_threshold_50": 0.6850391372624848,
186
+ "scr_metric_threshold_50": 0.3017751427116833,
187
+ "scr_dir2_threshold_50": 0.3017751427116833,
188
+ "scr_dir1_threshold_100": 0.6299212561470235,
189
+ "scr_metric_threshold_100": 0.36686394393354327,
190
+ "scr_dir2_threshold_100": 0.36686394393354327,
191
+ "scr_dir1_threshold_500": 0.37007874385297646,
192
+ "scr_metric_threshold_500": 0.0710058848124367,
193
+ "scr_dir2_threshold_500": 0.0710058848124367
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.021857964433295084,
198
+ "scr_metric_threshold_2": 0.10156257275956422,
199
+ "scr_dir2_threshold_2": 0.10156257275956422,
200
+ "scr_dir1_threshold_5": 0.08743153202478901,
201
+ "scr_metric_threshold_5": 0.13671883003552063,
202
+ "scr_dir2_threshold_5": 0.13671883003552063,
203
+ "scr_dir1_threshold_10": 0.10382508677685816,
204
+ "scr_metric_threshold_10": 0.26171883003552066,
205
+ "scr_dir2_threshold_10": 0.26171883003552066,
206
+ "scr_dir1_threshold_20": 0.08196712234356307,
207
+ "scr_metric_threshold_20": 0.4414063154836078,
208
+ "scr_dir2_threshold_20": 0.4414063154836078,
209
+ "scr_dir1_threshold_50": 0.03278678379574697,
210
+ "scr_metric_threshold_50": 0.5546874854480871,
211
+ "scr_dir2_threshold_50": 0.5546874854480871,
212
+ "scr_dir1_threshold_100": 0.0,
213
+ "scr_metric_threshold_100": 0.6835936845163922,
214
+ "scr_dir2_threshold_100": 0.6835936845163922,
215
+ "scr_dir1_threshold_500": -0.06557389329988525,
216
+ "scr_metric_threshold_500": 0.7812500582076514,
217
+ "scr_dir2_threshold_500": 0.7812500582076514
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.06153828284208406,
222
+ "scr_metric_threshold_2": 0.06854845881151235,
223
+ "scr_dir2_threshold_2": 0.06854845881151235,
224
+ "scr_dir1_threshold_5": 0.18461515419110838,
225
+ "scr_metric_threshold_5": 0.07258066454365199,
226
+ "scr_dir2_threshold_5": 0.07258066454365199,
227
+ "scr_dir1_threshold_10": 0.27179474482639304,
228
+ "scr_metric_threshold_10": 0.12096785435318151,
229
+ "scr_dir2_threshold_10": 0.12096785435318151,
230
+ "scr_dir1_threshold_20": 0.29230760766203984,
231
+ "scr_metric_threshold_20": 0.18145166135912996,
232
+ "scr_dir2_threshold_20": 0.18145166135912996,
233
+ "scr_dir1_threshold_50": 0.3282048883757795,
234
+ "scr_metric_threshold_50": 0.2943548639067476,
235
+ "scr_dir2_threshold_50": 0.2943548639067476,
236
+ "scr_dir1_threshold_100": 0.338461472626031,
237
+ "scr_metric_threshold_100": 0.3346774019107132,
238
+ "scr_dir2_threshold_100": 0.3346774019107132,
239
+ "scr_dir1_threshold_500": 0.5282050717746932,
240
+ "scr_metric_threshold_500": 0.5645162530793727,
241
+ "scr_dir2_threshold_500": 0.5645162530793727
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.09049764480758624,
246
+ "scr_metric_threshold_2": 0.07964604337266272,
247
+ "scr_dir2_threshold_2": 0.07964604337266272,
248
+ "scr_dir1_threshold_5": 0.20361997052132483,
249
+ "scr_metric_threshold_5": 0.11061935816350187,
250
+ "scr_dir2_threshold_5": 0.11061935816350187,
251
+ "scr_dir1_threshold_10": 0.3574660206350726,
252
+ "scr_metric_threshold_10": 0.24336285169705757,
253
+ "scr_dir2_threshold_10": 0.24336285169705757,
254
+ "scr_dir1_threshold_20": 0.39819001473933757,
255
+ "scr_metric_threshold_20": 0.4070796600214524,
256
+ "scr_dir2_threshold_20": 0.4070796600214524,
257
+ "scr_dir1_threshold_50": 0.5294116536511888,
258
+ "scr_metric_threshold_50": 0.5132742966058849,
259
+ "scr_dir2_threshold_50": 0.5132742966058849,
260
+ "scr_dir1_threshold_100": 0.4479639351469146,
261
+ "scr_metric_threshold_100": 0.6415928048230177,
262
+ "scr_dir2_threshold_100": 0.6415928048230177,
263
+ "scr_dir1_threshold_500": 0.4705883463488112,
264
+ "scr_metric_threshold_500": 0.615044211611248,
265
+ "scr_dir2_threshold_500": 0.615044211611248
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.0643776099411321,
270
+ "scr_metric_threshold_2": 0.0643776099411321,
271
+ "scr_dir2_threshold_2": 0.05238083614515829,
272
+ "scr_dir1_threshold_5": 0.12875547569614632,
273
+ "scr_metric_threshold_5": 0.12875547569614632,
274
+ "scr_dir2_threshold_5": 0.09047614452250444,
275
+ "scr_dir1_threshold_10": 0.18454925082346488,
276
+ "scr_metric_threshold_10": 0.18454925082346488,
277
+ "scr_dir2_threshold_10": 0.11428564130044823,
278
+ "scr_dir1_threshold_20": 0.1888412961373127,
279
+ "scr_metric_threshold_20": 0.1888412961373127,
280
+ "scr_dir2_threshold_20": 0.1523809496777944,
281
+ "scr_dir1_threshold_50": 0.27896989889212126,
282
+ "scr_metric_threshold_50": 0.27896989889212126,
283
+ "scr_dir2_threshold_50": 0.2333334090217574,
284
+ "scr_dir1_threshold_100": 0.30042910220583197,
285
+ "scr_metric_threshold_100": 0.30042910220583197,
286
+ "scr_dir2_threshold_100": 0.22857128260089646,
287
+ "scr_dir1_threshold_500": 0.46781119502943397,
288
+ "scr_metric_threshold_500": 0.46781119502943397,
289
+ "scr_dir2_threshold_500": 0.3714285471001494
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_65k/average_l0_141",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_21_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732186456495,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.162743310082424,
76
+ "scr_metric_threshold_2": 0.054995504572713565,
77
+ "scr_dir2_threshold_2": 0.05724359867982154,
78
+ "scr_dir1_threshold_5": 0.18937889627004625,
79
+ "scr_metric_threshold_5": 0.11411829182132366,
80
+ "scr_dir2_threshold_5": 0.11618249103907696,
81
+ "scr_dir1_threshold_10": 0.2147810735684718,
82
+ "scr_metric_threshold_10": 0.17237895685004703,
83
+ "scr_dir2_threshold_10": 0.1744354671334645,
84
+ "scr_dir1_threshold_20": 0.1853685310179895,
85
+ "scr_metric_threshold_20": 0.22787072978861378,
86
+ "scr_dir2_threshold_20": 0.22164500878252374,
87
+ "scr_dir1_threshold_50": 0.22408357475862528,
88
+ "scr_metric_threshold_50": 0.31235877318215505,
89
+ "scr_dir2_threshold_50": 0.3007605943900368,
90
+ "scr_dir1_threshold_100": 0.13928566694099936,
91
+ "scr_metric_threshold_100": 0.331683629227898,
92
+ "scr_dir2_threshold_100": 0.32049671841439986,
93
+ "scr_dir1_threshold_500": 0.020506951398666,
94
+ "scr_metric_threshold_500": 0.36366173341515085,
95
+ "scr_dir2_threshold_500": 0.3541864785002137
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.3437501746229541,
102
+ "scr_metric_threshold_2": 0.009852089465686957,
103
+ "scr_dir2_threshold_2": 0.009852089465686957,
104
+ "scr_dir1_threshold_5": 0.3593749708961743,
105
+ "scr_metric_threshold_5": 0.03448275355829437,
106
+ "scr_dir2_threshold_5": 0.03448275355829437,
107
+ "scr_dir1_threshold_10": 0.3749997671693945,
108
+ "scr_metric_threshold_10": 0.0467980121998664,
109
+ "scr_dir2_threshold_10": 0.0467980121998664,
110
+ "scr_dir1_threshold_20": 0.3437501746229541,
111
+ "scr_metric_threshold_20": 0.06650233794070366,
112
+ "scr_dir2_threshold_20": 0.06650233794070366,
113
+ "scr_dir1_threshold_50": 0.28125005820765137,
114
+ "scr_metric_threshold_50": 0.08620681049100425,
115
+ "scr_dir2_threshold_50": 0.08620681049100425,
116
+ "scr_dir1_threshold_100": 0.21874994179234863,
117
+ "scr_metric_threshold_100": 0.07881774339173903,
118
+ "scr_dir2_threshold_100": 0.07881774339173903,
119
+ "scr_dir1_threshold_500": -0.046874388819660585,
120
+ "scr_metric_threshold_500": 0.09605904676615455,
121
+ "scr_dir2_threshold_500": 0.09605904676615455
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.18811873007892008,
126
+ "scr_metric_threshold_2": 0.09971517131749864,
127
+ "scr_dir2_threshold_2": 0.09971517131749864,
128
+ "scr_dir1_threshold_5": 0.17821786852633137,
129
+ "scr_metric_threshold_5": 0.20797724087560748,
130
+ "scr_dir2_threshold_5": 0.20797724087560748,
131
+ "scr_dir1_threshold_10": 0.1683170069737427,
132
+ "scr_metric_threshold_10": 0.24786327543984746,
133
+ "scr_dir2_threshold_10": 0.24786327543984746,
134
+ "scr_dir1_threshold_20": -0.019801723105177402,
135
+ "scr_metric_threshold_20": 0.29059827608429084,
136
+ "scr_dir2_threshold_20": 0.29059827608429084,
137
+ "scr_dir1_threshold_50": 0.0594057594604659,
138
+ "scr_metric_threshold_50": 0.4017094815364005,
139
+ "scr_dir2_threshold_50": 0.4017094815364005,
140
+ "scr_dir1_threshold_100": -0.07920807271057699,
141
+ "scr_metric_threshold_100": 0.21652430893001515,
142
+ "scr_dir2_threshold_100": 0.21652430893001515,
143
+ "scr_dir1_threshold_500": -0.5049504307762943,
144
+ "scr_metric_threshold_500": 0.31339034435351293,
145
+ "scr_dir2_threshold_500": 0.31339034435351293
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.396825817316562,
150
+ "scr_metric_threshold_2": 0.02025320352298121,
151
+ "scr_dir2_threshold_2": 0.02025320352298121,
152
+ "scr_dir1_threshold_5": 0.4603179859314168,
153
+ "scr_metric_threshold_5": 0.08354442680784264,
154
+ "scr_dir2_threshold_5": 0.08354442680784264,
155
+ "scr_dir1_threshold_10": 0.4444442341988618,
156
+ "scr_metric_threshold_10": 0.11898745752415006,
157
+ "scr_dir2_threshold_10": 0.11898745752415006,
158
+ "scr_dir1_threshold_20": 0.26984148008685244,
159
+ "scr_metric_threshold_20": 0.1949367443886005,
160
+ "scr_dir2_threshold_20": 0.1949367443886005,
161
+ "scr_dir1_threshold_50": 0.30158709134171896,
162
+ "scr_metric_threshold_50": 0.24556975319605354,
163
+ "scr_dir2_threshold_50": 0.24556975319605354,
164
+ "scr_dir1_threshold_100": -0.5396820140685832,
165
+ "scr_metric_threshold_100": 0.29113923477603215,
166
+ "scr_dir2_threshold_100": 0.29113923477603215,
167
+ "scr_dir1_threshold_500": -0.9206340796525903,
168
+ "scr_metric_threshold_500": 0.21518994791158172,
169
+ "scr_dir2_threshold_500": 0.21518994791158172
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.1102362315589293,
174
+ "scr_metric_threshold_2": 0.06213025942657158,
175
+ "scr_dir2_threshold_2": 0.06213025942657158,
176
+ "scr_dir1_threshold_5": 0.11811014752856565,
177
+ "scr_metric_threshold_5": 0.15088757135584166,
178
+ "scr_dir2_threshold_5": 0.15088757135584166,
179
+ "scr_dir1_threshold_10": 0.09448793029164981,
180
+ "scr_metric_threshold_10": 0.21893491437299,
181
+ "scr_dir2_threshold_10": 0.21893491437299,
182
+ "scr_dir1_threshold_20": 0.12598406349820201,
183
+ "scr_metric_threshold_20": 0.17455625840835495,
184
+ "scr_dir2_threshold_20": 0.17455625840835495,
185
+ "scr_dir1_threshold_50": 0.11811014752856565,
186
+ "scr_metric_threshold_50": 0.2573964867470483,
187
+ "scr_dir2_threshold_50": 0.2573964867470483,
188
+ "scr_dir1_threshold_100": 0.3779526598226128,
189
+ "scr_metric_threshold_100": 0.30769240264736325,
190
+ "scr_dir2_threshold_100": 0.30769240264736325,
191
+ "scr_dir1_threshold_500": 0.29921256147023567,
192
+ "scr_metric_threshold_500": 0.17159771661306658,
193
+ "scr_dir2_threshold_500": 0.17159771661306658
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.03825119347697291,
198
+ "scr_metric_threshold_2": 0.07812514551912843,
199
+ "scr_dir2_threshold_2": 0.07812514551912843,
200
+ "scr_dir1_threshold_5": 0.04918033854781611,
201
+ "scr_metric_threshold_5": 0.10937497089617432,
202
+ "scr_dir2_threshold_5": 0.10937497089617432,
203
+ "scr_dir1_threshold_10": 0.06557389329988525,
204
+ "scr_metric_threshold_10": 0.20703134458743347,
205
+ "scr_dir2_threshold_10": 0.20703134458743347,
206
+ "scr_dir1_threshold_20": 0.021857964433295084,
207
+ "scr_metric_threshold_20": 0.30078128637978213,
208
+ "scr_dir2_threshold_20": 0.30078128637978213,
209
+ "scr_dir1_threshold_50": 0.00546440968122594,
210
+ "scr_metric_threshold_50": 0.4726563736912592,
211
+ "scr_dir2_threshold_50": 0.4726563736912592,
212
+ "scr_dir1_threshold_100": 0.03825119347697291,
213
+ "scr_metric_threshold_100": 0.5703125145519129,
214
+ "scr_dir2_threshold_100": 0.5703125145519129,
215
+ "scr_dir1_threshold_500": 0.01092881936245188,
216
+ "scr_metric_threshold_500": 0.6835936845163922,
217
+ "scr_dir2_threshold_500": 0.6835936845163922
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.06153828284208406,
222
+ "scr_metric_threshold_2": 0.04838718980952953,
223
+ "scr_dir2_threshold_2": 0.04838718980952953,
224
+ "scr_dir1_threshold_5": 0.06153828284208406,
225
+ "scr_metric_threshold_5": 0.06451625307937271,
226
+ "scr_dir2_threshold_5": 0.06451625307937271,
227
+ "scr_dir1_threshold_10": 0.14871787347736873,
228
+ "scr_metric_threshold_10": 0.0927419335456348,
229
+ "scr_dir2_threshold_10": 0.0927419335456348,
230
+ "scr_dir1_threshold_20": 0.1999998777340575,
231
+ "scr_metric_threshold_20": 0.1733872498948507,
232
+ "scr_dir2_threshold_20": 0.1733872498948507,
233
+ "scr_dir1_threshold_50": 0.28205102341178834,
234
+ "scr_metric_threshold_50": 0.27419359490476475,
235
+ "scr_dir2_threshold_50": 0.27419359490476475,
236
+ "scr_dir1_threshold_100": 0.36410247475437535,
237
+ "scr_metric_threshold_100": 0.3225807847142943,
238
+ "scr_dir2_threshold_100": 0.3225807847142943,
239
+ "scr_dir1_threshold_500": 0.45641020468235766,
240
+ "scr_metric_threshold_500": 0.5080646518055639,
241
+ "scr_dir2_threshold_500": 0.5080646518055639
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.08597281650805809,
246
+ "scr_metric_threshold_2": 0.04424774326540074,
247
+ "scr_dir2_threshold_2": 0.04424774326540074,
248
+ "scr_dir1_threshold_5": 0.1809955593194283,
249
+ "scr_metric_threshold_5": 0.15486710142890261,
250
+ "scr_dir2_threshold_5": 0.15486710142890261,
251
+ "scr_dir1_threshold_10": 0.2714932041270145,
252
+ "scr_metric_threshold_10": 0.29646003812059707,
253
+ "scr_dir2_threshold_10": 0.29646003812059707,
254
+ "scr_dir1_threshold_20": 0.3438915357364882,
255
+ "scr_metric_threshold_20": 0.4247788100750834,
256
+ "scr_dir2_threshold_20": 0.4247788100750834,
257
+ "scr_dir1_threshold_50": 0.4615384200454991,
258
+ "scr_metric_threshold_50": 0.4778759964986229,
259
+ "scr_dir2_threshold_50": 0.4778759964986229,
260
+ "scr_dir1_threshold_100": 0.42081442594123414,
261
+ "scr_metric_threshold_100": 0.5530973182922162,
262
+ "scr_dir2_threshold_100": 0.5530973182922162,
263
+ "scr_dir1_threshold_500": 0.47511317464833935,
264
+ "scr_metric_threshold_500": 0.5265487250804465,
265
+ "scr_dir2_threshold_500": 0.5265487250804465
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.07725323425491137,
270
+ "scr_metric_threshold_2": 0.07725323425491137,
271
+ "scr_dir2_threshold_2": 0.09523798711177515,
272
+ "scr_dir1_threshold_5": 0.10729601656855352,
273
+ "scr_metric_threshold_5": 0.10729601656855352,
274
+ "scr_dir2_threshold_5": 0.12380961031057988,
275
+ "scr_dir1_threshold_10": 0.15021467900985702,
276
+ "scr_metric_threshold_10": 0.15021467900985702,
277
+ "scr_dir2_threshold_10": 0.16666676127719673,
278
+ "scr_dir1_threshold_20": 0.19742487513724416,
279
+ "scr_metric_threshold_20": 0.19742487513724416,
280
+ "scr_dir2_threshold_20": 0.14761910708852366,
281
+ "scr_dir1_threshold_50": 0.283261688392087,
282
+ "scr_metric_threshold_50": 0.283261688392087,
283
+ "scr_dir2_threshold_50": 0.19047625805514054,
284
+ "scr_dir1_threshold_100": 0.3133047265196112,
285
+ "scr_metric_threshold_100": 0.3133047265196112,
286
+ "scr_dir2_threshold_100": 0.22380944001162575,
287
+ "scr_dir1_threshold_500": 0.3948497502744883,
288
+ "scr_metric_threshold_500": 0.3948497502744883,
289
+ "scr_dir2_threshold_500": 0.3190477109549911
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_65k/average_l0_21",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_297_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732187327097,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.258840715540528,
76
+ "scr_metric_threshold_2": 0.09678354686653676,
77
+ "scr_dir2_threshold_2": 0.09683464654509932,
78
+ "scr_dir1_threshold_5": 0.29374137805387457,
79
+ "scr_metric_threshold_5": 0.1321740796590132,
80
+ "scr_dir2_threshold_5": 0.1288223428908275,
81
+ "scr_dir1_threshold_10": 0.354705230641241,
82
+ "scr_metric_threshold_10": 0.18795128660704097,
83
+ "scr_dir2_threshold_10": 0.18327622652506143,
84
+ "scr_dir1_threshold_20": 0.35573376672116297,
85
+ "scr_metric_threshold_20": 0.2610464511087111,
86
+ "scr_dir2_threshold_20": 0.2633967480751577,
87
+ "scr_dir1_threshold_50": 0.3960858577473932,
88
+ "scr_metric_threshold_50": 0.3353283560089671,
89
+ "scr_dir2_threshold_50": 0.33969171703565565,
90
+ "scr_dir1_threshold_100": 0.28581122099759626,
91
+ "scr_metric_threshold_100": 0.3677531780350695,
92
+ "scr_dir2_threshold_100": 0.37091076449923926,
93
+ "scr_dir1_threshold_500": 0.07343462469373627,
94
+ "scr_metric_threshold_500": 0.41586687254632826,
95
+ "scr_dir2_threshold_500": 0.4229330738586557
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.5156247962732202,
102
+ "scr_metric_threshold_2": 0.009852089465686957,
103
+ "scr_dir2_threshold_2": 0.009852089465686957,
104
+ "scr_dir1_threshold_5": 0.578124912688523,
105
+ "scr_metric_threshold_5": 0.022167494916722333,
106
+ "scr_dir2_threshold_5": 0.022167494916722333,
107
+ "scr_dir1_threshold_10": 0.5625001164153027,
108
+ "scr_metric_threshold_10": 0.0467980121998664,
109
+ "scr_dir2_threshold_10": 0.0467980121998664,
110
+ "scr_dir1_threshold_20": 0.5468753201420825,
111
+ "scr_metric_threshold_20": 0.10344826067488311,
112
+ "scr_dir2_threshold_20": 0.10344826067488311,
113
+ "scr_dir1_threshold_50": 0.5625001164153027,
114
+ "scr_metric_threshold_50": 0.12807877795802716,
115
+ "scr_dir2_threshold_50": 0.12807877795802716,
116
+ "scr_dir1_threshold_100": 0.5625001164153027,
117
+ "scr_metric_threshold_100": 0.1453200813324427,
118
+ "scr_dir2_threshold_100": 0.1453200813324427,
119
+ "scr_dir1_threshold_500": -0.31249965075409175,
120
+ "scr_metric_threshold_500": 0.019704325740837254,
121
+ "scr_dir2_threshold_500": 0.019704325740837254
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.35643573705266274,
126
+ "scr_metric_threshold_2": 0.05982913675325868,
127
+ "scr_dir2_threshold_2": 0.05982913675325868,
128
+ "scr_dir1_threshold_5": 0.31683170069737426,
129
+ "scr_metric_threshold_5": 0.12535620566692415,
130
+ "scr_dir2_threshold_5": 0.12535620566692415,
131
+ "scr_dir1_threshold_10": 0.46534639442100584,
132
+ "scr_metric_threshold_10": 0.19088327458058965,
133
+ "scr_dir2_threshold_10": 0.19088327458058965,
134
+ "scr_dir1_threshold_20": 0.45544553286841716,
135
+ "scr_metric_threshold_20": 0.233618275225033,
136
+ "scr_dir2_threshold_20": 0.233618275225033,
137
+ "scr_dir1_threshold_50": 0.514851292328883,
138
+ "scr_metric_threshold_50": 0.3532763789177529,
139
+ "scr_dir2_threshold_50": 0.3532763789177529,
140
+ "scr_dir1_threshold_100": 0.15841614542115398,
141
+ "scr_metric_threshold_100": 0.3988605154561971,
142
+ "scr_dir2_threshold_100": 0.3988605154561971,
143
+ "scr_dir1_threshold_500": -1.1386138321710428,
144
+ "scr_metric_threshold_500": 0.5555556876329536,
145
+ "scr_dir2_threshold_500": 0.5555556876329536
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5238092084411499,
150
+ "scr_metric_threshold_2": 0.04556963247779803,
151
+ "scr_dir2_threshold_2": 0.04556963247779803,
152
+ "scr_dir1_threshold_5": 0.5396829601737049,
153
+ "scr_metric_threshold_5": 0.07088621233043424,
154
+ "scr_dir2_threshold_5": 0.07088621233043424,
155
+ "scr_dir1_threshold_10": 0.5238092084411499,
156
+ "scr_metric_threshold_10": 0.10632924304674166,
157
+ "scr_dir2_threshold_10": 0.10632924304674166,
158
+ "scr_dir1_threshold_20": 0.5396829601737049,
159
+ "scr_metric_threshold_20": 0.13924051014931188,
160
+ "scr_dir2_threshold_20": 0.13924051014931188,
161
+ "scr_dir1_threshold_50": 0.4761907915588501,
162
+ "scr_metric_threshold_50": 0.22025317334341732,
163
+ "scr_dir2_threshold_50": 0.22025317334341732,
164
+ "scr_dir1_threshold_100": 0.49206359718628334,
165
+ "scr_metric_threshold_100": 0.055696234239288635,
166
+ "scr_dir2_threshold_100": 0.055696234239288635,
167
+ "scr_dir1_threshold_500": -0.14285714285714285,
168
+ "scr_metric_threshold_500": -0.017721439909244015,
169
+ "scr_dir2_threshold_500": -0.017721439909244015
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.37007874385297646,
174
+ "scr_metric_threshold_2": 0.0384615723740583,
175
+ "scr_dir2_threshold_2": 0.0384615723740583,
176
+ "scr_dir1_threshold_5": 0.3149603934095084,
177
+ "scr_metric_threshold_5": 0.0710058848124367,
178
+ "scr_dir2_threshold_5": 0.0710058848124367,
179
+ "scr_dir1_threshold_10": 0.41732270899880136,
180
+ "scr_metric_threshold_10": 0.15976337308680993,
181
+ "scr_dir2_threshold_10": 0.15976337308680993,
182
+ "scr_dir1_threshold_20": 0.3307086946767879,
183
+ "scr_metric_threshold_20": 0.28402371559484996,
184
+ "scr_dir2_threshold_20": 0.28402371559484996,
185
+ "scr_dir1_threshold_50": 0.3622048278833401,
186
+ "scr_metric_threshold_50": 0.2899407991854267,
187
+ "scr_dir2_threshold_50": 0.2899407991854267,
188
+ "scr_dir1_threshold_100": -0.18897679923931315,
189
+ "scr_metric_threshold_100": 0.328402371559485,
190
+ "scr_dir2_threshold_100": 0.328402371559485,
191
+ "scr_dir1_threshold_500": 0.41732270899880136,
192
+ "scr_metric_threshold_500": 0.0917160300696616,
193
+ "scr_dir2_threshold_500": 0.0917160300696616
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.08743153202478901,
198
+ "scr_metric_threshold_2": 0.44531251455191284,
199
+ "scr_dir2_threshold_2": 0.44531251455191284,
200
+ "scr_dir1_threshold_5": 0.12568305121015325,
201
+ "scr_metric_threshold_5": 0.4765625727595642,
202
+ "scr_dir2_threshold_5": 0.4765625727595642,
203
+ "scr_dir1_threshold_10": 0.09836067709563222,
204
+ "scr_metric_threshold_10": 0.5156250291038257,
205
+ "scr_dir2_threshold_10": 0.5156250291038257,
206
+ "scr_dir1_threshold_20": 0.04371592886659017,
207
+ "scr_metric_threshold_20": 0.5585936845163922,
208
+ "scr_dir2_threshold_20": 0.5585936845163922,
209
+ "scr_dir1_threshold_50": 0.15300542532467426,
210
+ "scr_metric_threshold_50": 0.63281239813661,
211
+ "scr_dir2_threshold_50": 0.63281239813661,
212
+ "scr_dir1_threshold_100": -0.03278678379574697,
213
+ "scr_metric_threshold_100": 0.6992187136202179,
214
+ "scr_dir2_threshold_100": 0.6992187136202179,
215
+ "scr_dir1_threshold_500": 0.1584698350059002,
216
+ "scr_metric_threshold_500": 0.8320313445874334,
217
+ "scr_dir2_threshold_500": 0.8320313445874334
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.06153828284208406,
222
+ "scr_metric_threshold_2": 0.04838718980952953,
223
+ "scr_dir2_threshold_2": 0.04838718980952953,
224
+ "scr_dir1_threshold_5": 0.158974152062764,
225
+ "scr_metric_threshold_5": 0.06451625307937271,
226
+ "scr_dir2_threshold_5": 0.06451625307937271,
227
+ "scr_dir1_threshold_10": 0.24615374269804866,
228
+ "scr_metric_threshold_10": 0.10483879108333834,
229
+ "scr_dir2_threshold_10": 0.10483879108333834,
230
+ "scr_dir1_threshold_20": 0.28717946836934216,
231
+ "scr_metric_threshold_20": 0.17741945562699032,
232
+ "scr_dir2_threshold_20": 0.17741945562699032,
233
+ "scr_dir1_threshold_50": 0.3025638862474351,
234
+ "scr_metric_threshold_50": 0.282258006369044,
235
+ "scr_dir2_threshold_50": 0.282258006369044,
236
+ "scr_dir1_threshold_100": 0.34871775121142623,
237
+ "scr_metric_threshold_100": 0.3951612089166617,
238
+ "scr_dir2_threshold_100": 0.3951612089166617,
239
+ "scr_dir1_threshold_500": 0.5384613503600885,
240
+ "scr_metric_threshold_500": 0.7217741993630956,
241
+ "scr_dir2_threshold_500": 0.7217741993630956
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.10859722770995466,
246
+ "scr_metric_threshold_2": 0.07964604337266272,
247
+ "scr_dir2_threshold_2": 0.07964604337266272,
248
+ "scr_dir1_threshold_5": 0.21266962712038115,
249
+ "scr_metric_threshold_5": 0.12389378663806347,
250
+ "scr_dir2_threshold_5": 0.12389378663806347,
251
+ "scr_dir1_threshold_10": 0.3438915357364882,
252
+ "scr_metric_threshold_10": 0.1991151084316568,
253
+ "scr_dir2_threshold_10": 0.1991151084316568,
254
+ "scr_dir1_threshold_20": 0.4705883463488112,
255
+ "scr_metric_threshold_20": 0.42035382475866057,
256
+ "scr_dir2_threshold_20": 0.42035382475866057,
257
+ "scr_dir1_threshold_50": 0.5656108194559256,
258
+ "scr_metric_threshold_50": 0.5442476113967241,
259
+ "scr_dir2_threshold_50": 0.5442476113967241,
260
+ "scr_dir1_threshold_100": 0.6289592247620871,
261
+ "scr_metric_threshold_100": 0.6017697831366864,
262
+ "scr_dir2_threshold_100": 0.6017697831366864,
263
+ "scr_dir1_threshold_500": 0.6380091510653992,
264
+ "scr_metric_threshold_500": 0.6946902549839107,
265
+ "scr_dir2_threshold_500": 0.6946902549839107
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.047210196127387125,
270
+ "scr_metric_threshold_2": 0.047210196127387125,
271
+ "scr_dir2_threshold_2": 0.04761899355588758,
272
+ "scr_dir1_threshold_5": 0.10300422706858779,
273
+ "scr_metric_threshold_5": 0.10300422706858779,
274
+ "scr_dir2_threshold_5": 0.07619033292310208,
275
+ "scr_dir1_threshold_10": 0.18025746132349915,
276
+ "scr_metric_threshold_10": 0.18025746132349915,
277
+ "scr_dir2_threshold_10": 0.14285698066766273,
278
+ "scr_dir1_threshold_20": 0.17167388232356773,
279
+ "scr_metric_threshold_20": 0.17167388232356773,
280
+ "scr_dir2_threshold_20": 0.19047625805514054,
281
+ "scr_dir1_threshold_50": 0.23175970276473412,
282
+ "scr_metric_threshold_50": 0.23175970276473412,
283
+ "scr_dir2_threshold_50": 0.2666665909782426,
284
+ "scr_dir1_threshold_100": 0.31759651601957695,
285
+ "scr_metric_threshold_100": 0.31759651601957695,
286
+ "scr_dir2_threshold_100": 0.34285720773293493,
287
+ "scr_dir1_threshold_500": 0.4291845779019783,
288
+ "scr_metric_threshold_500": 0.4291845779019783,
289
+ "scr_dir2_threshold_500": 0.48571418840059766
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_65k/average_l0_297",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_38_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732188195892,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.17675689054627836,
76
+ "scr_metric_threshold_2": 0.06083779301628871,
77
+ "scr_dir2_threshold_2": 0.0599334620943996,
78
+ "scr_dir1_threshold_5": 0.21474723611971733,
79
+ "scr_metric_threshold_5": 0.12145876645665167,
80
+ "scr_dir2_threshold_5": 0.11750414240720655,
81
+ "scr_dir1_threshold_10": 0.22755180143218018,
82
+ "scr_metric_threshold_10": 0.18494880175833403,
83
+ "scr_dir2_threshold_10": 0.17920076232462778,
84
+ "scr_dir1_threshold_20": 0.21403182512814212,
85
+ "scr_metric_threshold_20": 0.2563144193859877,
86
+ "scr_dir2_threshold_20": 0.252704649721399,
87
+ "scr_dir1_threshold_50": 0.2080206093051259,
88
+ "scr_metric_threshold_50": 0.3381421269772288,
89
+ "scr_dir2_threshold_50": 0.3235089689727556,
90
+ "scr_dir1_threshold_100": 0.25670254037482104,
91
+ "scr_metric_threshold_100": 0.3557431678382766,
92
+ "scr_dir2_threshold_100": 0.34163882656369854,
93
+ "scr_dir1_threshold_500": 0.06548327914017868,
94
+ "scr_metric_threshold_500": 0.36411814690057104,
95
+ "scr_dir2_threshold_500": 0.34451617761003794
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.39062549476503666,
102
+ "scr_metric_threshold_2": 0.0073890670992652185,
103
+ "scr_dir2_threshold_2": 0.0073890670992652185,
104
+ "scr_dir1_threshold_5": 0.42187508731147705,
105
+ "scr_metric_threshold_5": 0.024630517283144072,
106
+ "scr_dir2_threshold_5": 0.024630517283144072,
107
+ "scr_dir1_threshold_10": 0.39062549476503666,
108
+ "scr_metric_threshold_10": 0.03694577592471611,
109
+ "scr_dir2_threshold_10": 0.03694577592471611,
110
+ "scr_dir1_threshold_20": 0.32812537834973393,
111
+ "scr_metric_threshold_20": 0.051724056932709886,
112
+ "scr_dir2_threshold_20": 0.051724056932709886,
113
+ "scr_dir1_threshold_50": 0.3125005820765137,
114
+ "scr_metric_threshold_50": 0.07142852948301047,
115
+ "scr_dir2_threshold_50": 0.07142852948301047,
116
+ "scr_dir1_threshold_100": 0.32812537834973393,
117
+ "scr_metric_threshold_100": 0.09605904676615455,
118
+ "scr_dir2_threshold_100": 0.09605904676615455,
119
+ "scr_dir1_threshold_500": 0.1250002328306055,
120
+ "scr_metric_threshold_500": 0.10591128304130484,
121
+ "scr_dir2_threshold_500": 0.10591128304130484
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.1386138321710429,
126
+ "scr_metric_threshold_2": 0.09971517131749864,
127
+ "scr_dir2_threshold_2": 0.09971517131749864,
128
+ "scr_dir1_threshold_5": 0.15841614542115398,
129
+ "scr_metric_threshold_5": 0.22222224109042193,
130
+ "scr_dir2_threshold_5": 0.22222224109042193,
131
+ "scr_dir1_threshold_10": 0.19801959163150878,
132
+ "scr_metric_threshold_10": 0.25071224152005084,
133
+ "scr_dir2_threshold_10": 0.25071224152005084,
134
+ "scr_dir1_threshold_20": -0.029703174802699794,
135
+ "scr_metric_threshold_20": 0.3076924121931061,
136
+ "scr_dir2_threshold_20": 0.3076924121931061,
137
+ "scr_dir1_threshold_50": -0.19801959163150878,
138
+ "scr_metric_threshold_50": 0.43589741412643623,
139
+ "scr_dir2_threshold_50": 0.43589741412643623,
140
+ "scr_dir1_threshold_100": -0.26732680278949705,
141
+ "scr_metric_threshold_100": 0.19943017282119982,
142
+ "scr_dir2_threshold_100": 0.19943017282119982,
143
+ "scr_dir1_threshold_500": -0.42574235806571736,
144
+ "scr_metric_threshold_500": 0.2307693091448296,
145
+ "scr_dir2_threshold_500": 0.2307693091448296
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5238092084411499,
150
+ "scr_metric_threshold_2": 0.03544318161412681,
151
+ "scr_dir2_threshold_2": 0.03544318161412681,
152
+ "scr_dir1_threshold_5": 0.5396829601737049,
153
+ "scr_metric_threshold_5": 0.06835444871669703,
154
+ "scr_dir2_threshold_5": 0.06835444871669703,
155
+ "scr_dir1_threshold_10": 0.4444442341988618,
156
+ "scr_metric_threshold_10": 0.10126586671708666,
157
+ "scr_dir2_threshold_10": 0.10126586671708666,
158
+ "scr_dir1_threshold_20": 0.4761907915588501,
159
+ "scr_metric_threshold_20": 0.1620253263882109,
160
+ "scr_dir2_threshold_20": 0.1620253263882109,
161
+ "scr_dir1_threshold_50": 0.42857142857142855,
162
+ "scr_metric_threshold_50": 0.24303798958231634,
163
+ "scr_dir2_threshold_50": 0.24303798958231634,
164
+ "scr_dir1_threshold_100": 0.31746084307427397,
165
+ "scr_metric_threshold_100": 0.29113923477603215,
166
+ "scr_dir2_threshold_100": 0.29113923477603215,
167
+ "scr_dir1_threshold_500": -0.6984119625531593,
168
+ "scr_metric_threshold_500": 0.1620253263882109,
169
+ "scr_dir2_threshold_500": 0.1620253263882109
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.12598406349820201,
174
+ "scr_metric_threshold_2": 0.0591717176312832,
175
+ "scr_dir2_threshold_2": 0.0591717176312832,
176
+ "scr_dir1_threshold_5": 0.11811014752856565,
177
+ "scr_metric_threshold_5": 0.1479290295605533,
178
+ "scr_dir2_threshold_5": 0.1479290295605533,
179
+ "scr_dir1_threshold_10": 0.1102362315589293,
180
+ "scr_metric_threshold_10": 0.22189345616827838,
181
+ "scr_dir2_threshold_10": 0.22189345616827838,
182
+ "scr_dir1_threshold_20": 0.0787400983523771,
183
+ "scr_metric_threshold_20": 0.2928995173258182,
184
+ "scr_dir2_threshold_20": 0.2928995173258182,
185
+ "scr_dir1_threshold_50": 0.06299179708509761,
186
+ "scr_metric_threshold_50": 0.3047338608520748,
187
+ "scr_dir2_threshold_50": 0.3047338608520748,
188
+ "scr_dir1_threshold_100": 0.4960628073511784,
189
+ "scr_metric_threshold_100": 0.34615397502142153,
190
+ "scr_dir2_threshold_100": 0.34615397502142153,
191
+ "scr_dir1_threshold_500": 0.2362202950571313,
192
+ "scr_metric_threshold_500": 0.1301776024437199,
193
+ "scr_dir2_threshold_500": 0.1301776024437199
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.03278678379574697,
198
+ "scr_metric_threshold_2": 0.09375017462295412,
199
+ "scr_dir2_threshold_2": 0.09375017462295412,
200
+ "scr_dir1_threshold_5": 0.00546440968122594,
201
+ "scr_metric_threshold_5": 0.14843766007104128,
202
+ "scr_dir2_threshold_5": 0.14843766007104128,
203
+ "scr_dir1_threshold_10": -0.021857964433295084,
204
+ "scr_metric_threshold_10": 0.26171883003552066,
205
+ "scr_dir2_threshold_10": 0.26171883003552066,
206
+ "scr_dir1_threshold_20": -0.00546440968122594,
207
+ "scr_metric_threshold_20": 0.3984374272404358,
208
+ "scr_dir2_threshold_20": 0.3984374272404358,
209
+ "scr_dir1_threshold_50": -0.00546440968122594,
210
+ "scr_metric_threshold_50": 0.5820313445874334,
211
+ "scr_dir2_threshold_50": 0.5820313445874334,
212
+ "scr_dir1_threshold_100": -0.021857964433295084,
213
+ "scr_metric_threshold_100": 0.6679686554125666,
214
+ "scr_dir2_threshold_100": 0.6679686554125666,
215
+ "scr_dir1_threshold_500": -0.021857964433295084,
216
+ "scr_metric_threshold_500": 0.746093800931695,
217
+ "scr_dir2_threshold_500": 0.746093800931695
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.05641014354938642,
222
+ "scr_metric_threshold_2": 0.060483807005948444,
223
+ "scr_dir2_threshold_2": 0.060483807005948444,
224
+ "scr_dir1_threshold_5": 0.13846128922711723,
225
+ "scr_metric_threshold_5": 0.07258066454365199,
226
+ "scr_dir2_threshold_5": 0.07258066454365199,
227
+ "scr_dir1_threshold_10": 0.22051274056970427,
228
+ "scr_metric_threshold_10": 0.11290320254761761,
229
+ "scr_dir2_threshold_10": 0.11290320254761761,
230
+ "scr_dir1_threshold_20": 0.29230760766203984,
231
+ "scr_metric_threshold_20": 0.18951607282340924,
232
+ "scr_dir2_threshold_20": 0.18951607282340924,
233
+ "scr_dir1_threshold_50": 0.32307674908308187,
234
+ "scr_metric_threshold_50": 0.2580645316349216,
235
+ "scr_dir2_threshold_50": 0.2580645316349216,
236
+ "scr_dir1_threshold_100": 0.3948716161754174,
237
+ "scr_metric_threshold_100": 0.3346774019107132,
238
+ "scr_dir2_threshold_100": 0.3346774019107132,
239
+ "scr_dir1_threshold_500": 0.44102548113940854,
240
+ "scr_metric_threshold_500": 0.5161290632698432,
241
+ "scr_dir2_threshold_500": 0.5161290632698432
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.08144798820852994,
246
+ "scr_metric_threshold_2": 0.06637161489810112,
247
+ "scr_dir2_threshold_2": 0.06637161489810112,
248
+ "scr_dir1_threshold_5": 0.19004521591848458,
249
+ "scr_metric_threshold_5": 0.14159293669169445,
250
+ "scr_dir2_threshold_5": 0.14159293669169445,
251
+ "scr_dir1_threshold_10": 0.2895927870293829,
252
+ "scr_metric_threshold_10": 0.3053097450160893,
253
+ "scr_dir2_threshold_10": 0.3053097450160893,
254
+ "scr_dir1_threshold_20": 0.3574660206350726,
255
+ "scr_metric_threshold_20": 0.4336282532332221,
256
+ "scr_dir2_threshold_20": 0.4336282532332221,
257
+ "scr_dir1_threshold_50": 0.4615384200454991,
258
+ "scr_metric_threshold_50": 0.5309734466595158,
259
+ "scr_dir2_threshold_50": 0.5309734466595158,
260
+ "scr_dir1_threshold_100": 0.4886879292511796,
261
+ "scr_metric_threshold_100": 0.5929203399785477,
262
+ "scr_dir2_threshold_100": 0.5929203399785477,
263
+ "scr_dir1_threshold_500": 0.4298643522445462,
264
+ "scr_metric_threshold_500": 0.5840706330830554,
265
+ "scr_dir2_threshold_500": 0.5840706330830554
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.0643776099411321,
270
+ "scr_metric_threshold_2": 0.0643776099411321,
271
+ "scr_dir2_threshold_2": 0.05714296256601923,
272
+ "scr_dir1_threshold_5": 0.1459226336960092,
273
+ "scr_metric_threshold_5": 0.1459226336960092,
274
+ "scr_dir2_threshold_5": 0.11428564130044823,
275
+ "scr_dir1_threshold_10": 0.1888412961373127,
276
+ "scr_metric_threshold_10": 0.1888412961373127,
277
+ "scr_dir2_threshold_10": 0.14285698066766273,
278
+ "scr_dir1_threshold_20": 0.21459228895098914,
279
+ "scr_metric_threshold_20": 0.21459228895098914,
280
+ "scr_dir2_threshold_20": 0.18571413163427958,
281
+ "scr_dir1_threshold_50": 0.27896989889212126,
282
+ "scr_metric_threshold_50": 0.27896989889212126,
283
+ "scr_dir2_threshold_50": 0.1619046348563358,
284
+ "scr_dir1_threshold_100": 0.31759651601957695,
285
+ "scr_metric_threshold_100": 0.31759651601957695,
286
+ "scr_dir2_threshold_100": 0.20476178582295265,
287
+ "scr_dir1_threshold_500": 0.4377681569019097,
288
+ "scr_metric_threshold_500": 0.4377681569019097,
289
+ "scr_dir2_threshold_500": 0.280952402577645
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_65k/average_l0_38",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_12_width_65k_average_l0_72_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732189061093,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.21782108635654443,
76
+ "scr_metric_threshold_2": 0.05198406220746681,
77
+ "scr_dir2_threshold_2": 0.05274790898422797,
78
+ "scr_dir1_threshold_5": 0.2736353482899159,
79
+ "scr_metric_threshold_5": 0.11364522800610186,
80
+ "scr_dir2_threshold_5": 0.10903661699683478,
81
+ "scr_dir1_threshold_10": 0.2548046623405194,
82
+ "scr_metric_threshold_10": 0.17602022856464683,
83
+ "scr_dir2_threshold_10": 0.16747230742113575,
84
+ "scr_dir1_threshold_20": 0.27007297281939036,
85
+ "scr_metric_threshold_20": 0.27074226421465636,
86
+ "scr_dir2_threshold_20": 0.2624217161604107,
87
+ "scr_dir1_threshold_50": 0.27526224550358364,
88
+ "scr_metric_threshold_50": 0.36654492749632883,
89
+ "scr_dir2_threshold_50": 0.36286108728037586,
90
+ "scr_dir1_threshold_100": 0.3201452494141222,
91
+ "scr_metric_threshold_100": 0.378308324992422,
92
+ "scr_dir2_threshold_100": 0.37462448477646904,
93
+ "scr_dir1_threshold_500": -0.0764709120658077,
94
+ "scr_metric_threshold_500": 0.36028058623533077,
95
+ "scr_dir2_threshold_500": 0.3566912564421671
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4062502910382569,
102
+ "scr_metric_threshold_2": 0.009852089465686957,
103
+ "scr_dir2_threshold_2": 0.009852089465686957,
104
+ "scr_dir1_threshold_5": 0.42187508731147705,
105
+ "scr_metric_threshold_5": 0.024630517283144072,
106
+ "scr_dir2_threshold_5": 0.024630517283144072,
107
+ "scr_dir1_threshold_10": 0.4062502910382569,
108
+ "scr_metric_threshold_10": 0.04187182065755959,
109
+ "scr_dir2_threshold_10": 0.04187182065755959,
110
+ "scr_dir1_threshold_20": 0.42187508731147705,
111
+ "scr_metric_threshold_20": 0.0566502484750167,
112
+ "scr_dir2_threshold_20": 0.0566502484750167,
113
+ "scr_dir1_threshold_50": 0.3593749708961743,
114
+ "scr_metric_threshold_50": 0.08374378812458251,
115
+ "scr_dir2_threshold_50": 0.08374378812458251,
116
+ "scr_dir1_threshold_100": 0.43749988358469727,
117
+ "scr_metric_threshold_100": 0.1133004969500334,
118
+ "scr_dir2_threshold_100": 0.1133004969500334,
119
+ "scr_dir1_threshold_500": -0.32812444702731197,
120
+ "scr_metric_threshold_500": 0.044334989833444666,
121
+ "scr_dir2_threshold_500": 0.044334989833444666
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.19801959163150878,
126
+ "scr_metric_threshold_2": 0.08547017110268419,
127
+ "scr_dir2_threshold_2": 0.08547017110268419,
128
+ "scr_dir1_threshold_5": 0.2376236279867973,
129
+ "scr_metric_threshold_5": 0.15669517217675646,
130
+ "scr_dir2_threshold_5": 0.15669517217675646,
131
+ "scr_dir1_threshold_10": 0.09900979581575439,
132
+ "scr_metric_threshold_10": 0.24216534327944067,
133
+ "scr_dir2_threshold_10": 0.24216534327944067,
134
+ "scr_dir1_threshold_20": 0.10891124751327678,
135
+ "scr_metric_threshold_20": 0.2792024117634772,
136
+ "scr_dir2_threshold_20": 0.2792024117634772,
137
+ "scr_dir1_threshold_50": 0.1386138321710429,
138
+ "scr_metric_threshold_50": 0.41595448175121497,
139
+ "scr_dir2_threshold_50": 0.41595448175121497,
140
+ "scr_dir1_threshold_100": 0.14851469372363157,
141
+ "scr_metric_threshold_100": 0.13390327372133182,
142
+ "scr_dir2_threshold_100": 0.13390327372133182,
143
+ "scr_dir1_threshold_500": -0.59405936503946,
144
+ "scr_metric_threshold_500": 0.14814827393614627,
145
+ "scr_dir2_threshold_500": 0.14814827393614627
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5714285714285714,
150
+ "scr_metric_threshold_2": 0.037974794330044616,
151
+ "scr_dir2_threshold_2": 0.037974794330044616,
152
+ "scr_dir1_threshold_5": 0.5714285714285714,
153
+ "scr_metric_threshold_5": 0.08354442680784264,
154
+ "scr_dir2_threshold_5": 0.08354442680784264,
155
+ "scr_dir1_threshold_10": 0.380952065584007,
156
+ "scr_metric_threshold_10": 0.1265822956719035,
157
+ "scr_dir2_threshold_10": 0.1265822956719035,
158
+ "scr_dir1_threshold_20": 0.36507925995657375,
159
+ "scr_metric_threshold_20": 0.20759495886600893,
160
+ "scr_dir2_threshold_20": 0.20759495886600893,
161
+ "scr_dir1_threshold_50": 0.2857142857142857,
162
+ "scr_metric_threshold_50": 0.32151904006050397,
163
+ "scr_dir2_threshold_50": 0.32151904006050397,
164
+ "scr_dir1_threshold_100": 0.26984148008685244,
165
+ "scr_metric_threshold_100": 0.3392406308675674,
166
+ "scr_dir2_threshold_100": 0.3392406308675674,
167
+ "scr_dir1_threshold_500": -1.3492055082240189,
168
+ "scr_metric_threshold_500": 0.030379805284471813,
169
+ "scr_dir2_threshold_500": 0.030379805284471813
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.2362202950571313,
174
+ "scr_metric_threshold_2": 0.0591717176312832,
175
+ "scr_dir2_threshold_2": 0.0591717176312832,
176
+ "scr_dir1_threshold_5": 0.24409421102676765,
177
+ "scr_metric_threshold_5": 0.1301776024437199,
178
+ "scr_dir2_threshold_5": 0.1301776024437199,
179
+ "scr_dir1_threshold_10": 0.19685024588094274,
180
+ "scr_metric_threshold_10": 0.23668651783492653,
181
+ "scr_dir2_threshold_10": 0.23668651783492653,
182
+ "scr_dir1_threshold_20": 0.14960628073511784,
183
+ "scr_metric_threshold_20": 0.3254438297641966,
184
+ "scr_dir2_threshold_20": 0.3254438297641966,
185
+ "scr_dir1_threshold_50": 0.17322802864402692,
186
+ "scr_metric_threshold_50": 0.34023671508574166,
187
+ "scr_dir2_threshold_50": 0.34023671508574166,
188
+ "scr_dir1_threshold_100": 0.5511811577946465,
189
+ "scr_metric_threshold_100": 0.42899420336011485,
190
+ "scr_dir2_threshold_100": 0.42899420336011485,
191
+ "scr_dir1_threshold_500": 0.29133864550059935,
192
+ "scr_metric_threshold_500": 0.17751480020364332,
193
+ "scr_dir2_threshold_500": 0.17751480020364332
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.05464474822904205,
198
+ "scr_metric_threshold_2": 0.05078128637978211,
199
+ "scr_dir2_threshold_2": 0.05078128637978211,
200
+ "scr_dir1_threshold_5": 0.07650271266233713,
201
+ "scr_metric_threshold_5": 0.12890643189891055,
202
+ "scr_dir2_threshold_5": 0.12890643189891055,
203
+ "scr_dir1_threshold_10": 0.06557389329988525,
204
+ "scr_metric_threshold_10": 0.21093754365573852,
205
+ "scr_dir2_threshold_10": 0.21093754365573852,
206
+ "scr_dir1_threshold_20": 0.08196712234356307,
207
+ "scr_metric_threshold_20": 0.4140624563442615,
208
+ "scr_dir2_threshold_20": 0.4140624563442615,
209
+ "scr_dir1_threshold_50": 0.04918033854781611,
210
+ "scr_metric_threshold_50": 0.5742187136202179,
211
+ "scr_dir2_threshold_50": 0.5742187136202179,
212
+ "scr_dir1_threshold_100": 0.01092881936245188,
213
+ "scr_metric_threshold_100": 0.6757812863797821,
214
+ "scr_dir2_threshold_100": 0.6757812863797821,
215
+ "scr_dir1_threshold_500": -0.00546440968122594,
216
+ "scr_metric_threshold_500": 0.8007812863797821,
217
+ "scr_dir2_threshold_500": 0.8007812863797821
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.10256400851337757,
222
+ "scr_metric_threshold_2": 0.07258066454365199,
223
+ "scr_dir2_threshold_2": 0.07258066454365199,
224
+ "scr_dir1_threshold_5": 0.21538460127700665,
225
+ "scr_metric_threshold_5": 0.08870972781349516,
226
+ "scr_dir2_threshold_5": 0.08870972781349516,
227
+ "scr_dir1_threshold_10": 0.27179474482639304,
228
+ "scr_metric_threshold_10": 0.10483879108333834,
229
+ "scr_dir2_threshold_10": 0.10483879108333834,
230
+ "scr_dir1_threshold_20": 0.28717946836934216,
231
+ "scr_metric_threshold_20": 0.20967734182539205,
232
+ "scr_dir2_threshold_20": 0.20967734182539205,
233
+ "scr_dir1_threshold_50": 0.37948719829732447,
234
+ "scr_metric_threshold_50": 0.2943548639067476,
235
+ "scr_dir2_threshold_50": 0.2943548639067476,
236
+ "scr_dir1_threshold_100": 0.35384589050412385,
237
+ "scr_metric_threshold_100": 0.3709677341825392,
238
+ "scr_dir2_threshold_100": 0.3709677341825392,
239
+ "scr_dir1_threshold_500": 0.4923074853960973,
240
+ "scr_metric_threshold_500": 0.5927419335456348,
241
+ "scr_dir2_threshold_500": 0.5927419335456348
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.11764715401326675,
246
+ "scr_metric_threshold_2": 0.04424774326540074,
247
+ "scr_dir2_threshold_2": 0.04424774326540074,
248
+ "scr_dir1_threshold_5": 0.28054313043032664,
249
+ "scr_metric_threshold_5": 0.15486710142890261,
250
+ "scr_dir2_threshold_5": 0.15486710142890261,
251
+ "scr_dir1_threshold_10": 0.416289597641706,
252
+ "scr_metric_threshold_10": 0.24336285169705757,
253
+ "scr_dir2_threshold_10": 0.24336285169705757,
254
+ "scr_dir1_threshold_20": 0.4841628312473957,
255
+ "scr_metric_threshold_20": 0.4115043816005218,
256
+ "scr_dir2_threshold_20": 0.4115043816005218,
257
+ "scr_dir1_threshold_50": 0.5203619970521325,
258
+ "scr_metric_threshold_50": 0.6061945047157558,
259
+ "scr_dir2_threshold_50": 0.6061945047157558,
260
+ "scr_dir1_threshold_100": 0.49321275755070776,
261
+ "scr_metric_threshold_100": 0.668141661772141,
262
+ "scr_dir2_threshold_100": 0.668141661772141,
263
+ "scr_dir1_threshold_500": 0.4479639351469146,
264
+ "scr_metric_threshold_500": 0.6548672332975793,
265
+ "scr_dir2_threshold_500": 0.6548672332975793
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.05579403094120067,
270
+ "scr_metric_threshold_2": 0.05579403094120067,
271
+ "scr_dir2_threshold_2": 0.06190480515528994,
272
+ "scr_dir1_threshold_5": 0.1416308441960435,
273
+ "scr_metric_threshold_5": 0.1416308441960435,
274
+ "scr_dir2_threshold_5": 0.10476195612190681,
275
+ "scr_dir1_threshold_10": 0.20171666463720986,
276
+ "scr_metric_threshold_10": 0.20171666463720986,
277
+ "scr_dir2_threshold_10": 0.1333332954891213,
278
+ "scr_dir1_threshold_20": 0.2618024850783763,
279
+ "scr_metric_threshold_20": 0.2618024850783763,
280
+ "scr_dir2_threshold_20": 0.19523810064441124,
281
+ "scr_dir1_threshold_50": 0.29613731270586624,
282
+ "scr_metric_threshold_50": 0.29613731270586624,
283
+ "scr_dir2_threshold_50": 0.2666665909782426,
284
+ "scr_dir1_threshold_100": 0.29613731270586624,
285
+ "scr_metric_threshold_100": 0.29613731270586624,
286
+ "scr_dir2_threshold_100": 0.2666665909782426,
287
+ "scr_dir1_threshold_500": 0.433476367401944,
288
+ "scr_metric_threshold_500": 0.433476367401944,
289
+ "scr_dir2_threshold_500": 0.4047617290566346
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_12/width_65k/average_l0_72",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_137_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732148482289,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.30066379394750936,
76
+ "scr_metric_threshold_2": 0.19583047719854235,
77
+ "scr_dir2_threshold_2": 0.19583047719854235,
78
+ "scr_dir1_threshold_5": 0.35182578126247654,
79
+ "scr_metric_threshold_5": 0.2567877208257863,
80
+ "scr_dir2_threshold_5": 0.2567877208257863,
81
+ "scr_dir1_threshold_10": 0.3336105026339313,
82
+ "scr_metric_threshold_10": 0.32601031558378435,
83
+ "scr_dir2_threshold_10": 0.32601031558378435,
84
+ "scr_dir1_threshold_20": 0.32105566933414575,
85
+ "scr_metric_threshold_20": 0.3990629809853189,
86
+ "scr_dir2_threshold_20": 0.3990629809853189,
87
+ "scr_dir1_threshold_50": 0.29643659752886214,
88
+ "scr_metric_threshold_50": 0.4738797043706798,
89
+ "scr_dir2_threshold_50": 0.4738797043706798,
90
+ "scr_dir1_threshold_100": 0.0478244269310811,
91
+ "scr_metric_threshold_100": 0.5214530880132012,
92
+ "scr_dir2_threshold_100": 0.5214530880132012,
93
+ "scr_dir1_threshold_500": -0.8408597942634157,
94
+ "scr_metric_threshold_500": 0.47804202845064436,
95
+ "scr_dir2_threshold_500": 0.47804202845064436
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2941181626697965,
102
+ "scr_metric_threshold_2": 0.07594943776226984,
103
+ "scr_dir2_threshold_2": 0.07594943776226984,
104
+ "scr_dir1_threshold_5": 0.4264711167365414,
105
+ "scr_metric_threshold_5": 0.08860765223967824,
106
+ "scr_dir2_threshold_5": 0.08860765223967824,
107
+ "scr_dir1_threshold_10": 0.4411768057353677,
108
+ "scr_metric_threshold_10": 0.1265822956719035,
109
+ "scr_dir2_threshold_10": 0.1265822956719035,
110
+ "scr_dir1_threshold_20": 0.38235317320140844,
111
+ "scr_metric_threshold_20": 0.13417728471747628,
112
+ "scr_dir2_threshold_20": 0.13417728471747628,
113
+ "scr_dir1_threshold_50": 0.38235317320140844,
114
+ "scr_metric_threshold_50": 0.2050633461500911,
115
+ "scr_dir2_threshold_50": 0.2050633461500911,
116
+ "scr_dir1_threshold_100": 0.3088238516686228,
117
+ "scr_metric_threshold_100": 0.29873422382160497,
118
+ "scr_dir2_threshold_100": 0.29873422382160497,
119
+ "scr_dir1_threshold_500": -0.499999561730673,
120
+ "scr_metric_threshold_500": 0.4329113576412619,
121
+ "scr_dir2_threshold_500": 0.4329113576412619
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3693694419340687,
126
+ "scr_metric_threshold_2": 0.2470587740307497,
127
+ "scr_dir2_threshold_2": 0.2470587740307497,
128
+ "scr_dir1_threshold_5": 0.3873872277450489,
129
+ "scr_metric_threshold_5": 0.3323528370230358,
130
+ "scr_dir2_threshold_5": 0.3323528370230358,
131
+ "scr_dir1_threshold_10": 0.36036054902857856,
132
+ "scr_metric_threshold_10": 0.4117646233845828,
133
+ "scr_dir2_threshold_10": 0.4117646233845828,
134
+ "scr_dir1_threshold_20": 0.3873872277450489,
135
+ "scr_metric_threshold_20": 0.5852940629922861,
136
+ "scr_dir2_threshold_20": 0.5852940629922861,
137
+ "scr_dir1_threshold_50": 0.41441444344029416,
138
+ "scr_metric_threshold_50": 0.6970587214384212,
139
+ "scr_dir2_threshold_50": 0.6970587214384212,
140
+ "scr_dir1_threshold_100": -0.03603610860073534,
141
+ "scr_metric_threshold_100": 0.7058823116922914,
142
+ "scr_dir2_threshold_100": 0.7058823116922914,
143
+ "scr_dir1_threshold_500": -0.4324322292512744,
144
+ "scr_metric_threshold_500": 0.632352977269245,
145
+ "scr_dir2_threshold_500": 0.632352977269245
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.4814806229786858,
150
+ "scr_metric_threshold_2": 0.08088233575413901,
151
+ "scr_dir2_threshold_2": 0.08088233575413901,
152
+ "scr_dir1_threshold_5": 0.4814806229786858,
153
+ "scr_metric_threshold_5": 0.14215678826793574,
154
+ "scr_dir2_threshold_5": 0.14215678826793574,
155
+ "scr_dir1_threshold_10": 0.4259258032826694,
156
+ "scr_metric_threshold_10": 0.23774510949724065,
157
+ "scr_dir2_threshold_10": 0.23774510949724065,
158
+ "scr_dir1_threshold_20": 0.38888815302934976,
159
+ "scr_metric_threshold_20": 0.3357842335193154,
160
+ "scr_dir2_threshold_20": 0.3357842335193154,
161
+ "scr_dir1_threshold_50": 0.3518516065653388,
162
+ "scr_metric_threshold_50": 0.40441167877069506,
163
+ "scr_dir2_threshold_50": 0.40441167877069506,
164
+ "scr_dir1_threshold_100": -0.6111107431813415,
165
+ "scr_metric_threshold_100": 0.5098039416201712,
166
+ "scr_dir2_threshold_100": 0.5098039416201712,
167
+ "scr_dir1_threshold_500": -5.648146185856044,
168
+ "scr_metric_threshold_500": 0.014705839385347542,
169
+ "scr_dir2_threshold_500": 0.014705839385347542
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3828126309672156,
174
+ "scr_metric_threshold_2": 0.18208950018931144,
175
+ "scr_dir2_threshold_2": 0.18208950018931144,
176
+ "scr_dir1_threshold_5": 0.3515625727595642,
177
+ "scr_metric_threshold_5": 0.3074626074306878,
178
+ "scr_dir2_threshold_5": 0.3074626074306878,
179
+ "scr_dir1_threshold_10": 0.21874994179234863,
180
+ "scr_metric_threshold_10": 0.41492532161595114,
181
+ "scr_dir2_threshold_10": 0.41492532161595114,
182
+ "scr_dir1_threshold_20": 0.07812491268852294,
183
+ "scr_metric_threshold_20": 0.5104477144557082,
184
+ "scr_dir2_threshold_20": 0.5104477144557082,
185
+ "scr_dir1_threshold_50": 0.007812398136610098,
186
+ "scr_metric_threshold_50": 0.5880597142393523,
187
+ "scr_dir2_threshold_50": 0.5880597142393523,
188
+ "scr_dir1_threshold_100": 0.07812491268852294,
189
+ "scr_metric_threshold_100": 0.5164179640906076,
190
+ "scr_dir2_threshold_100": 0.5164179640906076,
191
+ "scr_dir1_threshold_500": -0.03125005820765137,
192
+ "scr_metric_threshold_500": 0.40895524990534426,
193
+ "scr_dir2_threshold_500": 0.40895524990534426
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.047619132092729194,
198
+ "scr_metric_threshold_2": 0.4981548955203265,
199
+ "scr_dir2_threshold_2": 0.4981548955203265,
200
+ "scr_dir1_threshold_5": 0.07142869813909379,
201
+ "scr_metric_threshold_5": 0.5498154015747062,
202
+ "scr_dir2_threshold_5": 0.5498154015747062,
203
+ "scr_dir1_threshold_10": 0.10714286981390937,
204
+ "scr_metric_threshold_10": 0.5830257426056157,
205
+ "scr_dir2_threshold_10": 0.5830257426056157,
206
+ "scr_dir1_threshold_20": 0.0178572632321391,
207
+ "scr_metric_threshold_20": 0.6273062706279338,
208
+ "scr_dir2_threshold_20": 0.6273062706279338,
209
+ "scr_dir1_threshold_50": -0.03571417167481559,
210
+ "scr_metric_threshold_50": 0.645756435651404,
211
+ "scr_dir2_threshold_50": 0.645756435651404,
212
+ "scr_dir1_threshold_100": -0.20238077920990516,
213
+ "scr_metric_threshold_100": 0.7306272827366931,
214
+ "scr_dir2_threshold_100": 0.7306272827366931,
215
+ "scr_dir1_threshold_500": -0.33333321507017916,
216
+ "scr_metric_threshold_500": 0.7933579757824814,
217
+ "scr_dir2_threshold_500": 0.7933579757824814
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.21052638917161398,
222
+ "scr_metric_threshold_2": 0.04511286787937455,
223
+ "scr_dir2_threshold_2": 0.04511286787937455,
224
+ "scr_dir1_threshold_5": 0.30994170596157977,
225
+ "scr_metric_threshold_5": 0.0902257357587491,
226
+ "scr_dir2_threshold_5": 0.0902257357587491,
227
+ "scr_dir1_threshold_10": 0.36842118105032445,
228
+ "scr_metric_threshold_10": 0.157894701461434,
229
+ "scr_dir2_threshold_10": 0.157894701461434,
230
+ "scr_dir1_threshold_20": 0.36842118105032445,
231
+ "scr_metric_threshold_20": 0.21804519196726674,
232
+ "scr_dir2_threshold_20": 0.21804519196726674,
233
+ "scr_dir1_threshold_50": 0.4444446380917591,
234
+ "scr_metric_threshold_50": 0.32706767591210784,
235
+ "scr_dir2_threshold_50": 0.32706767591210784,
236
+ "scr_dir1_threshold_100": 0.41520490054738673,
237
+ "scr_metric_threshold_100": 0.37593989342870077,
238
+ "scr_dir2_threshold_100": 0.37593989342870077,
239
+ "scr_dir1_threshold_500": 0.2923977240088898,
240
+ "scr_metric_threshold_500": 0.5977444350331859,
241
+ "scr_dir2_threshold_500": 0.5977444350331859
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.3982301118351579,
246
+ "scr_metric_threshold_2": 0.32218847242533294,
247
+ "scr_dir2_threshold_2": 0.32218847242533294,
248
+ "scr_dir1_threshold_5": 0.46902669337797875,
249
+ "scr_metric_threshold_5": 0.43768995583821163,
250
+ "scr_dir2_threshold_5": 0.43768995583821163,
251
+ "scr_dir1_threshold_10": 0.47787613420218944,
252
+ "scr_metric_threshold_10": 0.5471124900900483,
253
+ "scr_dir2_threshold_10": 0.5471124900900483,
254
+ "scr_dir1_threshold_20": 0.5752210382176424,
255
+ "scr_metric_threshold_20": 0.6109421809575296,
256
+ "scr_dir2_threshold_20": 0.6109421809575296,
257
+ "scr_dir1_threshold_50": 0.4601767250792003,
258
+ "scr_metric_threshold_50": 0.6747720529941461,
259
+ "scr_dir2_threshold_50": 0.6747720529941461,
260
+ "scr_dir1_threshold_100": -0.07964602236703157,
261
+ "scr_metric_threshold_100": 0.6747720529941461,
262
+ "scr_dir2_threshold_100": 0.6747720529941461,
263
+ "scr_dir1_threshold_500": -0.699114792180295,
264
+ "scr_metric_threshold_500": 0.4650455894011708,
265
+ "scr_dir2_threshold_500": 0.4650455894011708
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.22115385993080702,
270
+ "scr_metric_threshold_2": 0.11520753402683473,
271
+ "scr_dir2_threshold_2": 0.11520753402683473,
272
+ "scr_dir1_threshold_5": 0.3173076124013192,
273
+ "scr_metric_threshold_5": 0.10599078847328613,
274
+ "scr_dir2_threshold_5": 0.10599078847328613,
275
+ "scr_dir1_threshold_10": 0.26923073616606313,
276
+ "scr_metric_threshold_10": 0.12903224034349853,
277
+ "scr_dir2_threshold_10": 0.12903224034349853,
278
+ "scr_dir1_threshold_20": 0.37019240550872995,
279
+ "scr_metric_threshold_20": 0.1705069086450354,
280
+ "scr_dir2_threshold_20": 0.1705069086450354,
281
+ "scr_dir1_threshold_50": 0.3461539673911019,
282
+ "scr_metric_threshold_50": 0.2488480098092212,
283
+ "scr_dir2_threshold_50": 0.2488480098092212,
284
+ "scr_dir1_threshold_100": 0.5096154039031299,
285
+ "scr_metric_threshold_100": 0.35944703372139525,
286
+ "scr_dir2_threshold_100": 0.35944703372139525,
287
+ "scr_dir1_threshold_500": 0.6249999641799017,
288
+ "scr_metric_threshold_500": 0.47926280318711795,
289
+ "scr_dir2_threshold_500": 0.47926280318711795
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_16k/average_l0_137",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_23_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732148900796,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.22456900767739602,
76
+ "scr_metric_threshold_2": 0.17266994948832587,
77
+ "scr_dir2_threshold_2": 0.17266994948832587,
78
+ "scr_dir1_threshold_5": 0.28377795644436826,
79
+ "scr_metric_threshold_5": 0.26440728499021576,
80
+ "scr_dir2_threshold_5": 0.26440728499021576,
81
+ "scr_dir1_threshold_10": 0.0930953555486498,
82
+ "scr_metric_threshold_10": 0.3249289103629941,
83
+ "scr_dir2_threshold_10": 0.3249289103629941,
84
+ "scr_dir1_threshold_20": 0.09156788783390263,
85
+ "scr_metric_threshold_20": 0.3700145905363591,
86
+ "scr_dir2_threshold_20": 0.3700145905363591,
87
+ "scr_dir1_threshold_50": -0.041415374050253084,
88
+ "scr_metric_threshold_50": 0.4407451443019093,
89
+ "scr_dir2_threshold_50": 0.4407451443019093,
90
+ "scr_dir1_threshold_100": -0.20912385940006403,
91
+ "scr_metric_threshold_100": 0.42420376899881146,
92
+ "scr_dir2_threshold_100": 0.42420376899881146,
93
+ "scr_dir1_threshold_500": -0.7414259613958035,
94
+ "scr_metric_threshold_500": 0.4164129954872385,
95
+ "scr_dir2_threshold_500": 0.4164129954872385
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2352945301358372,
102
+ "scr_metric_threshold_2": 0.04810139609153522,
103
+ "scr_dir2_threshold_2": 0.04810139609153522,
104
+ "scr_dir1_threshold_5": 0.19117658660070422,
105
+ "scr_metric_threshold_5": 0.10379747943300446,
106
+ "scr_dir2_threshold_5": 0.10379747943300446,
107
+ "scr_dir1_threshold_10": 0.19117658660070422,
108
+ "scr_metric_threshold_10": 0.14936711191080249,
109
+ "scr_dir2_threshold_10": 0.14936711191080249,
110
+ "scr_dir1_threshold_20": 0.20588315213818456,
111
+ "scr_metric_threshold_20": 0.1443038864789669,
112
+ "scr_dir2_threshold_20": 0.1443038864789669,
113
+ "scr_dir1_threshold_50": 0.044117943535132986,
114
+ "scr_metric_threshold_50": 0.1721519281497015,
115
+ "scr_dir2_threshold_50": 0.1721519281497015,
116
+ "scr_dir1_threshold_100": -0.02941137799765263,
117
+ "scr_metric_threshold_100": 0.23037977510490792,
118
+ "scr_dir2_threshold_100": 0.23037977510490792,
119
+ "scr_dir1_threshold_500": -1.2058813990608765,
120
+ "scr_metric_threshold_500": 0.28354439662827874,
121
+ "scr_dir2_threshold_500": 0.28354439662827874
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.09909889591794105,
126
+ "scr_metric_threshold_2": 0.35588229416151523,
127
+ "scr_dir2_threshold_2": 0.35588229416151523,
128
+ "scr_dir1_threshold_5": 0.17117111311941172,
129
+ "scr_metric_threshold_5": 0.4382352188384318,
130
+ "scr_dir2_threshold_5": 0.4382352188384318,
131
+ "scr_dir1_threshold_10": 0.14414389742416653,
132
+ "scr_metric_threshold_10": 0.5147058668846093,
133
+ "scr_dir2_threshold_10": 0.5147058668846093,
134
+ "scr_dir1_threshold_20": 0.15315332730843148,
135
+ "scr_metric_threshold_20": 0.6029410681922649,
136
+ "scr_dir2_threshold_20": 0.6029410681922649,
137
+ "scr_dir1_threshold_50": -0.5855855565597059,
138
+ "scr_metric_threshold_50": 0.6735294396077033,
139
+ "scr_dir2_threshold_50": 0.6735294396077033,
140
+ "scr_dir1_threshold_100": -0.4684683378520097,
141
+ "scr_metric_threshold_100": 0.6441175305307232,
142
+ "scr_dir2_threshold_100": 0.6441175305307232,
143
+ "scr_dir1_threshold_500": -1.0810811101069608,
144
+ "scr_metric_threshold_500": 0.6264705253307444,
145
+ "scr_dir2_threshold_500": 0.6264705253307444
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.37036987979734426,
150
+ "scr_metric_threshold_2": 0.0882351824019036,
151
+ "scr_dir2_threshold_2": 0.0882351824019036,
152
+ "scr_dir1_threshold_5": 0.4259258032826694,
153
+ "scr_metric_threshold_5": 0.16911751815604262,
154
+ "scr_dir2_threshold_5": 0.16911751815604262,
155
+ "scr_dir1_threshold_10": -0.29629678686932237,
156
+ "scr_metric_threshold_10": 0.23774510949724065,
157
+ "scr_dir2_threshold_10": 0.23774510949724065,
158
+ "scr_dir1_threshold_20": -0.22222259015199178,
159
+ "scr_metric_threshold_20": 0.3382351824019036,
160
+ "scr_dir2_threshold_20": 0.3382351824019036,
161
+ "scr_dir1_threshold_50": -0.5,
162
+ "scr_metric_threshold_50": 0.40686277374310165,
163
+ "scr_dir2_threshold_50": 0.40686277374310165,
164
+ "scr_dir1_threshold_100": -1.537036546464011,
165
+ "scr_metric_threshold_100": 0.15196072988810688,
166
+ "scr_dir2_threshold_100": 0.15196072988810688,
167
+ "scr_dir1_threshold_500": -2.8518516065653388,
168
+ "scr_metric_threshold_500": 0.07598029189914426,
169
+ "scr_dir2_threshold_500": 0.07598029189914426
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.16406268917486697,
174
+ "scr_metric_threshold_2": 0.31044782121028375,
175
+ "scr_dir2_threshold_2": 0.31044782121028375,
176
+ "scr_dir1_threshold_5": 0.20312514551912844,
177
+ "scr_metric_threshold_5": 0.3999999644151415,
178
+ "scr_dir2_threshold_5": 0.3999999644151415,
179
+ "scr_dir1_threshold_10": 0.11718736903278441,
180
+ "scr_metric_threshold_10": 0.4477612497971663,
181
+ "scr_dir2_threshold_10": 0.4477612497971663,
182
+ "scr_dir1_threshold_20": 0.023437660071041276,
183
+ "scr_metric_threshold_20": 0.42985067881676076,
184
+ "scr_dir2_threshold_20": 0.42985067881676076,
185
+ "scr_dir1_threshold_50": -0.21093754365573852,
186
+ "scr_metric_threshold_50": 0.5044776427451014,
187
+ "scr_dir2_threshold_50": 0.5044776427451014,
188
+ "scr_dir1_threshold_100": -0.22656233992895872,
189
+ "scr_metric_threshold_100": 0.5074626786004048,
190
+ "scr_dir2_threshold_100": 0.5074626786004048,
191
+ "scr_dir1_threshold_500": -0.8203125145519129,
192
+ "scr_metric_threshold_500": 0.4208955712508505,
193
+ "scr_dir2_threshold_500": 0.4208955712508505
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.06547639532486829,
198
+ "scr_metric_threshold_2": 0.0738006600938806,
199
+ "scr_dir2_threshold_2": 0.0738006600938806,
200
+ "scr_dir1_threshold_5": 0.10119056699968389,
201
+ "scr_metric_threshold_5": 0.22140220022495802,
202
+ "scr_dir2_threshold_5": 0.22140220022495802,
203
+ "scr_dir1_threshold_10": 0.11309517262813487,
204
+ "scr_metric_threshold_10": 0.3173432343016558,
205
+ "scr_dir2_threshold_10": 0.3173432343016558,
206
+ "scr_dir1_threshold_20": 0.08333330376754479,
207
+ "scr_metric_threshold_20": 0.5018451044796735,
208
+ "scr_dir2_threshold_20": 0.5018451044796735,
209
+ "scr_dir1_threshold_50": 0.059523737721180185,
210
+ "scr_metric_threshold_50": 0.6752767876662826,
211
+ "scr_dir2_threshold_50": 0.6752767876662826,
212
+ "scr_dir1_threshold_100": 0.053571434906954686,
213
+ "scr_metric_threshold_100": 0.6789667766823134,
214
+ "scr_dir2_threshold_100": 0.6789667766823134,
215
+ "scr_dir1_threshold_500": -0.16071430472086407,
216
+ "scr_metric_threshold_500": 0.5756457645735541,
217
+ "scr_dir2_threshold_500": 0.5756457645735541
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.19883063357993158,
222
+ "scr_metric_threshold_2": 0.026315895615697978,
223
+ "scr_dir2_threshold_2": 0.026315895615697978,
224
+ "scr_dir1_threshold_5": 0.3391814435059521,
225
+ "scr_metric_threshold_5": 0.06390984014305114,
226
+ "scr_dir2_threshold_5": 0.06390984014305114,
227
+ "scr_dir1_threshold_10": 0.3391814435059521,
228
+ "scr_metric_threshold_10": 0.13909772919775742,
229
+ "scr_dir2_threshold_10": 0.13909772919775742,
230
+ "scr_dir1_threshold_20": 0.35672542545864205,
231
+ "scr_metric_threshold_20": 0.16541362481345542,
232
+ "scr_dir2_threshold_20": 0.16541362481345542,
233
+ "scr_dir1_threshold_50": 0.403508796390538,
234
+ "scr_metric_threshold_50": 0.2556391364946199,
235
+ "scr_dir2_threshold_50": 0.2556391364946199,
236
+ "scr_dir1_threshold_100": 0.4502925158876003,
237
+ "scr_metric_threshold_100": 0.31203005328564964,
238
+ "scr_dir2_threshold_100": 0.31203005328564964,
239
+ "scr_dir1_threshold_500": 0.47953225343197264,
240
+ "scr_metric_threshold_500": 0.46240605547264685,
241
+ "scr_dir2_threshold_500": 0.46240605547264685
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.3893806710109472,
246
+ "scr_metric_threshold_2": 0.3495441059882921,
247
+ "scr_dir2_threshold_2": 0.3495441059882921,
248
+ "scr_dir1_threshold_5": 0.47787613420218944,
249
+ "scr_metric_threshold_5": 0.5805470728140495,
250
+ "scr_dir2_threshold_5": 0.5805470728140495,
251
+ "scr_dir1_threshold_10": -0.18584036720669514,
252
+ "scr_metric_threshold_10": 0.6413372891010097,
253
+ "scr_dir2_threshold_10": 0.6413372891010097,
254
+ "scr_dir1_threshold_20": -0.1946903355054736,
255
+ "scr_metric_threshold_20": 0.6899696070658862,
256
+ "scr_dir2_threshold_20": 0.6899696070658862,
257
+ "scr_dir1_threshold_50": -0.07079658154282091,
258
+ "scr_metric_threshold_50": 0.6352583399399678,
259
+ "scr_dir2_threshold_50": 0.6352583399399678,
260
+ "scr_dir1_threshold_100": -0.47787613420218944,
261
+ "scr_metric_threshold_100": 0.6382978145204887,
262
+ "scr_dir2_threshold_100": 0.6382978145204887,
263
+ "scr_dir1_threshold_500": -0.8584068369143582,
264
+ "scr_metric_threshold_500": 0.5683889933228303,
265
+ "scr_dir2_threshold_500": 0.5683889933228303
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.27403836647743146,
270
+ "scr_metric_threshold_2": 0.12903224034349853,
271
+ "scr_dir2_threshold_2": 0.12903224034349853,
272
+ "scr_dir1_threshold_5": 0.360576858325207,
273
+ "scr_metric_threshold_5": 0.13824898589704712,
274
+ "scr_dir2_threshold_5": 0.13824898589704712,
275
+ "scr_dir1_threshold_10": 0.32211552927347387,
276
+ "scr_metric_threshold_10": 0.15207369221371095,
277
+ "scr_dir2_threshold_10": 0.15207369221371095,
278
+ "scr_dir1_threshold_20": 0.3269231595848422,
279
+ "scr_metric_threshold_20": 0.08755757204196167,
280
+ "scr_dir2_threshold_20": 0.08755757204196167,
281
+ "scr_dir1_threshold_50": 0.5288462117093895,
282
+ "scr_metric_threshold_50": 0.20276510606879639,
283
+ "scr_dir2_threshold_50": 0.20276510606879639,
284
+ "scr_dir1_threshold_100": 0.5624999104497543,
285
+ "scr_metric_threshold_100": 0.23041479337789672,
286
+ "scr_dir2_threshold_100": 0.23041479337789672,
287
+ "scr_dir1_threshold_500": 0.5673078273219089,
288
+ "scr_metric_threshold_500": 0.3179723654198584,
289
+ "scr_dir2_threshold_500": 0.3179723654198584
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_16k/average_l0_23",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_279_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732149324991,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.2943828544947634,
76
+ "scr_metric_threshold_2": 0.1812626526154949,
77
+ "scr_dir2_threshold_2": 0.1812626526154949,
78
+ "scr_dir1_threshold_5": 0.39031566710940985,
79
+ "scr_metric_threshold_5": 0.2428163089924627,
80
+ "scr_dir2_threshold_5": 0.2428163089924627,
81
+ "scr_dir1_threshold_10": 0.3346124691006466,
82
+ "scr_metric_threshold_10": 0.32382964526120134,
83
+ "scr_dir2_threshold_10": 0.32382964526120134,
84
+ "scr_dir1_threshold_20": 0.19914057358115475,
85
+ "scr_metric_threshold_20": 0.4150339399800802,
86
+ "scr_dir2_threshold_20": 0.4150339399800802,
87
+ "scr_dir1_threshold_50": 0.10250205563375178,
88
+ "scr_metric_threshold_50": 0.431571934990497,
89
+ "scr_dir2_threshold_50": 0.431571934990497,
90
+ "scr_dir1_threshold_100": -0.06953014521362216,
91
+ "scr_metric_threshold_100": 0.43144517057732357,
92
+ "scr_dir2_threshold_100": 0.43144517057732357,
93
+ "scr_dir1_threshold_500": -0.34919923219541515,
94
+ "scr_metric_threshold_500": 0.4015066560391146,
95
+ "scr_dir2_threshold_500": 0.4015066560391146
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2352945301358372,
102
+ "scr_metric_threshold_2": 0.04303801976188022,
103
+ "scr_dir2_threshold_2": 0.04303801976188022,
104
+ "scr_dir1_threshold_5": 0.4411768057353677,
105
+ "scr_metric_threshold_5": 0.09620264128525105,
106
+ "scr_dir2_threshold_5": 0.09620264128525105,
107
+ "scr_dir1_threshold_10": 0.4852947492705007,
108
+ "scr_metric_threshold_10": 0.1544304882404575,
109
+ "scr_dir2_threshold_10": 0.1544304882404575,
110
+ "scr_dir1_threshold_20": 0.2647059081334898,
111
+ "scr_metric_threshold_20": 0.2860760093441966,
112
+ "scr_dir2_threshold_20": 0.2860760093441966,
113
+ "scr_dir1_threshold_50": 0.27941247367097016,
114
+ "scr_metric_threshold_50": 0.27341779486678813,
115
+ "scr_dir2_threshold_50": 0.27341779486678813,
116
+ "scr_dir1_threshold_100": -0.9264698019285603,
117
+ "scr_metric_threshold_100": 0.3772152742997926,
118
+ "scr_dir2_threshold_100": 0.3772152742997926,
119
+ "scr_dir1_threshold_500": -1.6617638937950705,
120
+ "scr_metric_threshold_500": 0.3468354690153208,
121
+ "scr_dir2_threshold_500": 0.3468354690153208
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3873872277450489,
126
+ "scr_metric_threshold_2": 0.21176476363079202,
127
+ "scr_dir2_threshold_2": 0.21176476363079202,
128
+ "scr_dir1_threshold_5": 0.4234233363457843,
129
+ "scr_metric_threshold_5": 0.27058823116922914,
130
+ "scr_dir2_threshold_5": 0.27058823116922914,
131
+ "scr_dir1_threshold_10": 0.47747723075749987,
132
+ "scr_metric_threshold_10": 0.3852940279307338,
133
+ "scr_dir2_threshold_10": 0.3852940279307338,
134
+ "scr_dir1_threshold_20": 0.5405405550534804,
135
+ "scr_metric_threshold_20": 0.5470587389691973,
136
+ "scr_dir2_threshold_20": 0.5470587389691973,
137
+ "scr_dir1_threshold_50": 0.5675677707487257,
138
+ "scr_metric_threshold_50": 0.6823528545538119,
139
+ "scr_dir2_threshold_50": 0.6823528545538119,
140
+ "scr_dir1_threshold_100": 0.5855855565597059,
141
+ "scr_metric_threshold_100": 0.5,
142
+ "scr_dir2_threshold_100": 0.5,
143
+ "scr_dir1_threshold_500": 0.01801778581098025,
144
+ "scr_metric_threshold_500": 0.761764640915359,
145
+ "scr_dir2_threshold_500": 0.761764640915359
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.4814806229786858,
150
+ "scr_metric_threshold_2": 0.06862744525137968,
151
+ "scr_dir2_threshold_2": 0.06862744525137968,
152
+ "scr_dir1_threshold_5": 0.5185182732320055,
153
+ "scr_metric_threshold_5": 0.1127449634074223,
154
+ "scr_dir2_threshold_5": 0.1127449634074223,
155
+ "scr_dir1_threshold_10": -0.07407419671733059,
156
+ "scr_metric_threshold_10": 0.2524509488825882,
157
+ "scr_dir2_threshold_10": 0.2524509488825882,
158
+ "scr_dir1_threshold_20": -0.24074086338399725,
159
+ "scr_metric_threshold_20": 0.3553921167596577,
160
+ "scr_dir2_threshold_20": 0.3553921167596577,
161
+ "scr_dir1_threshold_50": -0.18518493989867213,
162
+ "scr_metric_threshold_50": 0.4166665692734544,
163
+ "scr_dir2_threshold_50": 0.4166665692734544,
164
+ "scr_dir1_threshold_100": -1.537036546464011,
165
+ "scr_metric_threshold_100": 0.3504902189944813,
166
+ "scr_dir2_threshold_100": 0.3504902189944813,
167
+ "scr_dir1_threshold_500": -2.074073092928022,
168
+ "scr_metric_threshold_500": 0.012254890502759344,
169
+ "scr_dir2_threshold_500": 0.012254890502759344
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.42187508731147705,
174
+ "scr_metric_threshold_2": 0.13731342858688256,
175
+ "scr_dir2_threshold_2": 0.13731342858688256,
176
+ "scr_dir1_threshold_5": 0.32812491268852295,
177
+ "scr_metric_threshold_5": 0.23880589313724654,
178
+ "scr_dir2_threshold_5": 0.23880589313724654,
179
+ "scr_dir1_threshold_10": 0.33593731082513306,
180
+ "scr_metric_threshold_10": 0.379104535503725,
181
+ "scr_dir2_threshold_10": 0.379104535503725,
182
+ "scr_dir1_threshold_20": -0.7031246798579175,
183
+ "scr_metric_threshold_20": 0.4686566787085828,
184
+ "scr_dir2_threshold_20": 0.4686566787085828,
185
+ "scr_dir1_threshold_50": -0.4921876018633899,
186
+ "scr_metric_threshold_50": 0.5014926068897979,
187
+ "scr_dir2_threshold_50": 0.5014926068897979,
188
+ "scr_dir1_threshold_100": 0.6406250291038257,
189
+ "scr_metric_threshold_100": 0.6238805003515784,
190
+ "scr_dir2_threshold_100": 0.6238805003515784,
191
+ "scr_dir1_threshold_500": -0.5546872526174816,
192
+ "scr_metric_threshold_500": -0.17014917884380523,
193
+ "scr_dir2_threshold_500": -0.17014917884380523
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.06547639532486829,
198
+ "scr_metric_threshold_2": 0.5129150715277658,
199
+ "scr_dir2_threshold_2": 0.5129150715277658,
200
+ "scr_dir1_threshold_5": 0.3869046499771338,
201
+ "scr_metric_threshold_5": 0.5645755775821455,
202
+ "scr_dir2_threshold_5": 0.5645755775821455,
203
+ "scr_dir1_threshold_10": 0.32142860944172813,
204
+ "scr_metric_threshold_10": 0.5904059405809935,
205
+ "scr_dir2_threshold_10": 0.5904059405809935,
206
+ "scr_dir1_threshold_20": 0.35714278111654374,
207
+ "scr_metric_threshold_20": 0.6162360836365252,
208
+ "scr_dir2_threshold_20": 0.6162360836365252,
209
+ "scr_dir1_threshold_50": 0.20833343681359326,
210
+ "scr_metric_threshold_50": 0.21771221120892723,
211
+ "scr_dir2_threshold_50": 0.21771221120892723,
212
+ "scr_dir1_threshold_100": -0.16071430472086407,
213
+ "scr_metric_threshold_100": 0.08118085806925839,
214
+ "scr_dir2_threshold_100": 0.08118085806925839,
215
+ "scr_dir1_threshold_500": 0.11309517262813487,
216
+ "scr_metric_threshold_500": 0.5424354235426446,
217
+ "scr_dir2_threshold_500": 0.5424354235426446
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.23976612671598632,
222
+ "scr_metric_threshold_2": 0.10150378467040429,
223
+ "scr_dir2_threshold_2": 0.10150378467040429,
224
+ "scr_dir1_threshold_5": 0.29824560180473103,
225
+ "scr_metric_threshold_5": 0.1691729744506738,
226
+ "scr_dir2_threshold_5": 0.1691729744506738,
227
+ "scr_dir1_threshold_10": 0.3918130407988556,
228
+ "scr_metric_threshold_10": 0.20300756934080857,
229
+ "scr_dir2_threshold_10": 0.20300756934080857,
230
+ "scr_dir1_threshold_20": 0.5146198687721861,
231
+ "scr_metric_threshold_20": 0.21804519196726674,
232
+ "scr_dir2_threshold_20": 0.21804519196726674,
233
+ "scr_dir1_threshold_50": 0.40935702275154556,
234
+ "scr_metric_threshold_50": 0.3533835715278058,
235
+ "scr_dir2_threshold_50": 0.3533835715278058,
236
+ "scr_dir1_threshold_100": 0.47953225343197264,
237
+ "scr_metric_threshold_100": 0.36466162043946093,
238
+ "scr_dir2_threshold_100": 0.36466162043946093,
239
+ "scr_dir1_threshold_500": 0.27485409062136623,
240
+ "scr_metric_threshold_500": 0.6503760021869972,
241
+ "scr_dir2_threshold_500": 0.6503760021869972
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.3362829711165476,
246
+ "scr_metric_threshold_2": 0.27355615446045656,
247
+ "scr_dir2_threshold_2": 0.27355615446045656,
248
+ "scr_dir1_threshold_5": 0.442477843430779,
249
+ "scr_metric_threshold_5": 0.38905763787333525,
250
+ "scr_dir2_threshold_5": 0.38905763787333525,
251
+ "scr_dir1_threshold_10": 0.4601767250792003,
252
+ "scr_metric_threshold_10": 0.5015196467056929,
253
+ "scr_dir2_threshold_10": 0.5015196467056929,
254
+ "scr_dir1_threshold_20": 0.45132728425498964,
255
+ "scr_metric_threshold_20": 0.6352583399399678,
256
+ "scr_dir2_threshold_20": 0.6352583399399678,
257
+ "scr_dir1_threshold_50": -0.4336278751320005,
258
+ "scr_metric_threshold_50": 0.7264436643704083,
259
+ "scr_dir2_threshold_50": 0.7264436643704083,
260
+ "scr_dir1_threshold_100": -0.18584036720669514,
261
+ "scr_metric_threshold_100": 0.8085105650592858,
262
+ "scr_dir2_threshold_100": 0.8085105650592858,
263
+ "scr_dir1_threshold_500": 0.5044249841493892,
264
+ "scr_metric_threshold_500": 0.7689968520051075,
265
+ "scr_dir2_threshold_500": 0.7689968520051075
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.187499874629656,
270
+ "scr_metric_threshold_2": 0.10138255303439819,
271
+ "scr_dir2_threshold_2": 0.10138255303439819,
272
+ "scr_dir1_threshold_5": 0.28365391366095444,
273
+ "scr_metric_threshold_5": 0.10138255303439819,
274
+ "scr_dir2_threshold_5": 0.10138255303439819,
275
+ "scr_dir1_threshold_10": 0.2788462833495861,
276
+ "scr_metric_threshold_10": 0.1244240049046106,
277
+ "scr_dir2_threshold_10": 0.1244240049046106,
278
+ "scr_dir1_threshold_20": 0.40865373456046306,
279
+ "scr_metric_threshold_20": 0.1935483605152478,
280
+ "scr_dir2_threshold_20": 0.1935483605152478,
281
+ "scr_dir1_threshold_50": 0.4663461579792421,
282
+ "scr_metric_threshold_50": 0.2811062072329822,
283
+ "scr_dir2_threshold_50": 0.2811062072329822,
284
+ "scr_dir1_threshold_100": 0.5480770195156492,
285
+ "scr_metric_threshold_100": 0.3456223274047315,
286
+ "scr_dir2_threshold_100": 0.3456223274047315,
287
+ "scr_dir1_threshold_500": 0.5865383485673823,
288
+ "scr_metric_threshold_500": 0.29953914898853395,
289
+ "scr_dir2_threshold_500": 0.29953914898853395
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_16k/average_l0_279",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_40_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732149751188,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.22312257235975685,
76
+ "scr_metric_threshold_2": 0.1668348778918644,
77
+ "scr_dir2_threshold_2": 0.1668348778918644,
78
+ "scr_dir1_threshold_5": 0.33119637777207983,
79
+ "scr_metric_threshold_5": 0.23134461256201835,
80
+ "scr_dir2_threshold_5": 0.23134461256201835,
81
+ "scr_dir1_threshold_10": 0.3186693591288544,
82
+ "scr_metric_threshold_10": 0.30124848610333904,
83
+ "scr_dir2_threshold_10": 0.30124848610333904,
84
+ "scr_dir1_threshold_20": 0.15725204616847793,
85
+ "scr_metric_threshold_20": 0.3930884292295641,
86
+ "scr_dir2_threshold_20": 0.3930884292295641,
87
+ "scr_dir1_threshold_50": 0.13131814020566657,
88
+ "scr_metric_threshold_50": 0.4559356303607377,
89
+ "scr_dir2_threshold_50": 0.4559356303607377,
90
+ "scr_dir1_threshold_100": -0.09176902766618408,
91
+ "scr_metric_threshold_100": 0.4062266560937047,
92
+ "scr_dir2_threshold_100": 0.4062266560937047,
93
+ "scr_dir1_threshold_500": -0.8158517333955163,
94
+ "scr_metric_threshold_500": 0.39597014646093737,
95
+ "scr_dir2_threshold_500": 0.39597014646093737
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.20588315213818456,
102
+ "scr_metric_threshold_2": 0.05316462152337083,
103
+ "scr_dir2_threshold_2": 0.05316462152337083,
104
+ "scr_dir1_threshold_5": 0.3088238516686228,
105
+ "scr_metric_threshold_5": 0.07848105047818764,
106
+ "scr_dir2_threshold_5": 0.07848105047818764,
107
+ "scr_dir1_threshold_10": 0.3088238516686228,
108
+ "scr_metric_threshold_10": 0.09367087767151386,
109
+ "scr_dir2_threshold_10": 0.09367087767151386,
110
+ "scr_dir1_threshold_20": 0.2941181626697965,
111
+ "scr_metric_threshold_20": 0.12151907024006786,
112
+ "scr_dir2_threshold_20": 0.12151907024006786,
113
+ "scr_dir1_threshold_50": 0.2647059081334898,
114
+ "scr_metric_threshold_50": 0.21265833519566393,
115
+ "scr_dir2_threshold_50": 0.21265833519566393,
116
+ "scr_dir1_threshold_100": 0.2941181626697965,
117
+ "scr_metric_threshold_100": 0.23037977510490792,
118
+ "scr_dir2_threshold_100": 0.23037977510490792,
119
+ "scr_dir1_threshold_500": -2.308822975129969,
120
+ "scr_metric_threshold_500": 0.3493670817312386,
121
+ "scr_dir2_threshold_500": 0.3493670817312386
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.0810811101069608,
126
+ "scr_metric_threshold_2": 0.27352936948459866,
127
+ "scr_dir2_threshold_2": 0.27352936948459866,
128
+ "scr_dir1_threshold_5": 0.3243244404278432,
129
+ "scr_metric_threshold_5": 0.3705881610461245,
130
+ "scr_dir2_threshold_5": 0.3705881610461245,
131
+ "scr_dir1_threshold_10": 0.27027000903735277,
132
+ "scr_metric_threshold_10": 0.4411763571538014,
133
+ "scr_dir2_threshold_10": 0.4411763571538014,
134
+ "scr_dir1_threshold_20": 0.2882883318271079,
135
+ "scr_metric_threshold_20": 0.5617646058538066,
136
+ "scr_dir2_threshold_20": 0.5617646058538066,
137
+ "scr_dir1_threshold_50": 0.17117111311941172,
138
+ "scr_metric_threshold_50": 0.6617647110384636,
139
+ "scr_dir2_threshold_50": 0.6617647110384636,
140
+ "scr_dir1_threshold_100": -0.4594594449465196,
141
+ "scr_metric_threshold_100": 0.6205882487000053,
142
+ "scr_dir2_threshold_100": 0.6205882487000053,
143
+ "scr_dir1_threshold_500": -1.0540538944117155,
144
+ "scr_metric_threshold_500": 0.4147057616999523,
145
+ "scr_dir2_threshold_500": 0.4147057616999523
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.2777774098480082,
150
+ "scr_metric_threshold_2": 0.1102940145248341,
151
+ "scr_dir2_threshold_2": 0.1102940145248341,
152
+ "scr_dir1_threshold_5": 0.4259258032826694,
153
+ "scr_metric_threshold_5": 0.15931372262568982,
154
+ "scr_dir2_threshold_5": 0.15931372262568982,
155
+ "scr_dir1_threshold_10": 0.3518516065653388,
156
+ "scr_metric_threshold_10": 0.24754890502759344,
157
+ "scr_dir2_threshold_10": 0.24754890502759344,
158
+ "scr_dir1_threshold_20": -0.09259246994933606,
159
+ "scr_metric_threshold_20": 0.36274510949724065,
160
+ "scr_dir2_threshold_20": 0.36274510949724065,
161
+ "scr_dir1_threshold_50": -0.25925913661600275,
162
+ "scr_metric_threshold_50": 0.46813722625689835,
163
+ "scr_dir2_threshold_50": 0.46813722625689835,
164
+ "scr_dir1_threshold_100": -1.3703698797973443,
165
+ "scr_metric_threshold_100": 0.10294116787706951,
166
+ "scr_dir2_threshold_100": 0.10294116787706951,
167
+ "scr_dir1_threshold_500": -2.6111107431813414,
168
+ "scr_metric_threshold_500": -0.009803941620171147,
169
+ "scr_dir2_threshold_500": -0.009803941620171147
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.16406268917486697,
174
+ "scr_metric_threshold_2": 0.22089550008113348,
175
+ "scr_dir2_threshold_2": 0.22089550008113348,
176
+ "scr_dir1_threshold_5": 0.32031251455191284,
177
+ "scr_metric_threshold_5": 0.31044782121028375,
178
+ "scr_dir2_threshold_5": 0.31044782121028375,
179
+ "scr_dir1_threshold_10": 0.21093754365573852,
180
+ "scr_metric_threshold_10": 0.41194028576064773,
181
+ "scr_dir2_threshold_10": 0.41194028576064773,
182
+ "scr_dir1_threshold_20": 0.10156257275956422,
183
+ "scr_metric_threshold_20": 0.5253730716565179,
184
+ "scr_dir2_threshold_20": 0.5253730716565179,
185
+ "scr_dir1_threshold_50": -0.2421876018633899,
186
+ "scr_metric_threshold_50": 0.5283581075118213,
187
+ "scr_dir2_threshold_50": 0.5283581075118213,
188
+ "scr_dir1_threshold_100": -0.21093754365573852,
189
+ "scr_metric_threshold_100": 0.6358208216970846,
190
+ "scr_dir2_threshold_100": 0.6358208216970846,
191
+ "scr_dir1_threshold_500": -0.5546872526174816,
192
+ "scr_metric_threshold_500": 0.6626865002434005,
193
+ "scr_dir2_threshold_500": 0.6626865002434005
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.11309517262813487,
198
+ "scr_metric_threshold_2": 0.12915137510760727,
199
+ "scr_dir2_threshold_2": 0.12915137510760727,
200
+ "scr_dir1_threshold_5": 0.11309517262813487,
201
+ "scr_metric_threshold_5": 0.28413289327074626,
202
+ "scr_dir2_threshold_5": 0.28413289327074626,
203
+ "scr_dir1_threshold_10": 0.11904783023182298,
204
+ "scr_metric_threshold_10": 0.44649438946594683,
205
+ "scr_dir2_threshold_10": 0.44649438946594683,
206
+ "scr_dir1_threshold_20": 0.14880969909241307,
207
+ "scr_metric_threshold_20": 0.5756457645735541,
208
+ "scr_dir2_threshold_20": 0.5756457645735541,
209
+ "scr_dir1_threshold_50": 0.16071430472086407,
210
+ "scr_metric_threshold_50": 0.7084871286971922,
211
+ "scr_dir2_threshold_50": 0.7084871286971922,
212
+ "scr_dir1_threshold_100": 0.16071430472086407,
213
+ "scr_metric_threshold_100": 0.7232473047046315,
214
+ "scr_dir2_threshold_100": 0.7232473047046315,
215
+ "scr_dir1_threshold_500": 0.19047617358145416,
216
+ "scr_metric_threshold_500": 0.6789667766823134,
217
+ "scr_dir2_threshold_500": 0.6789667766823134
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.27485409062136623,
222
+ "scr_metric_threshold_2": 0.03759416860493777,
223
+ "scr_dir2_threshold_2": 0.03759416860493777,
224
+ "scr_dir1_threshold_5": 0.3274856879142697,
225
+ "scr_metric_threshold_5": 0.07518811313229093,
226
+ "scr_dir2_threshold_5": 0.07518811313229093,
227
+ "scr_dir1_threshold_10": 0.3391814435059521,
228
+ "scr_metric_threshold_10": 0.12781968028610227,
229
+ "scr_dir2_threshold_10": 0.12781968028610227,
230
+ "scr_dir1_threshold_20": 0.403508796390538,
231
+ "scr_metric_threshold_20": 0.19924821970359016,
232
+ "scr_dir2_threshold_20": 0.19924821970359016,
233
+ "scr_dir1_threshold_50": 0.5029241131805038,
234
+ "scr_metric_threshold_50": 0.2894737313847547,
235
+ "scr_dir2_threshold_50": 0.2894737313847547,
236
+ "scr_dir1_threshold_100": 0.49707623538466256,
237
+ "scr_metric_threshold_100": 0.3383459489013476,
238
+ "scr_dir2_threshold_100": 0.3383459489013476,
239
+ "scr_dir1_threshold_500": 0.5555557104734072,
240
+ "scr_metric_threshold_500": 0.4360903839345335,
241
+ "scr_dir2_threshold_500": 0.4360903839345335
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.3893806710109472,
246
+ "scr_metric_threshold_2": 0.4224924017664715,
247
+ "scr_dir2_threshold_2": 0.4224924017664715,
248
+ "scr_dir1_threshold_5": 0.46902669337797875,
249
+ "scr_metric_threshold_5": 0.4620061148206498,
250
+ "scr_dir2_threshold_5": 0.4620061148206498,
251
+ "scr_dir1_threshold_10": 0.5309733066220212,
252
+ "scr_metric_threshold_10": 0.4984801721251719,
253
+ "scr_dir2_threshold_10": 0.4984801721251719,
254
+ "scr_dir1_threshold_20": -0.2654863895737267,
255
+ "scr_metric_threshold_20": 0.6048632317964876,
256
+ "scr_dir2_threshold_20": 0.6048632317964876,
257
+ "scr_dir1_threshold_50": -0.06194661324404244,
258
+ "scr_metric_threshold_50": 0.5896656777247475,
259
+ "scr_dir2_threshold_50": 0.5896656777247475,
260
+ "scr_dir1_threshold_100": -0.2654863895737267,
261
+ "scr_metric_threshold_100": 0.340425501077594,
262
+ "scr_dir2_threshold_100": 0.340425501077594,
263
+ "scr_dir1_threshold_500": -1.3451324119407582,
264
+ "scr_metric_threshold_500": 0.30395126260393673,
265
+ "scr_dir2_threshold_500": 0.30395126260393673
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.2788462833495861,
270
+ "scr_metric_threshold_2": 0.08755757204196167,
271
+ "scr_dir2_threshold_2": 0.08755757204196167,
272
+ "scr_dir1_threshold_5": 0.360576858325207,
273
+ "scr_metric_threshold_5": 0.11059902391217406,
274
+ "scr_dir2_threshold_5": 0.11059902391217406,
275
+ "scr_dir1_threshold_10": 0.41826928174398603,
276
+ "scr_metric_threshold_10": 0.14285722133593506,
277
+ "scr_dir2_threshold_10": 0.14285722133593506,
278
+ "scr_dir1_threshold_20": 0.37980766613146666,
279
+ "scr_metric_threshold_20": 0.1935483605152478,
280
+ "scr_dir2_threshold_20": 0.1935483605152478,
281
+ "scr_dir1_threshold_50": 0.5144230342144982,
282
+ "scr_metric_threshold_50": 0.18894012507635988,
283
+ "scr_dir2_threshold_50": 0.18894012507635988,
284
+ "scr_dir1_threshold_100": 0.6201923338685333,
285
+ "scr_metric_threshold_100": 0.25806448068699706,
286
+ "scr_dir2_threshold_100": 0.25806448068699706,
287
+ "scr_dir1_threshold_500": 0.6009615260622737,
288
+ "scr_metric_threshold_500": 0.3317973464122949,
289
+ "scr_dir2_threshold_500": 0.3317973464122949
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_16k/average_l0_40",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_16k_average_l0_73_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732150170488,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.29413948593556816,
76
+ "scr_metric_threshold_2": 0.2044518234593106,
77
+ "scr_dir2_threshold_2": 0.2044518234593106,
78
+ "scr_dir1_threshold_5": 0.35460829340921973,
79
+ "scr_metric_threshold_5": 0.25725384227387843,
80
+ "scr_dir2_threshold_5": 0.25725384227387843,
81
+ "scr_dir1_threshold_10": 0.32818072868003084,
82
+ "scr_metric_threshold_10": 0.33237747277750745,
83
+ "scr_dir2_threshold_10": 0.33237747277750745,
84
+ "scr_dir1_threshold_20": 0.17202608136077024,
85
+ "scr_metric_threshold_20": 0.411792574033676,
86
+ "scr_dir2_threshold_20": 0.411792574033676,
87
+ "scr_dir1_threshold_50": 0.18659824438820305,
88
+ "scr_metric_threshold_50": 0.4950551255456537,
89
+ "scr_dir2_threshold_50": 0.4950551255456537,
90
+ "scr_dir1_threshold_100": 0.011185897008624088,
91
+ "scr_metric_threshold_100": 0.49498033378050094,
92
+ "scr_dir2_threshold_100": 0.49498033378050094,
93
+ "scr_dir1_threshold_500": -1.3378640088101883,
94
+ "scr_metric_threshold_500": 0.42177864508967455,
95
+ "scr_dir2_threshold_500": 0.42177864508967455
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.27941247367097016,
102
+ "scr_metric_threshold_2": 0.06835444871669703,
103
+ "scr_dir2_threshold_2": 0.06835444871669703,
104
+ "scr_dir1_threshold_5": 0.39705886220023473,
105
+ "scr_metric_threshold_5": 0.08101266319410545,
106
+ "scr_dir2_threshold_5": 0.08101266319410545,
107
+ "scr_dir1_threshold_10": 0.3676474842025821,
108
+ "scr_metric_threshold_10": 0.08860765223967824,
109
+ "scr_dir2_threshold_10": 0.08860765223967824,
110
+ "scr_dir1_threshold_20": 0.3235295406674491,
111
+ "scr_metric_threshold_20": 0.11139246847857727,
112
+ "scr_dir2_threshold_20": 0.11139246847857727,
113
+ "scr_dir1_threshold_50": 0.38235317320140844,
114
+ "scr_metric_threshold_50": 0.2050633461500911,
115
+ "scr_dir2_threshold_50": 0.2050633461500911,
116
+ "scr_dir1_threshold_100": 0.4117654277377151,
117
+ "scr_metric_threshold_100": 0.3696202852542198,
118
+ "scr_dir2_threshold_100": 0.3696202852542198,
119
+ "scr_dir1_threshold_500": -3.4558807416568857,
120
+ "scr_metric_threshold_500": 0.40759492868644503,
121
+ "scr_dir2_threshold_500": 0.40759492868644503
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3513511191443136,
126
+ "scr_metric_threshold_2": 0.26176464091535895,
127
+ "scr_dir2_threshold_2": 0.26176464091535895,
128
+ "scr_dir1_threshold_5": 0.3783783348395588,
129
+ "scr_metric_threshold_5": 0.36470588441538543,
130
+ "scr_dir2_threshold_5": 0.36470588441538543,
131
+ "scr_dir1_threshold_10": 0.34234222623882343,
132
+ "scr_metric_threshold_10": 0.4970588616846304,
133
+ "scr_dir2_threshold_10": 0.4970588616846304,
134
+ "scr_dir1_threshold_20": 0.3333333333333333,
135
+ "scr_metric_threshold_20": 0.6029410681922649,
136
+ "scr_dir2_threshold_20": 0.6029410681922649,
137
+ "scr_dir1_threshold_50": 0.3063061176380881,
138
+ "scr_metric_threshold_50": 0.7029411733769219,
139
+ "scr_dir2_threshold_50": 0.7029411733769219,
140
+ "scr_dir1_threshold_100": 0.2882883318271079,
141
+ "scr_metric_threshold_100": 0.6235293870153749,
142
+ "scr_dir2_threshold_100": 0.6235293870153749,
143
+ "scr_dir1_threshold_500": -0.8198199939750982,
144
+ "scr_metric_threshold_500": 0.447058809092302,
145
+ "scr_dir2_threshold_500": 0.447058809092302
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3518516065653388,
150
+ "scr_metric_threshold_2": 0.1102940145248341,
151
+ "scr_dir2_threshold_2": 0.1102940145248341,
152
+ "scr_dir1_threshold_5": 0.4814806229786858,
153
+ "scr_metric_threshold_5": 0.18382350363120853,
154
+ "scr_dir2_threshold_5": 0.18382350363120853,
155
+ "scr_dir1_threshold_10": 0.40740753005066394,
156
+ "scr_metric_threshold_10": 0.2965686131284492,
157
+ "scr_dir2_threshold_10": 0.2965686131284492,
158
+ "scr_dir1_threshold_20": -0.07407419671733059,
159
+ "scr_metric_threshold_20": 0.38970583938534753,
160
+ "scr_dir2_threshold_20": 0.38970583938534753,
161
+ "scr_dir1_threshold_50": -0.05555592348532512,
162
+ "scr_metric_threshold_50": 0.49509795614500524,
163
+ "scr_dir2_threshold_50": 0.49509795614500524,
164
+ "scr_dir1_threshold_100": -0.9259258032826694,
165
+ "scr_metric_threshold_100": 0.24754890502759344,
166
+ "scr_dir2_threshold_100": 0.24754890502759344,
167
+ "scr_dir1_threshold_500": -5.1111096393920326,
168
+ "scr_metric_threshold_500": -0.05392160586603213,
169
+ "scr_dir2_threshold_500": -0.05392160586603213
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.27343766007104126,
174
+ "scr_metric_threshold_2": 0.21791046422583005,
175
+ "scr_dir2_threshold_2": 0.21791046422583005,
176
+ "scr_dir1_threshold_5": 0.32812491268852295,
177
+ "scr_metric_threshold_5": 0.31343285706558716,
178
+ "scr_dir2_threshold_5": 0.31343285706558716,
179
+ "scr_dir1_threshold_10": 0.28125005820765137,
180
+ "scr_metric_threshold_10": 0.4447760360175704,
181
+ "scr_dir2_threshold_10": 0.4447760360175704,
182
+ "scr_dir1_threshold_20": 0.046874854480871565,
183
+ "scr_metric_threshold_20": 0.5432836426369234,
184
+ "scr_dir2_threshold_20": 0.5432836426369234,
185
+ "scr_dir1_threshold_50": 0.07031251455191284,
186
+ "scr_metric_threshold_50": 0.5910447500946557,
187
+ "scr_dir2_threshold_50": 0.5910447500946557,
188
+ "scr_dir1_threshold_100": 0.08593731082513303,
189
+ "scr_metric_threshold_100": 0.5253730716565179,
190
+ "scr_dir2_threshold_100": 0.5253730716565179,
191
+ "scr_dir1_threshold_500": -0.2656247962732202,
192
+ "scr_metric_threshold_500": 0.5462686784922268,
193
+ "scr_dir2_threshold_500": 0.5462686784922268
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.07142869813909379,
198
+ "scr_metric_threshold_2": 0.4317342134585075,
199
+ "scr_dir2_threshold_2": 0.4317342134585075,
200
+ "scr_dir1_threshold_5": 0.11904783023182298,
201
+ "scr_metric_threshold_5": 0.509225082511735,
202
+ "scr_dir2_threshold_5": 0.509225082511735,
203
+ "scr_dir1_threshold_10": 0.13095243586027397,
204
+ "scr_metric_threshold_10": 0.5276752475352052,
205
+ "scr_dir2_threshold_10": 0.5276752475352052,
206
+ "scr_dir1_threshold_20": 0.15476200190663858,
207
+ "scr_metric_threshold_20": 0.5719557755575233,
208
+ "scr_dir2_threshold_20": 0.5719557755575233,
209
+ "scr_dir1_threshold_50": -0.2678571745347734,
210
+ "scr_metric_threshold_50": 0.7084871286971922,
211
+ "scr_dir2_threshold_50": 0.7084871286971922,
212
+ "scr_dir1_threshold_100": -0.19047617358145416,
213
+ "scr_metric_threshold_100": 0.7269372937206623,
214
+ "scr_dir2_threshold_100": 0.7269372937206623,
215
+ "scr_dir1_threshold_500": -0.26190451693108535,
216
+ "scr_metric_threshold_500": 0.6752767876662826,
217
+ "scr_dir2_threshold_500": 0.6752767876662826
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.2923977240088898,
222
+ "scr_metric_threshold_2": 0.056391140868614346,
223
+ "scr_dir2_threshold_2": 0.056391140868614346,
224
+ "scr_dir1_threshold_5": 0.32163746155326217,
225
+ "scr_metric_threshold_5": 0.0864661620439461,
226
+ "scr_dir2_threshold_5": 0.0864661620439461,
227
+ "scr_dir1_threshold_10": 0.32163746155326217,
228
+ "scr_metric_threshold_10": 0.1691729744506738,
229
+ "scr_dir2_threshold_10": 0.1691729744506738,
230
+ "scr_dir1_threshold_20": 0.38011693664200685,
231
+ "scr_metric_threshold_20": 0.23684216423094331,
232
+ "scr_dir2_threshold_20": 0.23684216423094331,
233
+ "scr_dir1_threshold_50": 0.403508796390538,
234
+ "scr_metric_threshold_50": 0.30075200437399446,
235
+ "scr_dir2_threshold_50": 0.30075200437399446,
236
+ "scr_dir1_threshold_100": 0.49707623538466256,
237
+ "scr_metric_threshold_100": 0.38721816641794055,
238
+ "scr_dir2_threshold_100": 0.38721816641794055,
239
+ "scr_dir1_threshold_500": 0.4853801312278138,
240
+ "scr_metric_threshold_500": 0.5225565459784796,
241
+ "scr_dir2_threshold_500": 0.5225565459784796
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.4159289934835792,
246
+ "scr_metric_threshold_2": 0.3647416600600322,
247
+ "scr_dir2_threshold_2": 0.3647416600600322,
248
+ "scr_dir1_threshold_5": 0.4601767250792003,
249
+ "scr_metric_threshold_5": 0.4133737968557734,
250
+ "scr_dir2_threshold_5": 0.4133737968557734,
251
+ "scr_dir1_threshold_10": 0.442477843430779,
252
+ "scr_metric_threshold_10": 0.5015196467056929,
253
+ "scr_dir2_threshold_10": 0.5015196467056929,
254
+ "scr_dir1_threshold_20": -0.1681414855582738,
255
+ "scr_metric_threshold_20": 0.6170213112877068,
256
+ "scr_dir2_threshold_20": 0.6170213112877068,
257
+ "scr_dir1_threshold_50": 0.1681414855582738,
258
+ "scr_metric_threshold_50": 0.7082066357181472,
259
+ "scr_dir2_threshold_50": 0.7082066357181472,
260
+ "scr_dir1_threshold_100": -0.6637165014088846,
261
+ "scr_metric_threshold_100": 0.7386017438616274,
262
+ "scr_dir2_threshold_100": 0.7386017438616274,
263
+ "scr_dir1_threshold_500": -1.7787602870727588,
264
+ "scr_metric_threshold_500": 0.29483283886237377,
265
+ "scr_dir2_threshold_500": 0.29483283886237377
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.3173076124013192,
270
+ "scr_metric_threshold_2": 0.1244240049046106,
271
+ "scr_dir2_threshold_2": 0.1244240049046106,
272
+ "scr_dir1_threshold_5": 0.35096159770247026,
273
+ "scr_metric_threshold_5": 0.10599078847328613,
274
+ "scr_dir2_threshold_5": 0.10599078847328613,
275
+ "scr_dir1_threshold_10": 0.3317307898962106,
276
+ "scr_metric_threshold_10": 0.13364075045815918,
277
+ "scr_dir2_threshold_10": 0.13364075045815918,
278
+ "scr_dir1_threshold_20": 0.37980766613146666,
279
+ "scr_metric_threshold_20": 0.22119832250012086,
280
+ "scr_dir2_threshold_20": 0.22119832250012086,
281
+ "scr_dir1_threshold_50": 0.4855769657855018,
282
+ "scr_metric_threshold_50": 0.2488480098092212,
283
+ "scr_dir2_threshold_50": 0.2488480098092212,
284
+ "scr_dir1_threshold_100": 0.5865383485673823,
285
+ "scr_metric_threshold_100": 0.3410138172900708,
286
+ "scr_dir2_threshold_100": 0.3410138172900708,
287
+ "scr_dir1_threshold_500": 0.5048077735917615,
288
+ "scr_metric_threshold_500": 0.5345621778053186,
289
+ "scr_dir2_threshold_500": 0.5345621778053186
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_16k/average_l0_73",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_115_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732189934100,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.26540917872531267,
76
+ "scr_metric_threshold_2": 0.17676805731304376,
77
+ "scr_dir2_threshold_2": 0.17676805731304376,
78
+ "scr_dir1_threshold_5": 0.31976397682531105,
79
+ "scr_metric_threshold_5": 0.23798548405632222,
80
+ "scr_dir2_threshold_5": 0.23798548405632222,
81
+ "scr_dir1_threshold_10": 0.29888250848627496,
82
+ "scr_metric_threshold_10": 0.2933723593138509,
83
+ "scr_dir2_threshold_10": 0.2933723593138509,
84
+ "scr_dir1_threshold_20": 0.2286730228944795,
85
+ "scr_metric_threshold_20": 0.36328865319113113,
86
+ "scr_dir2_threshold_20": 0.36328865319113113,
87
+ "scr_dir1_threshold_50": 0.23066294403101006,
88
+ "scr_metric_threshold_50": 0.4482671808256601,
89
+ "scr_dir2_threshold_50": 0.4482671808256601,
90
+ "scr_dir1_threshold_100": 0.15686999236588467,
91
+ "scr_metric_threshold_100": 0.4633773116392915,
92
+ "scr_dir2_threshold_100": 0.4633773116392915,
93
+ "scr_dir1_threshold_500": -0.9192007624881388,
94
+ "scr_metric_threshold_500": 0.45229140201993157,
95
+ "scr_dir2_threshold_500": 0.45229140201993157
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.3235295406674491,
102
+ "scr_metric_threshold_2": 0.05063300880745302,
103
+ "scr_dir2_threshold_2": 0.05063300880745302,
104
+ "scr_dir1_threshold_5": 0.4411768057353677,
105
+ "scr_metric_threshold_5": 0.10379747943300446,
106
+ "scr_dir2_threshold_5": 0.10379747943300446,
107
+ "scr_dir1_threshold_10": 0.38235317320140844,
108
+ "scr_metric_threshold_10": 0.09620264128525105,
109
+ "scr_dir2_threshold_10": 0.09620264128525105,
110
+ "scr_dir1_threshold_20": 0.4411768057353677,
111
+ "scr_metric_threshold_20": 0.1265822956719035,
112
+ "scr_dir2_threshold_20": 0.1265822956719035,
113
+ "scr_dir1_threshold_50": 0.455882494734194,
114
+ "scr_metric_threshold_50": 0.13417728471747628,
115
+ "scr_dir2_threshold_50": 0.13417728471747628,
116
+ "scr_dir1_threshold_100": 0.39705886220023473,
117
+ "scr_metric_threshold_100": 0.21772156062749953,
118
+ "scr_dir2_threshold_100": 0.21772156062749953,
119
+ "scr_dir1_threshold_500": -2.323528664128795,
120
+ "scr_metric_threshold_500": 0.42784813220942625,
121
+ "scr_dir2_threshold_500": 0.42784813220942625
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.36036054902857856,
126
+ "scr_metric_threshold_2": 0.20294117337692186,
127
+ "scr_dir2_threshold_2": 0.20294117337692186,
128
+ "scr_dir1_threshold_5": 0.44144165913553934,
129
+ "scr_metric_threshold_5": 0.2941176883077086,
130
+ "scr_dir2_threshold_5": 0.2941176883077086,
131
+ "scr_dir1_threshold_10": 0.41441444344029416,
132
+ "scr_metric_threshold_10": 0.3999998948153431,
133
+ "scr_dir2_threshold_10": 0.3999998948153431,
134
+ "scr_dir1_threshold_20": 0.4594594449465196,
135
+ "scr_metric_threshold_20": 0.5323528720845881,
136
+ "scr_dir2_threshold_20": 0.5323528720845881,
137
+ "scr_dir1_threshold_50": 0.4684683378520097,
138
+ "scr_metric_threshold_50": 0.6647058493538331,
139
+ "scr_dir2_threshold_50": 0.6647058493538331,
140
+ "scr_dir1_threshold_100": 0.4864866606417648,
141
+ "scr_metric_threshold_100": 0.526470595453849,
142
+ "scr_dir2_threshold_100": 0.526470595453849,
143
+ "scr_dir1_threshold_500": -0.07207221720147068,
144
+ "scr_metric_threshold_500": 0.4117646233845828,
145
+ "scr_dir2_threshold_500": 0.4117646233845828
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.4444440765146749,
150
+ "scr_metric_threshold_2": 0.029411678770695084,
151
+ "scr_dir2_threshold_2": 0.029411678770695084,
152
+ "scr_dir1_threshold_5": 0.5,
153
+ "scr_metric_threshold_5": 0.09558817513948656,
154
+ "scr_dir2_threshold_5": 0.09558817513948656,
155
+ "scr_dir1_threshold_10": 0.4444440765146749,
156
+ "scr_metric_threshold_10": 0.14215678826793574,
157
+ "scr_dir2_threshold_10": 0.14215678826793574,
158
+ "scr_dir1_threshold_20": 0.4629623497466804,
159
+ "scr_metric_threshold_20": 0.21813722625689835,
160
+ "scr_dir2_threshold_20": 0.21813722625689835,
161
+ "scr_dir1_threshold_50": 0.4814806229786858,
162
+ "scr_metric_threshold_50": 0.3382351824019036,
163
+ "scr_dir2_threshold_50": 0.3382351824019036,
164
+ "scr_dir1_threshold_100": 0.3518516065653388,
165
+ "scr_metric_threshold_100": 0.26960773715052394,
166
+ "scr_dir2_threshold_100": 0.26960773715052394,
167
+ "scr_dir1_threshold_500": -4.537035442674703,
168
+ "scr_metric_threshold_500": 0.0024509488825881975,
169
+ "scr_dir2_threshold_500": 0.0024509488825881975
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3437501746229541,
174
+ "scr_metric_threshold_2": 0.17910446433400803,
175
+ "scr_dir2_threshold_2": 0.17910446433400803,
176
+ "scr_dir1_threshold_5": 0.32031251455191284,
177
+ "scr_metric_threshold_5": 0.2925372502298782,
178
+ "scr_dir2_threshold_5": 0.2925372502298782,
179
+ "scr_dir1_threshold_10": 0.2656247962732202,
180
+ "scr_metric_threshold_10": 0.3910446789249387,
181
+ "scr_dir2_threshold_10": 0.3910446789249387,
182
+ "scr_dir1_threshold_20": 0.19531274738251833,
183
+ "scr_metric_threshold_20": 0.45970139321838,
184
+ "scr_dir2_threshold_20": 0.45970139321838,
185
+ "scr_dir1_threshold_50": 0.046874854480871565,
186
+ "scr_metric_threshold_50": 0.5671641074036433,
187
+ "scr_dir2_threshold_50": 0.5671641074036433,
188
+ "scr_dir1_threshold_100": 0.21093754365573852,
189
+ "scr_metric_threshold_100": 0.5313433212914173,
190
+ "scr_dir2_threshold_100": 0.5313433212914173,
191
+ "scr_dir1_threshold_500": -0.05468725261748167,
192
+ "scr_metric_threshold_500": 0.3373134997565996,
193
+ "scr_dir2_threshold_500": 0.3373134997565996
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.053571434906954686,
198
+ "scr_metric_threshold_2": 0.468634763448764,
199
+ "scr_dir2_threshold_2": 0.468634763448764,
200
+ "scr_dir1_threshold_5": 0.06547639532486829,
201
+ "scr_metric_threshold_5": 0.4833949394562033,
202
+ "scr_dir2_threshold_5": 0.4833949394562033,
203
+ "scr_dir1_threshold_10": 0.029761868860590093,
204
+ "scr_metric_threshold_10": 0.5276752475352052,
205
+ "scr_dir2_threshold_10": 0.5276752475352052,
206
+ "scr_dir1_threshold_20": 0.029761868860590093,
207
+ "scr_metric_threshold_20": 0.5719557755575233,
208
+ "scr_dir2_threshold_20": 0.5719557755575233,
209
+ "scr_dir1_threshold_50": 0.10714286981390937,
210
+ "scr_metric_threshold_50": 0.6715867986502518,
211
+ "scr_dir2_threshold_50": 0.6715867986502518,
212
+ "scr_dir1_threshold_100": -0.25595221411685987,
213
+ "scr_metric_threshold_100": 0.7084871286971922,
214
+ "scr_dir2_threshold_100": 0.7084871286971922,
215
+ "scr_dir1_threshold_500": -0.3630950839307692,
216
+ "scr_metric_threshold_500": 0.7749078107590112,
217
+ "scr_dir2_threshold_500": 0.7749078107590112
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.1871345294230828,
222
+ "scr_metric_threshold_2": 0.026315895615697978,
223
+ "scr_dir2_threshold_2": 0.026315895615697978,
224
+ "scr_dir1_threshold_5": 0.21052638917161398,
225
+ "scr_metric_threshold_5": 0.03007524525291637,
226
+ "scr_dir2_threshold_5": 0.03007524525291637,
227
+ "scr_dir1_threshold_10": 0.26315798646451743,
228
+ "scr_metric_threshold_10": 0.07894746276950931,
229
+ "scr_dir2_threshold_10": 0.07894746276950931,
230
+ "scr_dir1_threshold_20": 0.3333335657101109,
231
+ "scr_metric_threshold_20": 0.13533837956053904,
232
+ "scr_dir2_threshold_20": 0.13533837956053904,
233
+ "scr_dir1_threshold_50": 0.31578958375742094,
234
+ "scr_metric_threshold_50": 0.22556389124170353,
235
+ "scr_dir2_threshold_50": 0.22556389124170353,
236
+ "scr_dir1_threshold_100": 0.3742690588461656,
237
+ "scr_metric_threshold_100": 0.3308270255493262,
238
+ "scr_dir2_threshold_100": 0.3308270255493262,
239
+ "scr_dir1_threshold_500": 0.403508796390538,
240
+ "scr_metric_threshold_500": 0.40225578904439874,
241
+ "scr_dir2_threshold_500": 0.40225578904439874
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.25663694874951604,
246
+ "scr_metric_threshold_2": 0.34650445023863596,
247
+ "scr_dir2_threshold_2": 0.34650445023863596,
248
+ "scr_dir1_threshold_5": 0.3628318210637474,
249
+ "scr_metric_threshold_5": 0.5075987770358701,
250
+ "scr_dir2_threshold_5": 0.5075987770358701,
251
+ "scr_dir1_threshold_10": 0.3893806710109472,
252
+ "scr_metric_threshold_10": 0.5957446268857896,
253
+ "scr_dir2_threshold_10": 0.5957446268857896,
254
+ "scr_dir1_threshold_20": -0.3185840894681263,
255
+ "scr_metric_threshold_20": 0.668692922663969,
256
+ "scr_dir2_threshold_20": 0.668692922663969,
257
+ "scr_dir1_threshold_50": -0.3716812618879581,
258
+ "scr_metric_threshold_50": 0.7082066357181472,
259
+ "scr_dir2_threshold_50": 0.7082066357181472,
260
+ "scr_dir1_threshold_100": -0.699114792180295,
261
+ "scr_metric_threshold_100": 0.7446808741918045,
262
+ "scr_dir2_threshold_100": 0.7446808741918045,
263
+ "scr_dir1_threshold_500": -0.91150400933419,
264
+ "scr_metric_threshold_500": 0.6534953685922289,
265
+ "scr_dir2_threshold_500": 0.6534953685922289
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.15384617588929125,
270
+ "scr_metric_threshold_2": 0.11059902391217406,
271
+ "scr_dir2_threshold_2": 0.11059902391217406,
272
+ "scr_dir1_threshold_5": 0.21634622961943867,
273
+ "scr_metric_threshold_5": 0.09677431759551026,
274
+ "scr_dir2_threshold_5": 0.09677431759551026,
275
+ "scr_dir1_threshold_10": 0.20192305212454734,
276
+ "scr_metric_threshold_10": 0.11520753402683473,
277
+ "scr_dir2_threshold_10": 0.11520753402683473,
278
+ "scr_dir1_threshold_20": 0.22596149024217538,
279
+ "scr_metric_threshold_20": 0.1935483605152478,
280
+ "scr_dir2_threshold_20": 0.1935483605152478,
281
+ "scr_dir1_threshold_50": 0.34134605051894723,
282
+ "scr_metric_threshold_50": 0.2764976971183215,
283
+ "scr_dir2_threshold_50": 0.2764976971183215,
284
+ "scr_dir1_threshold_100": 0.38942321331498964,
285
+ "scr_metric_threshold_100": 0.37788025015271975,
286
+ "scr_dir2_threshold_100": 0.37788025015271975,
287
+ "scr_dir1_threshold_500": 0.5048077735917615,
288
+ "scr_metric_threshold_500": 0.6082950435306165,
289
+ "scr_dir2_threshold_500": 0.6082950435306165
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_65k/average_l0_115",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_216_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732191704795,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.23544247925867484,
76
+ "scr_metric_threshold_2": 0.17636884119244936,
77
+ "scr_dir2_threshold_2": 0.17636884119244936,
78
+ "scr_dir1_threshold_5": 0.362753253679831,
79
+ "scr_metric_threshold_5": 0.24553944002450154,
80
+ "scr_dir2_threshold_5": 0.24553944002450154,
81
+ "scr_dir1_threshold_10": 0.401711659633447,
82
+ "scr_metric_threshold_10": 0.30022879204204755,
83
+ "scr_dir2_threshold_10": 0.30022879204204755,
84
+ "scr_dir1_threshold_20": 0.3694687092573685,
85
+ "scr_metric_threshold_20": 0.37880727798507274,
86
+ "scr_dir2_threshold_20": 0.37880727798507274,
87
+ "scr_dir1_threshold_50": 0.33864931593222813,
88
+ "scr_metric_threshold_50": 0.45107348680446563,
89
+ "scr_dir2_threshold_50": 0.45107348680446563,
90
+ "scr_dir1_threshold_100": 0.30203894469567294,
91
+ "scr_metric_threshold_100": 0.4409637942717861,
92
+ "scr_dir2_threshold_100": 0.4409637942717861,
93
+ "scr_dir1_threshold_500": -0.5570568036322155,
94
+ "scr_metric_threshold_500": 0.5214083782194925,
95
+ "scr_dir2_threshold_500": 0.5214083782194925
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.3235295406674491,
102
+ "scr_metric_threshold_2": 0.07341782504635204,
103
+ "scr_dir2_threshold_2": 0.07341782504635204,
104
+ "scr_dir1_threshold_5": 0.500000438269327,
105
+ "scr_metric_threshold_5": 0.11392408119449507,
106
+ "scr_dir2_threshold_5": 0.11392408119449507,
107
+ "scr_dir1_threshold_10": 0.4852947492705007,
108
+ "scr_metric_threshold_10": 0.14936711191080249,
109
+ "scr_dir2_threshold_10": 0.14936711191080249,
110
+ "scr_dir1_threshold_20": 0.455882494734194,
111
+ "scr_metric_threshold_20": 0.2101265715819267,
112
+ "scr_dir2_threshold_20": 0.2101265715819267,
113
+ "scr_dir1_threshold_50": 0.4852947492705007,
114
+ "scr_metric_threshold_50": 0.2101265715819267,
115
+ "scr_dir2_threshold_50": 0.2101265715819267,
116
+ "scr_dir1_threshold_100": 0.4852947492705007,
117
+ "scr_metric_threshold_100": 0.3164558146286684,
118
+ "scr_dir2_threshold_100": 0.3164558146286684,
119
+ "scr_dir1_threshold_500": -1.9411754909273866,
120
+ "scr_metric_threshold_500": 0.47088615197130645,
121
+ "scr_dir2_threshold_500": 0.47088615197130645
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.09009000301245093,
126
+ "scr_metric_threshold_2": 0.2000000350615523,
127
+ "scr_dir2_threshold_2": 0.2000000350615523,
128
+ "scr_dir1_threshold_5": 0.4324322292512744,
129
+ "scr_metric_threshold_5": 0.30882355519231786,
130
+ "scr_dir2_threshold_5": 0.30882355519231786,
131
+ "scr_dir1_threshold_10": 0.5225227692425002,
132
+ "scr_metric_threshold_10": 0.3970587564999735,
133
+ "scr_dir2_threshold_10": 0.3970587564999735,
134
+ "scr_dir1_threshold_20": 0.5315316621479903,
135
+ "scr_metric_threshold_20": 0.5588234675384371,
136
+ "scr_dir2_threshold_20": 0.5588234675384371,
137
+ "scr_dir1_threshold_50": 0.5675677707487257,
138
+ "scr_metric_threshold_50": 0.6529411207845934,
139
+ "scr_dir2_threshold_50": 0.6529411207845934,
140
+ "scr_dir1_threshold_100": 0.5765766636542158,
141
+ "scr_metric_threshold_100": 0.3882353415538649,
142
+ "scr_dir2_threshold_100": 0.3882353415538649,
143
+ "scr_dir1_threshold_500": 0.297297224732598,
144
+ "scr_metric_threshold_500": 0.6823528545538119,
145
+ "scr_dir2_threshold_500": 0.6823528545538119
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.4814806229786858,
150
+ "scr_metric_threshold_2": 0.041666569273454426,
151
+ "scr_dir2_threshold_2": 0.041666569273454426,
152
+ "scr_dir1_threshold_5": 0.5,
153
+ "scr_metric_threshold_5": 0.0882351824019036,
154
+ "scr_dir2_threshold_5": 0.0882351824019036,
155
+ "scr_dir1_threshold_10": 0.537036546464011,
156
+ "scr_metric_threshold_10": 0.1764705108936256,
157
+ "scr_dir2_threshold_10": 0.1764705108936256,
158
+ "scr_dir1_threshold_20": 0.4444440765146749,
159
+ "scr_metric_threshold_20": 0.26470583938534753,
160
+ "scr_dir2_threshold_20": 0.26470583938534753,
161
+ "scr_dir1_threshold_50": 0.4814806229786858,
162
+ "scr_metric_threshold_50": 0.32352934301655606,
163
+ "scr_dir2_threshold_50": 0.32352934301655606,
164
+ "scr_dir1_threshold_100": 0.11111074318134155,
165
+ "scr_metric_threshold_100": 0.3848039416201712,
166
+ "scr_dir2_threshold_100": 0.3848039416201712,
167
+ "scr_dir1_threshold_500": -3.1481472896453524,
168
+ "scr_metric_threshold_500": 0.024509781005518688,
169
+ "scr_dir2_threshold_500": 0.024509781005518688
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3671873690327844,
174
+ "scr_metric_threshold_2": 0.12537310724137637,
175
+ "scr_dir2_threshold_2": 0.12537310724137637,
176
+ "scr_dir1_threshold_5": 0.3750002328306055,
177
+ "scr_metric_threshold_5": 0.29552246400947413,
178
+ "scr_dir2_threshold_5": 0.29552246400947413,
179
+ "scr_dir1_threshold_10": 0.32812491268852295,
180
+ "scr_metric_threshold_10": 0.38208957135902843,
181
+ "scr_dir2_threshold_10": 0.38208957135902843,
182
+ "scr_dir1_threshold_20": 0.21874994179234863,
183
+ "scr_metric_threshold_20": 0.44179100016226697,
184
+ "scr_dir2_threshold_20": 0.44179100016226697,
185
+ "scr_dir1_threshold_50": 0.578124912688523,
186
+ "scr_metric_threshold_50": 0.4955223572548986,
187
+ "scr_dir2_threshold_50": 0.4955223572548986,
188
+ "scr_dir1_threshold_100": 0.07812491268852294,
189
+ "scr_metric_threshold_100": 0.5641790715483399,
190
+ "scr_dir2_threshold_100": 0.5641790715483399,
191
+ "scr_dir1_threshold_500": -0.8828121653060046,
192
+ "scr_metric_threshold_500": 0.31641789292089056,
193
+ "scr_dir2_threshold_500": 0.31641789292089056
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.059523737721180185,
198
+ "scr_metric_threshold_2": 0.5387454345266138,
199
+ "scr_dir2_threshold_2": 0.5387454345266138,
200
+ "scr_dir1_threshold_5": 0.32142860944172813,
201
+ "scr_metric_threshold_5": 0.5682657865414925,
202
+ "scr_dir2_threshold_5": 0.5682657865414925,
203
+ "scr_dir1_threshold_10": 0.3630950839307692,
204
+ "scr_metric_threshold_10": 0.5830257426056157,
205
+ "scr_dir2_threshold_10": 0.5830257426056157,
206
+ "scr_dir1_threshold_20": 0.17261891034931506,
207
+ "scr_metric_threshold_20": 0.6346862486599953,
208
+ "scr_dir2_threshold_20": 0.6346862486599953,
209
+ "scr_dir1_threshold_50": 0.23809530567418336,
210
+ "scr_metric_threshold_50": 0.686346754714375,
211
+ "scr_dir2_threshold_50": 0.686346754714375,
212
+ "scr_dir1_threshold_100": 0.23809530567418336,
213
+ "scr_metric_threshold_100": 0.5682657865414925,
214
+ "scr_dir2_threshold_100": 0.5682657865414925,
215
+ "scr_dir1_threshold_500": -0.25595221411685987,
216
+ "scr_metric_threshold_500": 0.7306272827366931,
217
+ "scr_dir2_threshold_500": 0.7306272827366931
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.19298275578409038,
222
+ "scr_metric_threshold_2": 0.026315895615697978,
223
+ "scr_dir2_threshold_2": 0.026315895615697978,
224
+ "scr_dir1_threshold_5": 0.23976612671598632,
225
+ "scr_metric_threshold_5": 0.04135351824215616,
226
+ "scr_dir2_threshold_5": 0.04135351824215616,
227
+ "scr_dir1_threshold_10": 0.29824560180473103,
228
+ "scr_metric_threshold_10": 0.10150378467040429,
229
+ "scr_dir2_threshold_10": 0.10150378467040429,
230
+ "scr_dir1_threshold_20": 0.3742690588461656,
231
+ "scr_metric_threshold_20": 0.1691729744506738,
232
+ "scr_dir2_threshold_20": 0.1691729744506738,
233
+ "scr_dir1_threshold_50": 0.38011693664200685,
234
+ "scr_metric_threshold_50": 0.2857143817475363,
235
+ "scr_dir2_threshold_50": 0.2857143817475363,
236
+ "scr_dir1_threshold_100": 0.4385967602959179,
237
+ "scr_metric_threshold_100": 0.3796992430659191,
238
+ "scr_dir2_threshold_100": 0.3796992430659191,
239
+ "scr_dir1_threshold_500": 0.5555557104734072,
240
+ "scr_metric_threshold_500": 0.6052631343076227,
241
+ "scr_dir2_threshold_500": 0.6052631343076227
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.23893806710109472,
246
+ "scr_metric_threshold_2": 0.29483283886237377,
247
+ "scr_dir2_threshold_2": 0.29483283886237377,
248
+ "scr_dir1_threshold_5": 0.3362829711165476,
249
+ "scr_metric_threshold_5": 0.4468083795797746,
250
+ "scr_dir2_threshold_5": 0.4468083795797746,
251
+ "scr_dir1_threshold_10": 0.3716812618879581,
252
+ "scr_metric_threshold_10": 0.48328261805343187,
253
+ "scr_dir2_threshold_10": 0.48328261805343187,
254
+ "scr_dir1_threshold_20": 0.4601767250792003,
255
+ "scr_metric_threshold_20": 0.5714284679033513,
256
+ "scr_dir2_threshold_20": 0.5714284679033513,
257
+ "scr_dir1_threshold_50": -0.3628318210637474,
258
+ "scr_metric_threshold_50": 0.668692922663969,
259
+ "scr_dir2_threshold_50": 0.668692922663969,
260
+ "scr_dir1_threshold_100": 0.13274319478686336,
261
+ "scr_metric_threshold_100": 0.7325227947005853,
262
+ "scr_dir2_threshold_100": 0.7325227947005853,
263
+ "scr_dir1_threshold_500": 0.32743353029233696,
264
+ "scr_metric_threshold_500": 0.838905673202766,
265
+ "scr_dir2_threshold_500": 0.838905673202766
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.1298077377716632,
270
+ "scr_metric_threshold_2": 0.11059902391217406,
271
+ "scr_dir2_threshold_2": 0.11059902391217406,
272
+ "scr_dir1_threshold_5": 0.19711542181317898,
273
+ "scr_metric_threshold_5": 0.10138255303439819,
274
+ "scr_dir2_threshold_5": 0.10138255303439819,
275
+ "scr_dir1_threshold_10": 0.3076923517785825,
276
+ "scr_metric_threshold_10": 0.12903224034349853,
277
+ "scr_dir2_threshold_10": 0.12903224034349853,
278
+ "scr_dir1_threshold_20": 0.29807680459505953,
279
+ "scr_metric_threshold_20": 0.179723654198584,
280
+ "scr_dir2_threshold_20": 0.179723654198584,
281
+ "scr_dir1_threshold_50": 0.34134605051894723,
282
+ "scr_metric_threshold_50": 0.28571444267187013,
283
+ "scr_dir2_threshold_50": 0.28571444267187013,
284
+ "scr_dir1_threshold_100": 0.3557692280138386,
285
+ "scr_metric_threshold_100": 0.1935483605152478,
286
+ "scr_dir2_threshold_100": 0.1935483605152478,
287
+ "scr_dir1_threshold_500": 0.5913462654395369,
288
+ "scr_metric_threshold_500": 0.5023042550573303,
289
+ "scr_dir2_threshold_500": 0.5023042550573303
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_65k/average_l0_216",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_21_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732190818599,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.14556919582093644,
76
+ "scr_metric_threshold_2": 0.10946286670978239,
77
+ "scr_dir2_threshold_2": 0.10946286670978239,
78
+ "scr_dir1_threshold_5": 0.19063478202887604,
79
+ "scr_metric_threshold_5": 0.18422389992663954,
80
+ "scr_dir2_threshold_5": 0.18422389992663954,
81
+ "scr_dir1_threshold_10": 0.21409255175738928,
82
+ "scr_metric_threshold_10": 0.2354997321762885,
83
+ "scr_dir2_threshold_10": 0.2354997321762885,
84
+ "scr_dir1_threshold_20": 0.12825066952612252,
85
+ "scr_metric_threshold_20": 0.2710209619216867,
86
+ "scr_dir2_threshold_20": 0.2710209619216867,
87
+ "scr_dir1_threshold_50": 0.1060740844727926,
88
+ "scr_metric_threshold_50": 0.35850859644936484,
89
+ "scr_dir2_threshold_50": 0.35850859644936484,
90
+ "scr_dir1_threshold_100": -0.008575233805916557,
91
+ "scr_metric_threshold_100": 0.414356279847092,
92
+ "scr_dir2_threshold_100": 0.414356279847092,
93
+ "scr_dir1_threshold_500": -0.48551583371784746,
94
+ "scr_metric_threshold_500": 0.44834708826354314,
95
+ "scr_dir2_threshold_500": 0.44834708826354314
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2352945301358372,
102
+ "scr_metric_threshold_2": 0.08860765223967824,
103
+ "scr_dir2_threshold_2": 0.08860765223967824,
104
+ "scr_dir1_threshold_5": 0.27941247367097016,
105
+ "scr_metric_threshold_5": 0.10632924304674166,
106
+ "scr_dir2_threshold_5": 0.10632924304674166,
107
+ "scr_dir1_threshold_10": 0.27941247367097016,
108
+ "scr_metric_threshold_10": 0.11645569391041287,
109
+ "scr_dir2_threshold_10": 0.11645569391041287,
110
+ "scr_dir1_threshold_20": -0.14705864306557123,
111
+ "scr_metric_threshold_20": 0.055696234239288635,
112
+ "scr_dir2_threshold_20": 0.055696234239288635,
113
+ "scr_dir1_threshold_50": -0.2058822755995305,
114
+ "scr_metric_threshold_50": 0.06835444871669703,
115
+ "scr_dir2_threshold_50": 0.06835444871669703,
116
+ "scr_dir1_threshold_100": -0.16176433206439755,
117
+ "scr_metric_threshold_100": 0.09113926495559606,
118
+ "scr_dir2_threshold_100": 0.09113926495559606,
119
+ "scr_dir1_threshold_500": -1.5147052507294994,
120
+ "scr_metric_threshold_500": 0.1772153044793565,
121
+ "scr_dir2_threshold_500": 0.1772153044793565
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.07207221720147068,
126
+ "scr_metric_threshold_2": 0.19117644480768212,
127
+ "scr_dir2_threshold_2": 0.19117644480768212,
128
+ "scr_dir1_threshold_5": 0.09909889591794105,
129
+ "scr_metric_threshold_5": 0.28529409805383843,
130
+ "scr_dir2_threshold_5": 0.28529409805383843,
131
+ "scr_dir1_threshold_10": 0.14414389742416653,
132
+ "scr_metric_threshold_10": 0.34705870390764504,
133
+ "scr_dir2_threshold_10": 0.34705870390764504,
134
+ "scr_dir1_threshold_20": 0.1621622202139216,
135
+ "scr_metric_threshold_20": 0.40882348506921323,
136
+ "scr_dir2_threshold_20": 0.40882348506921323,
137
+ "scr_dir1_threshold_50": 0.09009000301245093,
138
+ "scr_metric_threshold_50": 0.5411764623384583,
139
+ "scr_dir2_threshold_50": 0.5411764623384583,
140
+ "scr_dir1_threshold_100": -0.45045055204102946,
141
+ "scr_metric_threshold_100": 0.5499998772845669,
142
+ "scr_dir2_threshold_100": 0.5499998772845669,
143
+ "scr_dir1_threshold_500": -0.5855855565597059,
144
+ "scr_metric_threshold_500": 0.4911764097461298,
145
+ "scr_dir2_threshold_500": 0.4911764097461298
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3333333333333333,
150
+ "scr_metric_threshold_2": 0.044117518156042625,
151
+ "scr_dir2_threshold_2": 0.044117518156042625,
152
+ "scr_dir1_threshold_5": 0.38888815302934976,
153
+ "scr_metric_threshold_5": 0.05637255474862033,
154
+ "scr_dir2_threshold_5": 0.05637255474862033,
155
+ "scr_dir1_threshold_10": 0.24074086338399725,
156
+ "scr_metric_threshold_10": 0.1127449634074223,
157
+ "scr_dir2_threshold_10": 0.1127449634074223,
158
+ "scr_dir1_threshold_20": 0.25925913661600275,
159
+ "scr_metric_threshold_20": 0.18627445251379673,
160
+ "scr_dir2_threshold_20": 0.18627445251379673,
161
+ "scr_dir1_threshold_50": 0.25925913661600275,
162
+ "scr_metric_threshold_50": 0.27205883212293047,
163
+ "scr_dir2_threshold_50": 0.27205883212293047,
164
+ "scr_dir1_threshold_100": -0.05555592348532512,
165
+ "scr_metric_threshold_100": 0.3284313868715508,
166
+ "scr_dir2_threshold_100": 0.3284313868715508,
167
+ "scr_dir1_threshold_500": -1.8703698797973443,
168
+ "scr_metric_threshold_500": 0.1544116787706951,
169
+ "scr_dir2_threshold_500": 0.1544116787706951
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.1484374272404358,
174
+ "scr_metric_threshold_2": 0.16716414298850182,
175
+ "scr_dir2_threshold_2": 0.16716414298850182,
176
+ "scr_dir1_threshold_5": 0.16406268917486697,
177
+ "scr_metric_threshold_5": 0.2537314282623487,
178
+ "scr_dir2_threshold_5": 0.2537314282623487,
179
+ "scr_dir1_threshold_10": 0.21093754365573852,
180
+ "scr_metric_threshold_10": 0.32537317841109337,
181
+ "scr_dir2_threshold_10": 0.32537317841109337,
182
+ "scr_dir1_threshold_20": 0.10156257275956422,
183
+ "scr_metric_threshold_20": 0.30149253572008095,
184
+ "scr_dir2_threshold_20": 0.30149253572008095,
185
+ "scr_dir1_threshold_50": 0.03906245634426147,
186
+ "scr_metric_threshold_50": 0.3880596430696353,
187
+ "scr_dir2_threshold_50": 0.3880596430696353,
188
+ "scr_dir1_threshold_100": -0.007812398136610098,
189
+ "scr_metric_threshold_100": 0.4567163573630766,
190
+ "scr_dir2_threshold_100": 0.4567163573630766,
191
+ "scr_dir1_threshold_500": -0.17187508731147705,
192
+ "scr_metric_threshold_500": 0.48955228554429175,
193
+ "scr_dir2_threshold_500": 0.48955228554429175
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.029761868860590093,
198
+ "scr_metric_threshold_2": 0.02214015403950094,
199
+ "scr_dir2_threshold_2": 0.02214015403950094,
200
+ "scr_dir1_threshold_5": 0.059523737721180185,
201
+ "scr_metric_threshold_5": 0.14391155111504664,
202
+ "scr_dir2_threshold_5": 0.14391155111504664,
203
+ "scr_dir1_threshold_10": 0.08928560658177027,
204
+ "scr_metric_threshold_10": 0.20295203520148788,
205
+ "scr_dir2_threshold_10": 0.20295203520148788,
206
+ "scr_dir1_threshold_20": 0.08333330376754479,
207
+ "scr_metric_threshold_20": 0.28782288228677705,
208
+ "scr_dir2_threshold_20": 0.28782288228677705,
209
+ "scr_dir1_threshold_50": 0.0357145264642782,
210
+ "scr_metric_threshold_50": 0.45387458744132464,
211
+ "scr_dir2_threshold_50": 0.45387458744132464,
212
+ "scr_dir1_threshold_100": 0.029761868860590093,
213
+ "scr_metric_threshold_100": 0.5719557755575233,
214
+ "scr_dir2_threshold_100": 0.5719557755575233,
215
+ "scr_dir1_threshold_500": -0.023809566046364597,
216
+ "scr_metric_threshold_500": 0.6605166116588432,
217
+ "scr_dir2_threshold_500": 0.6605166116588432
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.029239737544372344,
222
+ "scr_metric_threshold_2": 0.04511286787937455,
223
+ "scr_dir2_threshold_2": 0.04511286787937455,
224
+ "scr_dir1_threshold_5": 0.08187133483727584,
225
+ "scr_metric_threshold_5": 0.08270681240672771,
226
+ "scr_dir2_threshold_5": 0.08270681240672771,
227
+ "scr_dir1_threshold_10": 0.15789479187871047,
228
+ "scr_metric_threshold_10": 0.10150378467040429,
229
+ "scr_dir2_threshold_10": 0.10150378467040429,
230
+ "scr_dir1_threshold_20": 0.25731010866867626,
231
+ "scr_metric_threshold_20": 0.11278205765964408,
232
+ "scr_dir2_threshold_20": 0.11278205765964408,
233
+ "scr_dir1_threshold_50": 0.26900586426035866,
234
+ "scr_metric_threshold_50": 0.23308281459372493,
235
+ "scr_dir2_threshold_50": 0.23308281459372493,
236
+ "scr_dir1_threshold_100": 0.29824560180473103,
237
+ "scr_metric_threshold_100": 0.319548976637671,
238
+ "scr_dir2_threshold_100": 0.319548976637671,
239
+ "scr_dir1_threshold_500": 0.26315798646451743,
240
+ "scr_metric_threshold_500": 0.5187969722636766,
241
+ "scr_dir2_threshold_500": 0.5187969722636766
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.17699145385705228,
246
+ "scr_metric_threshold_2": 0.19756838410175617,
247
+ "scr_dir2_threshold_2": 0.19756838410175617,
248
+ "scr_dir1_threshold_5": 0.27433635787250515,
249
+ "scr_metric_threshold_5": 0.4164132714362944,
250
+ "scr_dir2_threshold_5": 0.4164132714362944,
251
+ "scr_dir1_threshold_10": 0.3451329394153261,
252
+ "scr_metric_threshold_10": 0.5258358056881312,
253
+ "scr_dir2_threshold_10": 0.5258358056881312,
254
+ "scr_dir1_threshold_20": 0.035398290771410455,
255
+ "scr_metric_threshold_20": 0.6079027063770086,
256
+ "scr_dir2_threshold_20": 0.6079027063770086,
257
+ "scr_dir1_threshold_50": -0.00884944082421066,
258
+ "scr_metric_threshold_50": 0.662613973502927,
259
+ "scr_dir2_threshold_50": 0.662613973502927,
260
+ "scr_dir1_threshold_100": -0.1681414855582738,
261
+ "scr_metric_threshold_100": 0.7021276865571052,
262
+ "scr_dir2_threshold_100": 0.7021276865571052,
263
+ "scr_dir1_threshold_500": -0.442477843430779,
264
+ "scr_metric_threshold_500": 0.7264436643704083,
265
+ "scr_dir2_threshold_500": 0.7264436643704083
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.13942299839439992,
270
+ "scr_metric_threshold_2": 0.11981576946572266,
271
+ "scr_dir2_threshold_2": 0.11981576946572266,
272
+ "scr_dir1_threshold_5": 0.1778846140069193,
273
+ "scr_metric_threshold_5": 0.12903224034349853,
274
+ "scr_dir2_threshold_5": 0.12903224034349853,
275
+ "scr_dir1_threshold_10": 0.24519229804843506,
276
+ "scr_metric_threshold_10": 0.15207369221371095,
277
+ "scr_dir2_threshold_10": 0.15207369221371095,
278
+ "scr_dir1_threshold_20": 0.27403836647743146,
279
+ "scr_metric_threshold_20": 0.20737334150768433,
280
+ "scr_dir2_threshold_20": 0.20737334150768433,
281
+ "scr_dir1_threshold_50": 0.37019240550872995,
282
+ "scr_metric_threshold_50": 0.2488480098092212,
283
+ "scr_dir2_threshold_50": 0.2488480098092212,
284
+ "scr_dir1_threshold_100": 0.44711535017298243,
285
+ "scr_metric_threshold_100": 0.294930913549646,
286
+ "scr_dir2_threshold_100": 0.294930913549646,
287
+ "scr_dir1_threshold_500": 0.46153852766787373,
288
+ "scr_metric_threshold_500": 0.36866377927494387,
289
+ "scr_dir2_threshold_500": 0.36866377927494387
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_65k/average_l0_21",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_35_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732192574392,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.1342011853293917,
76
+ "scr_metric_threshold_2": 0.11959515111612808,
77
+ "scr_dir2_threshold_2": 0.11959515111612808,
78
+ "scr_dir1_threshold_5": 0.2068104477745309,
79
+ "scr_metric_threshold_5": 0.20251659587489546,
80
+ "scr_dir2_threshold_5": 0.20251659587489546,
81
+ "scr_dir1_threshold_10": 0.24057462470571736,
82
+ "scr_metric_threshold_10": 0.2518407450619574,
83
+ "scr_dir2_threshold_10": 0.2518407450619574,
84
+ "scr_dir1_threshold_20": 0.16545242331189738,
85
+ "scr_metric_threshold_20": 0.31539160797309346,
86
+ "scr_dir2_threshold_20": 0.31539160797309346,
87
+ "scr_dir1_threshold_50": 0.12709672099940428,
88
+ "scr_metric_threshold_50": 0.3992584247572392,
89
+ "scr_dir2_threshold_50": 0.3992584247572392,
90
+ "scr_dir1_threshold_100": 0.1686340856226789,
91
+ "scr_metric_threshold_100": 0.4491001685197186,
92
+ "scr_dir2_threshold_100": 0.4491001685197186,
93
+ "scr_dir1_threshold_500": -0.3726387709627251,
94
+ "scr_metric_threshold_500": 0.48165476050021677,
95
+ "scr_dir2_threshold_500": 0.48165476050021677
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2352945301358372,
102
+ "scr_metric_threshold_2": 0.08101266319410545,
103
+ "scr_dir2_threshold_2": 0.08101266319410545,
104
+ "scr_dir1_threshold_5": 0.3088238516686228,
105
+ "scr_metric_threshold_5": 0.11645569391041287,
106
+ "scr_dir2_threshold_5": 0.11645569391041287,
107
+ "scr_dir1_threshold_10": 0.33823522966627545,
108
+ "scr_metric_threshold_10": 0.04810139609153522,
109
+ "scr_dir2_threshold_10": 0.04810139609153522,
110
+ "scr_dir1_threshold_20": -0.02941137799765263,
111
+ "scr_metric_threshold_20": 0.06835444871669703,
112
+ "scr_dir2_threshold_20": 0.06835444871669703,
113
+ "scr_dir1_threshold_50": -0.02941137799765263,
114
+ "scr_metric_threshold_50": 0.07848105047818764,
115
+ "scr_dir2_threshold_50": 0.07848105047818764,
116
+ "scr_dir1_threshold_100": 0.014706565537480355,
117
+ "scr_metric_threshold_100": 0.12151907024006786,
118
+ "scr_dir2_threshold_100": 0.12151907024006786,
119
+ "scr_dir1_threshold_500": -1.632352515797418,
120
+ "scr_metric_threshold_500": 0.10126586671708666,
121
+ "scr_dir2_threshold_500": 0.10126586671708666
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.0810811101069608,
126
+ "scr_metric_threshold_2": 0.1705881259845722,
127
+ "scr_dir2_threshold_2": 0.1705881259845722,
128
+ "scr_dir1_threshold_5": 0.11711721870769615,
129
+ "scr_metric_threshold_5": 0.3323528370230358,
130
+ "scr_dir2_threshold_5": 0.3323528370230358,
131
+ "scr_dir1_threshold_10": 0.09909889591794105,
132
+ "scr_metric_threshold_10": 0.3999998948153431,
133
+ "scr_dir2_threshold_10": 0.3999998948153431,
134
+ "scr_dir1_threshold_20": 0.2342344374153923,
135
+ "scr_metric_threshold_20": 0.473529404546151,
136
+ "scr_dir2_threshold_20": 0.473529404546151,
137
+ "scr_dir1_threshold_50": 0.14414389742416653,
138
+ "scr_metric_threshold_50": 0.6205882487000053,
139
+ "scr_dir2_threshold_50": 0.6205882487000053,
140
+ "scr_dir1_threshold_100": 0.17117111311941172,
141
+ "scr_metric_threshold_100": 0.5941176532461563,
142
+ "scr_dir2_threshold_100": 0.5941176532461563,
143
+ "scr_dir1_threshold_500": -0.5855855565597059,
144
+ "scr_metric_threshold_500": 0.5352940103999576,
145
+ "scr_dir2_threshold_500": 0.5352940103999576
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3333333333333333,
150
+ "scr_metric_threshold_2": 0.046568613128449184,
151
+ "scr_dir2_threshold_2": 0.046568613128449184,
152
+ "scr_dir1_threshold_5": 0.4259258032826694,
153
+ "scr_metric_threshold_5": 0.07352934301655607,
154
+ "scr_dir2_threshold_5": 0.07352934301655607,
155
+ "scr_dir1_threshold_10": 0.4444440765146749,
156
+ "scr_metric_threshold_10": 0.1348037955303528,
157
+ "scr_dir2_threshold_10": 0.1348037955303528,
158
+ "scr_dir1_threshold_20": 0.2777774098480082,
159
+ "scr_metric_threshold_20": 0.21813722625689835,
160
+ "scr_dir2_threshold_20": 0.21813722625689835,
161
+ "scr_dir1_threshold_50": 0.24074086338399725,
162
+ "scr_metric_threshold_50": 0.3112744525137967,
163
+ "scr_dir2_threshold_50": 0.3112744525137967,
164
+ "scr_dir1_threshold_100": 0.24074086338399725,
165
+ "scr_metric_threshold_100": 0.28921562039086623,
166
+ "scr_dir2_threshold_100": 0.28921562039086623,
167
+ "scr_dir1_threshold_500": -1.0925924699493361,
168
+ "scr_metric_threshold_500": 0.2352940145248341,
169
+ "scr_dir2_threshold_500": 0.2352940145248341
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.1484374272404358,
174
+ "scr_metric_threshold_2": 0.20895517873562727,
175
+ "scr_dir2_threshold_2": 0.20895517873562727,
176
+ "scr_dir1_threshold_5": 0.15624982537704588,
177
+ "scr_metric_threshold_5": 0.2835821426639679,
178
+ "scr_dir2_threshold_5": 0.2835821426639679,
179
+ "scr_dir1_threshold_10": 0.21093754365573852,
180
+ "scr_metric_threshold_10": 0.3731342858688257,
181
+ "scr_dir2_threshold_10": 0.3731342858688257,
182
+ "scr_dir1_threshold_20": 0.2343752037267798,
183
+ "scr_metric_threshold_20": 0.4388059643069635,
184
+ "scr_dir2_threshold_20": 0.4388059643069635,
185
+ "scr_dir1_threshold_50": -0.03125005820765137,
186
+ "scr_metric_threshold_50": 0.4358209284516601,
187
+ "scr_dir2_threshold_50": 0.4358209284516601,
188
+ "scr_dir1_threshold_100": 0.10937497089617432,
189
+ "scr_metric_threshold_100": 0.5164179640906076,
190
+ "scr_dir2_threshold_100": 0.5164179640906076,
191
+ "scr_dir1_threshold_500": -0.2890624563442615,
192
+ "scr_metric_threshold_500": 0.5492537143475302,
193
+ "scr_dir2_threshold_500": 0.5492537143475302
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.029761868860590093,
198
+ "scr_metric_threshold_2": 0.06273069304578824,
199
+ "scr_dir2_threshold_2": 0.06273069304578824,
200
+ "scr_dir1_threshold_5": 0.059523737721180185,
201
+ "scr_metric_threshold_5": 0.154981518163139,
202
+ "scr_dir2_threshold_5": 0.154981518163139,
203
+ "scr_dir1_threshold_10": 0.09523826418545839,
204
+ "scr_metric_threshold_10": 0.2693727172633069,
205
+ "scr_dir2_threshold_10": 0.2693727172633069,
206
+ "scr_dir1_threshold_20": 0.10714286981390937,
207
+ "scr_metric_threshold_20": 0.36162354238065764,
208
+ "scr_dir2_threshold_20": 0.36162354238065764,
209
+ "scr_dir1_threshold_50": 0.10119056699968389,
210
+ "scr_metric_threshold_50": 0.5498154015747062,
211
+ "scr_dir2_threshold_50": 0.5498154015747062,
212
+ "scr_dir1_threshold_100": 0.10119056699968389,
213
+ "scr_metric_threshold_100": 0.6494464246674347,
214
+ "scr_dir2_threshold_100": 0.6494464246674347,
215
+ "scr_dir1_threshold_500": 0.041666829278503695,
216
+ "scr_metric_threshold_500": 0.6974169417057836,
217
+ "scr_dir2_threshold_500": 0.6974169417057836
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.06432770144975224,
222
+ "scr_metric_threshold_2": 0.02255654597847959,
223
+ "scr_dir2_threshold_2": 0.02255654597847959,
224
+ "scr_dir1_threshold_5": 0.15789479187871047,
225
+ "scr_metric_threshold_5": 0.0714285394174879,
226
+ "scr_dir2_threshold_5": 0.0714285394174879,
227
+ "scr_dir1_threshold_10": 0.1695908960355592,
228
+ "scr_metric_threshold_10": 0.0902257357587491,
229
+ "scr_dir2_threshold_10": 0.0902257357587491,
230
+ "scr_dir1_threshold_20": 0.26315798646451743,
231
+ "scr_metric_threshold_20": 0.1691729744506738,
232
+ "scr_dir2_threshold_20": 0.1691729744506738,
233
+ "scr_dir1_threshold_50": 0.31578958375742094,
234
+ "scr_metric_threshold_50": 0.26691740948385967,
235
+ "scr_dir2_threshold_50": 0.26691740948385967,
236
+ "scr_dir1_threshold_100": 0.3274856879142697,
237
+ "scr_metric_threshold_100": 0.3609022708022426,
238
+ "scr_dir2_threshold_100": 0.3609022708022426,
239
+ "scr_dir1_threshold_500": 0.35672542545864205,
240
+ "scr_metric_threshold_500": 0.5751881131322909,
241
+ "scr_dir2_threshold_500": 0.5751881131322909
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.07079658154282091,
246
+ "scr_metric_threshold_2": 0.2583586003887165,
247
+ "scr_dir2_threshold_2": 0.2583586003887165,
248
+ "scr_dir1_threshold_5": 0.2654869170482945,
249
+ "scr_metric_threshold_5": 0.4772036688923899,
250
+ "scr_dir2_threshold_5": 0.4772036688923899,
251
+ "scr_dir1_threshold_10": 0.3362829711165476,
252
+ "scr_metric_threshold_10": 0.5562309138316113,
253
+ "scr_dir2_threshold_10": 0.5562309138316113,
254
+ "scr_dir1_threshold_20": -0.00884944082421066,
255
+ "scr_metric_threshold_20": 0.6322188653594468,
256
+ "scr_dir2_threshold_20": 0.6322188653594468,
257
+ "scr_dir1_threshold_50": -0.01769888164842132,
258
+ "scr_metric_threshold_50": 0.6869301324853652,
259
+ "scr_dir2_threshold_50": 0.6869301324853652,
260
+ "scr_dir1_threshold_100": -0.05309717241983178,
261
+ "scr_metric_threshold_100": 0.7386017438616274,
262
+ "scr_dir2_threshold_100": 0.7386017438616274,
263
+ "scr_dir1_threshold_500": -0.2654863895737267,
264
+ "scr_metric_threshold_500": 0.7355622692811064,
265
+ "scr_dir2_threshold_500": 0.7355622692811064
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.11057692996540351,
270
+ "scr_metric_threshold_2": 0.10599078847328613,
271
+ "scr_dir2_threshold_2": 0.10599078847328613,
272
+ "scr_dir1_threshold_5": 0.16346143651202796,
273
+ "scr_metric_threshold_5": 0.11059902391217406,
274
+ "scr_dir2_threshold_5": 0.11059902391217406,
275
+ "scr_dir1_threshold_10": 0.23076912055354373,
276
+ "scr_metric_threshold_10": 0.14285722133593506,
277
+ "scr_dir2_threshold_10": 0.14285722133593506,
278
+ "scr_dir1_threshold_20": 0.24519229804843506,
279
+ "scr_metric_threshold_20": 0.16129043776725951,
280
+ "scr_dir2_threshold_20": 0.16129043776725951,
281
+ "scr_dir1_threshold_50": 0.29326917428369115,
282
+ "scr_metric_threshold_50": 0.24423977437033326,
283
+ "scr_dir2_threshold_50": 0.24423977437033326,
284
+ "scr_dir1_threshold_100": 0.4375000895502457,
285
+ "scr_metric_threshold_100": 0.32258060085874635,
286
+ "scr_dir2_threshold_100": 0.32258060085874635,
287
+ "scr_dir1_threshold_500": 0.4855769657855018,
288
+ "scr_metric_threshold_500": 0.42396315389314454,
289
+ "scr_dir2_threshold_500": 0.42396315389314454
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_65k/average_l0_35",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_19_width_65k_average_l0_63_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732193441893,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.20050364162945206,
76
+ "scr_metric_threshold_2": 0.1638035580823632,
77
+ "scr_dir2_threshold_2": 0.1638035580823632,
78
+ "scr_dir1_threshold_5": 0.28236306083718543,
79
+ "scr_metric_threshold_5": 0.23720697931139736,
80
+ "scr_dir2_threshold_5": 0.23720697931139736,
81
+ "scr_dir1_threshold_10": 0.2864415092719489,
82
+ "scr_metric_threshold_10": 0.28521638154735085,
83
+ "scr_dir2_threshold_10": 0.28521638154735085,
84
+ "scr_dir1_threshold_20": 0.20974386588473318,
85
+ "scr_metric_threshold_20": 0.3338403258413778,
86
+ "scr_dir2_threshold_20": 0.3338403258413778,
87
+ "scr_dir1_threshold_50": 0.11627620057320684,
88
+ "scr_metric_threshold_50": 0.41402396212038595,
89
+ "scr_dir2_threshold_50": 0.41402396212038595,
90
+ "scr_dir1_threshold_100": 0.1434389994027683,
91
+ "scr_metric_threshold_100": 0.454710090080829,
92
+ "scr_dir2_threshold_100": 0.454710090080829,
93
+ "scr_dir1_threshold_500": -0.522861146809531,
94
+ "scr_metric_threshold_500": 0.448735224507563,
95
+ "scr_dir2_threshold_500": 0.448735224507563
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.27941247367097016,
102
+ "scr_metric_threshold_2": 0.07341782504635204,
103
+ "scr_dir2_threshold_2": 0.07341782504635204,
104
+ "scr_dir1_threshold_5": 0.39705886220023473,
105
+ "scr_metric_threshold_5": 0.08607603952376044,
106
+ "scr_dir2_threshold_5": 0.08607603952376044,
107
+ "scr_dir1_threshold_10": 0.38235317320140844,
108
+ "scr_metric_threshold_10": 0.06329122328486143,
109
+ "scr_dir2_threshold_10": 0.06329122328486143,
110
+ "scr_dir1_threshold_20": 0.3676474842025821,
111
+ "scr_metric_threshold_20": 0.08860765223967824,
112
+ "scr_dir2_threshold_20": 0.08860765223967824,
113
+ "scr_dir1_threshold_50": 0.19117658660070422,
114
+ "scr_metric_threshold_50": 0.07848105047818764,
115
+ "scr_dir2_threshold_50": 0.07848105047818764,
116
+ "scr_dir1_threshold_100": 0.19117658660070422,
117
+ "scr_metric_threshold_100": 0.12151907024006786,
118
+ "scr_dir2_threshold_100": 0.12151907024006786,
119
+ "scr_dir1_threshold_500": -1.8529404803957747,
120
+ "scr_metric_threshold_500": 0.2962026111056872,
121
+ "scr_dir2_threshold_500": 0.2962026111056872
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3333333333333333,
126
+ "scr_metric_threshold_2": 0.19117644480768212,
127
+ "scr_dir2_threshold_2": 0.19117644480768212,
128
+ "scr_dir1_threshold_5": 0.36036054902857856,
129
+ "scr_metric_threshold_5": 0.3264705603922967,
130
+ "scr_dir2_threshold_5": 0.3264705603922967,
131
+ "scr_dir1_threshold_10": 0.3333333333333333,
132
+ "scr_metric_threshold_10": 0.3882353415538649,
133
+ "scr_dir2_threshold_10": 0.3882353415538649,
134
+ "scr_dir1_threshold_20": 0.3333333333333333,
135
+ "scr_metric_threshold_20": 0.4794116811768901,
136
+ "scr_dir2_threshold_20": 0.4794116811768901,
137
+ "scr_dir1_threshold_50": 0.3333333333333333,
138
+ "scr_metric_threshold_50": 0.579411786361547,
139
+ "scr_dir2_threshold_50": 0.579411786361547,
140
+ "scr_dir1_threshold_100": 0.3333333333333333,
141
+ "scr_metric_threshold_100": 0.6029410681922649,
142
+ "scr_dir2_threshold_100": 0.6029410681922649,
143
+ "scr_dir1_threshold_500": -0.4774777677362747,
144
+ "scr_metric_threshold_500": 0.14999998246922386,
145
+ "scr_dir2_threshold_500": 0.14999998246922386
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.37036987979734426,
150
+ "scr_metric_threshold_2": 0.029411678770695084,
151
+ "scr_dir2_threshold_2": 0.029411678770695084,
152
+ "scr_dir1_threshold_5": 0.5,
153
+ "scr_metric_threshold_5": 0.07843124078173246,
154
+ "scr_dir2_threshold_5": 0.07843124078173246,
155
+ "scr_dir1_threshold_10": 0.4814806229786858,
156
+ "scr_metric_threshold_10": 0.1323528466477646,
157
+ "scr_dir2_threshold_10": 0.1323528466477646,
158
+ "scr_dir1_threshold_20": 0.4629623497466804,
159
+ "scr_metric_threshold_20": 0.2303921167596577,
160
+ "scr_dir2_threshold_20": 0.2303921167596577,
161
+ "scr_dir1_threshold_50": 0.37036987979734426,
162
+ "scr_metric_threshold_50": 0.330882335754139,
163
+ "scr_dir2_threshold_50": 0.330882335754139,
164
+ "scr_dir1_threshold_100": 0.3333333333333333,
165
+ "scr_metric_threshold_100": 0.2843137226256898,
166
+ "scr_dir2_threshold_100": 0.2843137226256898,
167
+ "scr_dir1_threshold_500": -1.4629623497466804,
168
+ "scr_metric_threshold_500": 0.09068627737431016,
169
+ "scr_dir2_threshold_500": 0.09068627737431016
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.28125005820765137,
174
+ "scr_metric_threshold_2": 0.200000071169717,
175
+ "scr_dir2_threshold_2": 0.200000071169717,
176
+ "scr_dir1_threshold_5": 0.2656247962732202,
177
+ "scr_metric_threshold_5": 0.280596928884372,
178
+ "scr_dir2_threshold_5": 0.280596928884372,
179
+ "scr_dir1_threshold_10": 0.2578123981366101,
180
+ "scr_metric_threshold_10": 0.40895524990534426,
181
+ "scr_dir2_threshold_10": 0.40895524990534426,
182
+ "scr_dir1_threshold_20": 0.1484374272404358,
183
+ "scr_metric_threshold_20": 0.3850746072143319,
184
+ "scr_dir2_threshold_20": 0.3850746072143319,
185
+ "scr_dir1_threshold_50": -0.023437660071041276,
186
+ "scr_metric_threshold_50": 0.49253732139959516,
187
+ "scr_dir2_threshold_50": 0.49253732139959516,
188
+ "scr_dir1_threshold_100": 0.14062502910382568,
189
+ "scr_metric_threshold_100": 0.5522387502028336,
190
+ "scr_dir2_threshold_100": 0.5522387502028336,
191
+ "scr_dir1_threshold_500": -0.15624982537704588,
192
+ "scr_metric_threshold_500": 0.480597000054089,
193
+ "scr_dir2_threshold_500": 0.480597000054089
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.041666829278503695,
198
+ "scr_metric_threshold_2": 0.3837639163634748,
199
+ "scr_dir2_threshold_2": 0.3837639163634748,
200
+ "scr_dir1_threshold_5": 0.053571434906954686,
201
+ "scr_metric_threshold_5": 0.44280440044991604,
202
+ "scr_dir2_threshold_5": 0.44280440044991604,
203
+ "scr_dir1_threshold_10": 0.07738100095331929,
204
+ "scr_metric_threshold_10": 0.4723247524647948,
205
+ "scr_dir2_threshold_10": 0.4723247524647948,
206
+ "scr_dir1_threshold_20": 0.11309517262813487,
207
+ "scr_metric_threshold_20": 0.5719557755575233,
208
+ "scr_dir2_threshold_20": 0.5719557755575233,
209
+ "scr_dir1_threshold_50": -0.33333321507017916,
210
+ "scr_metric_threshold_50": 0.6605166116588432,
211
+ "scr_dir2_threshold_50": 0.6605166116588432,
212
+ "scr_dir1_threshold_100": -0.25595221411685987,
213
+ "scr_metric_threshold_100": 0.7047971396811614,
214
+ "scr_dir2_threshold_100": 0.7047971396811614,
215
+ "scr_dir1_threshold_500": -0.26190451693108535,
216
+ "scr_metric_threshold_500": 0.7158671067292537,
217
+ "scr_dir2_threshold_500": 0.7158671067292537
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.10526319458580699,
222
+ "scr_metric_threshold_2": 0.007518923352021404,
223
+ "scr_dir2_threshold_2": 0.007518923352021404,
224
+ "scr_dir1_threshold_5": 0.1754387738314004,
225
+ "scr_metric_threshold_5": 0.03007524525291637,
226
+ "scr_dir2_threshold_5": 0.03007524525291637,
227
+ "scr_dir1_threshold_10": 0.22807037112430392,
228
+ "scr_metric_threshold_10": 0.0902257357587491,
229
+ "scr_dir2_threshold_10": 0.0902257357587491,
230
+ "scr_dir1_threshold_20": 0.27485409062136623,
231
+ "scr_metric_threshold_20": 0.12030075693408086,
232
+ "scr_dir2_threshold_20": 0.12030075693408086,
233
+ "scr_dir1_threshold_50": 0.3625733032544832,
234
+ "scr_metric_threshold_50": 0.22556389124170353,
235
+ "scr_dir2_threshold_50": 0.22556389124170353,
236
+ "scr_dir1_threshold_100": 0.3508771990976345,
237
+ "scr_metric_threshold_100": 0.3045113540112128,
238
+ "scr_dir2_threshold_100": 0.3045113540112128,
239
+ "scr_dir1_threshold_500": 0.49707623538466256,
240
+ "scr_metric_threshold_500": 0.6015037846704043,
241
+ "scr_dir2_threshold_500": 0.6015037846704043
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.10619487231423137,
246
+ "scr_metric_threshold_2": 0.3191488166756768,
247
+ "scr_dir2_threshold_2": 0.3191488166756768,
248
+ "scr_dir1_threshold_5": 0.30973464864391564,
249
+ "scr_metric_threshold_5": 0.5379938851793502,
250
+ "scr_dir2_threshold_5": 0.5379938851793502,
251
+ "scr_dir1_threshold_10": 0.3628318210637474,
252
+ "scr_metric_threshold_10": 0.5927051523052685,
253
+ "scr_dir2_threshold_10": 0.5927051523052685,
254
+ "scr_dir1_threshold_20": -0.18584036720669514,
255
+ "scr_metric_threshold_20": 0.6382978145204887,
256
+ "scr_dir2_threshold_20": 0.6382978145204887,
257
+ "scr_dir1_threshold_50": -0.23008809880231626,
258
+ "scr_metric_threshold_50": 0.7051671611376262,
259
+ "scr_dir2_threshold_50": 0.7051671611376262,
260
+ "scr_dir1_threshold_100": -0.2920352395209265,
261
+ "scr_metric_threshold_100": 0.7355622692811064,
262
+ "scr_dir2_threshold_100": 0.7355622692811064,
263
+ "scr_dir1_threshold_500": -0.9203534501584006,
264
+ "scr_metric_threshold_500": 0.7112461102986682,
265
+ "scr_dir2_threshold_500": 0.7112461102986682
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.08653849184777547,
270
+ "scr_metric_threshold_2": 0.10599078847328613,
271
+ "scr_dir2_threshold_2": 0.10599078847328613,
272
+ "scr_dir1_threshold_5": 0.19711542181317898,
273
+ "scr_metric_threshold_5": 0.11520753402683473,
274
+ "scr_dir2_threshold_5": 0.11520753402683473,
275
+ "scr_dir1_threshold_10": 0.16826935338418259,
276
+ "scr_metric_threshold_10": 0.13364075045815918,
277
+ "scr_dir2_threshold_10": 0.13364075045815918,
278
+ "scr_dir1_threshold_20": 0.16346143651202796,
279
+ "scr_metric_threshold_20": 0.1566822023283716,
280
+ "scr_dir2_threshold_20": 0.1566822023283716,
281
+ "scr_dir1_threshold_50": 0.2596154755433264,
282
+ "scr_metric_threshold_50": 0.23963153893144531,
283
+ "scr_dir2_threshold_50": 0.23963153893144531,
284
+ "scr_dir1_threshold_100": 0.3461539673911019,
285
+ "scr_metric_threshold_100": 0.3317973464122949,
286
+ "scr_dir2_threshold_100": 0.3317973464122949,
287
+ "scr_dir1_threshold_500": 0.45192298048435076,
288
+ "scr_metric_threshold_500": 0.5437789233588672,
289
+ "scr_dir2_threshold_500": 0.5437789233588672
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_19/width_65k/average_l0_63",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_143_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732144251493,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.1739229944438654,
76
+ "scr_metric_threshold_2": 0.07808843723357449,
77
+ "scr_dir2_threshold_2": 0.07808843723357449,
78
+ "scr_dir1_threshold_5": 0.28103293452907335,
79
+ "scr_metric_threshold_5": 0.11849839348206448,
80
+ "scr_dir2_threshold_5": 0.11849839348206448,
81
+ "scr_dir1_threshold_10": 0.2900455299208477,
82
+ "scr_metric_threshold_10": 0.1573748234880298,
83
+ "scr_dir2_threshold_10": 0.1573748234880298,
84
+ "scr_dir1_threshold_20": 0.3326377714853802,
85
+ "scr_metric_threshold_20": 0.1940932937450407,
86
+ "scr_dir2_threshold_20": 0.1940932937450407,
87
+ "scr_dir1_threshold_50": 0.15419575976585162,
88
+ "scr_metric_threshold_50": 0.21289007151124054,
89
+ "scr_dir2_threshold_50": 0.21289007151124054,
90
+ "scr_dir1_threshold_100": -0.09603587067176947,
91
+ "scr_metric_threshold_100": 0.23520944310959352,
92
+ "scr_dir2_threshold_100": 0.23520944310959352,
93
+ "scr_dir1_threshold_500": -0.5897787217936767,
94
+ "scr_metric_threshold_500": 0.26347474358711287,
95
+ "scr_dir2_threshold_500": 0.26347474358711287
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.21428510607499116,
102
+ "scr_metric_threshold_2": 0.009389690411235671,
103
+ "scr_dir2_threshold_2": 0.009389690411235671,
104
+ "scr_dir1_threshold_5": 0.32142872348125223,
105
+ "scr_metric_threshold_5": 0.018779380822471343,
106
+ "scr_dir2_threshold_5": 0.018779380822471343,
107
+ "scr_dir1_threshold_10": 0.39285638259373895,
108
+ "scr_metric_threshold_10": 0.030516458857264225,
109
+ "scr_dir2_threshold_10": 0.030516458857264225,
110
+ "scr_dir1_threshold_20": 0.5,
111
+ "scr_metric_threshold_20": 0.06338030533808565,
112
+ "scr_dir2_threshold_20": 0.06338030533808565,
113
+ "scr_dir1_threshold_50": 0.607143617406261,
114
+ "scr_metric_threshold_50": 0.08685446140767143,
115
+ "scr_dir2_threshold_50": 0.08685446140767143,
116
+ "scr_dir1_threshold_100": 0.5714276591124867,
117
+ "scr_metric_threshold_100": 0.12910799829972852,
118
+ "scr_dir2_threshold_100": 0.12910799829972852,
119
+ "scr_dir1_threshold_500": 0.5,
120
+ "scr_metric_threshold_500": 0.18779338847369295,
121
+ "scr_dir2_threshold_500": 0.18779338847369295
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.46153895530476774,
126
+ "scr_metric_threshold_2": 0.04896905474409945,
127
+ "scr_dir2_threshold_2": 0.04896905474409945,
128
+ "scr_dir1_threshold_5": 0.5230766268171394,
129
+ "scr_metric_threshold_5": 0.07474237890118533,
130
+ "scr_dir2_threshold_5": 0.07474237890118533,
131
+ "scr_dir1_threshold_10": 0.5538463795678938,
132
+ "scr_metric_threshold_10": 0.10567012209734605,
133
+ "scr_dir2_threshold_10": 0.10567012209734605,
134
+ "scr_dir1_threshold_20": 0.630769385952927,
135
+ "scr_metric_threshold_20": 0.13659801891372075,
136
+ "scr_dir2_threshold_20": 0.13659801891372075,
137
+ "scr_dir1_threshold_50": 0.6000005501967411,
138
+ "scr_metric_threshold_50": 0.23195884207227524,
139
+ "scr_dir2_threshold_50": 0.23195884207227524,
140
+ "scr_dir1_threshold_100": 0.09230742426312609,
141
+ "scr_metric_threshold_100": 0.1288660063045736,
142
+ "scr_dir2_threshold_100": 0.1288660063045736,
143
+ "scr_dir1_threshold_500": -0.13846159489197346,
144
+ "scr_metric_threshold_500": 0.17010320205973983,
145
+ "scr_dir2_threshold_500": 0.17010320205973983
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.31818157188179863,
150
+ "scr_metric_threshold_2": 0.017811626879176687,
151
+ "scr_dir2_threshold_2": 0.017811626879176687,
152
+ "scr_dir1_threshold_5": 0.43181775079314766,
153
+ "scr_metric_threshold_5": 0.03562340542411358,
154
+ "scr_dir2_threshold_5": 0.03562340542411358,
155
+ "scr_dir1_threshold_10": 0.5,
156
+ "scr_metric_threshold_10": 0.08651394126834436,
157
+ "scr_dir2_threshold_10": 0.08651394126834436,
158
+ "scr_dir1_threshold_20": 0.4772730351477517,
159
+ "scr_metric_threshold_20": 0.09414758231128305,
160
+ "scr_dir2_threshold_20": 0.09414758231128305,
161
+ "scr_dir1_threshold_50": 0.22727235782269797,
162
+ "scr_metric_threshold_50": 0.11959285023339844,
163
+ "scr_dir2_threshold_50": 0.11959285023339844,
164
+ "scr_dir1_threshold_100": 0.045455284354604046,
165
+ "scr_metric_threshold_100": 0.1806615235796274,
166
+ "scr_dir2_threshold_100": 0.1806615235796274,
167
+ "scr_dir1_threshold_500": -2.249997968024839,
168
+ "scr_metric_threshold_500": 0.2671754648479718,
169
+ "scr_dir2_threshold_500": 0.2671754648479718
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.07407382878756102,
174
+ "scr_metric_threshold_2": 0.03494626369444612,
175
+ "scr_dir2_threshold_2": 0.03494626369444612,
176
+ "scr_dir1_threshold_5": 0.2592595045457723,
177
+ "scr_metric_threshold_5": 0.029569989384926595,
178
+ "scr_dir2_threshold_5": 0.029569989384926595,
179
+ "scr_dir1_threshold_10": 0.2839505355217796,
180
+ "scr_metric_threshold_10": 0.06989252738889223,
181
+ "scr_dir2_threshold_10": 0.06989252738889223,
182
+ "scr_dir1_threshold_20": 0.23456773771022588,
183
+ "scr_metric_threshold_20": 0.00806457169180236,
184
+ "scr_dir2_threshold_20": 0.00806457169180236,
185
+ "scr_dir1_threshold_50": -1.2345677377102258,
186
+ "scr_metric_threshold_50": 0.03763440084920587,
187
+ "scr_dir2_threshold_50": 0.03763440084920587,
188
+ "scr_dir1_threshold_100": -2.6913569617831348,
189
+ "scr_metric_threshold_100": 0.19354843878307196,
190
+ "scr_dir2_threshold_100": 0.19354843878307196,
191
+ "scr_dir1_threshold_500": -3.1358014062275794,
192
+ "scr_metric_threshold_500": 0.15053760339682348,
193
+ "scr_dir2_threshold_500": 0.15053760339682348
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.028409063970008027,
198
+ "scr_metric_threshold_2": 0.2237443630755011,
199
+ "scr_dir2_threshold_2": 0.2237443630755011,
200
+ "scr_dir1_threshold_5": 0.06250021166422265,
201
+ "scr_metric_threshold_5": 0.2831048625133396,
202
+ "scr_dir2_threshold_5": 0.2831048625133396,
203
+ "scr_dir1_threshold_10": 0.08522719191002408,
204
+ "scr_metric_threshold_10": 0.3607305078559723,
205
+ "scr_dir2_threshold_10": 0.3607305078559723,
206
+ "scr_dir1_threshold_20": 0.10227276575713139,
207
+ "scr_metric_threshold_20": 0.4931505022438831,
208
+ "scr_dir2_threshold_20": 0.4931505022438831,
209
+ "scr_dir1_threshold_50": 0.1193183396042387,
210
+ "scr_metric_threshold_50": 0.27397256172821866,
211
+ "scr_dir2_threshold_50": 0.27397256172821866,
212
+ "scr_dir1_threshold_100": 0.06250021166422265,
213
+ "scr_metric_threshold_100": 0.23744281425318253,
214
+ "scr_dir2_threshold_100": 0.23744281425318253,
215
+ "scr_dir1_threshold_500": -0.14204531985004012,
216
+ "scr_metric_threshold_500": -0.013698723344957598,
217
+ "scr_dir2_threshold_500": -0.013698723344957598
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.12403078926247804,
222
+ "scr_metric_threshold_2": 0.07661287027579163,
223
+ "scr_dir2_threshold_2": 0.07661287027579163,
224
+ "scr_dir1_threshold_5": 0.22480606544219944,
225
+ "scr_metric_threshold_5": 0.1733872498948507,
226
+ "scr_dir2_threshold_5": 0.1733872498948507,
227
+ "scr_dir1_threshold_10": 0.21705422774794722,
228
+ "scr_metric_threshold_10": 0.1854838670912696,
229
+ "scr_dir2_threshold_10": 0.1854838670912696,
230
+ "scr_dir1_threshold_20": 0.3023253664876833,
231
+ "scr_metric_threshold_20": 0.2661291834404855,
232
+ "scr_dir2_threshold_20": 0.2661291834404855,
233
+ "scr_dir1_threshold_50": 0.2868216910991789,
234
+ "scr_metric_threshold_50": 0.3629033227182599,
235
+ "scr_dir2_threshold_50": 0.3629033227182599,
236
+ "scr_dir1_threshold_100": 0.3488373167561583,
237
+ "scr_metric_threshold_100": 0.3709677341825392,
238
+ "scr_dir2_threshold_100": 0.3709677341825392,
239
+ "scr_dir1_threshold_500": -0.10077527617972139,
240
+ "scr_metric_threshold_500": 0.3629033227182599,
241
+ "scr_dir2_threshold_500": 0.3629033227182599
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.11931829919557463,
246
+ "scr_metric_threshold_2": 0.19313308563727843,
247
+ "scr_dir2_threshold_2": 0.19313308563727843,
248
+ "scr_dir1_threshold_5": 0.30681819336349914,
249
+ "scr_metric_threshold_5": 0.2875537337059348,
250
+ "scr_dir2_threshold_5": 0.2875537337059348,
251
+ "scr_dir1_threshold_10": 0.15340909668174957,
252
+ "scr_metric_threshold_10": 0.3347639298333219,
253
+ "scr_dir2_threshold_10": 0.3347639298333219,
254
+ "scr_dir1_threshold_20": 0.23863625972850772,
255
+ "scr_metric_threshold_20": 0.3304721403333562,
256
+ "scr_dir2_threshold_20": 0.3304721403333562,
257
+ "scr_dir1_threshold_50": 0.37500012699849056,
258
+ "scr_metric_threshold_50": 0.3690987574608119,
259
+ "scr_dir2_threshold_50": 0.3690987574608119,
260
+ "scr_dir1_threshold_100": 0.48295460125691253,
261
+ "scr_metric_threshold_100": 0.3948497502744883,
262
+ "scr_dir2_threshold_100": 0.3948497502744883,
263
+ "scr_dir1_threshold_500": 0.19318197596782163,
264
+ "scr_metric_threshold_500": 0.5407726397843796,
265
+ "scr_dir2_threshold_500": 0.5407726397843796
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.05154634107374385,
270
+ "scr_metric_threshold_2": 0.020100543151066925,
271
+ "scr_dir2_threshold_2": 0.020100543151066925,
272
+ "scr_dir1_threshold_5": 0.1185564001253541,
273
+ "scr_metric_threshold_5": 0.04522614720969393,
274
+ "scr_dir2_threshold_5": 0.04522614720969393,
275
+ "scr_dir1_threshold_10": 0.1340204253436484,
276
+ "scr_metric_threshold_10": 0.08542723351182778,
277
+ "scr_dir2_threshold_10": 0.08542723351182778,
278
+ "scr_dir1_threshold_20": 0.17525762109881465,
279
+ "scr_metric_threshold_20": 0.1608040456877088,
280
+ "scr_dir2_threshold_20": 0.1608040456877088,
281
+ "scr_dir1_threshold_50": 0.25257713270943044,
282
+ "scr_metric_threshold_50": 0.22110537562008298,
283
+ "scr_dir2_threshold_50": 0.22110537562008298,
284
+ "scr_dir1_threshold_100": 0.3195874990014686,
285
+ "scr_metric_threshold_100": 0.24623127919953658,
286
+ "scr_dir2_threshold_100": 0.24623127919953658,
287
+ "scr_dir1_threshold_500": 0.3556698148569181,
288
+ "scr_metric_threshold_500": 0.44221105076099254,
289
+ "scr_dir2_threshold_500": 0.44221105076099254
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_16k/average_l0_143",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_18_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732144669587,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.23262239313257055,
76
+ "scr_metric_threshold_2": 0.06843690420671661,
77
+ "scr_dir2_threshold_2": 0.06843690420671661,
78
+ "scr_dir1_threshold_5": 0.25360755616791775,
79
+ "scr_metric_threshold_5": 0.11314570781574068,
80
+ "scr_dir2_threshold_5": 0.11314570781574068,
81
+ "scr_dir1_threshold_10": 0.27548878692008005,
82
+ "scr_metric_threshold_10": 0.1360706707622432,
83
+ "scr_dir2_threshold_10": 0.1360706707622432,
84
+ "scr_dir1_threshold_20": 0.29941274365415277,
85
+ "scr_metric_threshold_20": 0.1634674398123702,
86
+ "scr_dir2_threshold_20": 0.1634674398123702,
87
+ "scr_dir1_threshold_50": 0.32007948712729506,
88
+ "scr_metric_threshold_50": 0.20818131844651377,
89
+ "scr_dir2_threshold_50": 0.20818131844651377,
90
+ "scr_dir1_threshold_100": 0.2404056250465834,
91
+ "scr_metric_threshold_100": 0.25480068625341856,
92
+ "scr_dir2_threshold_100": 0.25480068625341856,
93
+ "scr_dir1_threshold_500": -0.1665992034889775,
94
+ "scr_metric_threshold_500": 0.2673859560364121,
95
+ "scr_dir2_threshold_500": 0.2673859560364121
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.32142872348125223,
102
+ "scr_metric_threshold_2": 0.014084465658350092,
103
+ "scr_dir2_threshold_2": 0.014084465658350092,
104
+ "scr_dir1_threshold_5": 0.4285723408875132,
105
+ "scr_metric_threshold_5": 0.030516458857264225,
106
+ "scr_dir2_threshold_5": 0.030516458857264225,
107
+ "scr_dir1_threshold_10": 0.4285723408875132,
108
+ "scr_metric_threshold_10": 0.037558761644942686,
109
+ "scr_dir2_threshold_10": 0.037558761644942686,
110
+ "scr_dir1_threshold_20": 0.607143617406261,
111
+ "scr_metric_threshold_20": 0.0563380025504072,
112
+ "scr_dir2_threshold_20": 0.0563380025504072,
113
+ "scr_dir1_threshold_50": 0.5714276591124867,
114
+ "scr_metric_threshold_50": 0.07042260812576412,
115
+ "scr_dir2_threshold_50": 0.07042260812576412,
116
+ "scr_dir1_threshold_100": 0.4285723408875132,
117
+ "scr_metric_threshold_100": 0.10328645460658556,
118
+ "scr_dir2_threshold_100": 0.10328645460658556,
119
+ "scr_dir1_threshold_500": -0.8214287234812522,
120
+ "scr_metric_threshold_500": 0.10563384223014277,
121
+ "scr_dir2_threshold_500": 0.10563384223014277
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.4153847846759204,
126
+ "scr_metric_threshold_2": 0.06701036629203817,
127
+ "scr_dir2_threshold_2": 0.06701036629203817,
128
+ "scr_dir1_threshold_5": 0.46153895530476774,
129
+ "scr_metric_threshold_5": 0.10567012209734605,
130
+ "scr_dir2_threshold_5": 0.10567012209734605,
131
+ "scr_dir1_threshold_10": 0.44615362043210616,
132
+ "scr_metric_threshold_10": 0.1288660063045736,
133
+ "scr_dir2_threshold_10": 0.1288660063045736,
134
+ "scr_dir1_threshold_20": 0.46153895530476774,
135
+ "scr_metric_threshold_20": 0.16237118945059267,
136
+ "scr_dir2_threshold_20": 0.16237118945059267,
137
+ "scr_dir1_threshold_50": 0.44615362043210616,
138
+ "scr_metric_threshold_50": 0.22422682946312808,
139
+ "scr_dir2_threshold_50": 0.22422682946312808,
140
+ "scr_dir1_threshold_100": 0.4307692025540133,
141
+ "scr_metric_threshold_100": 0.2680413115479387,
142
+ "scr_dir2_threshold_100": 0.2680413115479387,
143
+ "scr_dir1_threshold_500": 0.13846159489197346,
144
+ "scr_metric_threshold_500": 0.31701036629203816,
145
+ "scr_dir2_threshold_500": 0.31701036629203816
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3636368562364027,
150
+ "scr_metric_threshold_2": 0.010178137501998197,
151
+ "scr_dir2_threshold_2": 0.010178137501998197,
152
+ "scr_dir1_threshold_5": 0.45454607029550337,
153
+ "scr_metric_threshold_5": 0.04071239834223257,
154
+ "scr_dir2_threshold_5": 0.04071239834223257,
155
+ "scr_dir1_threshold_10": 0.4772730351477517,
156
+ "scr_metric_threshold_10": 0.06615766626434796,
157
+ "scr_dir2_threshold_10": 0.06615766626434796,
158
+ "scr_dir1_threshold_20": 0.43181775079314766,
159
+ "scr_metric_threshold_20": 0.09414758231128305,
160
+ "scr_dir2_threshold_20": 0.09414758231128305,
161
+ "scr_dir1_threshold_50": 0.45454607029550337,
162
+ "scr_metric_threshold_50": 0.11959285023339844,
163
+ "scr_dir2_threshold_50": 0.11959285023339844,
164
+ "scr_dir1_threshold_100": 0.045455284354604046,
165
+ "scr_metric_threshold_100": 0.15012711107363283,
166
+ "scr_dir2_threshold_100": 0.15012711107363283,
167
+ "scr_dir1_threshold_500": -1.568180894556745,
168
+ "scr_metric_threshold_500": 0.26972011297279147,
169
+ "scr_dir2_threshold_500": 0.26972011297279147
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3209878178453297,
174
+ "scr_metric_threshold_2": 0.013440846001321878,
175
+ "scr_dir2_threshold_2": 0.013440846001321878,
176
+ "scr_dir1_threshold_5": 0.27160502003377596,
177
+ "scr_metric_threshold_5": 0.021505417693124237,
178
+ "scr_dir2_threshold_5": 0.021505417693124237,
179
+ "scr_dir1_threshold_10": 0.2839505355217796,
180
+ "scr_metric_threshold_10": 0.06720439023413247,
181
+ "scr_dir2_threshold_10": 0.06720439023413247,
182
+ "scr_dir1_threshold_20": 0.27160502003377596,
183
+ "scr_metric_threshold_20": 0.021505417693124237,
184
+ "scr_dir2_threshold_20": 0.021505417693124237,
185
+ "scr_dir1_threshold_50": 0.2592595045457723,
186
+ "scr_metric_threshold_50": 0.04569897254100823,
187
+ "scr_dir2_threshold_50": 0.04569897254100823,
188
+ "scr_dir1_threshold_100": 0.24691325319822952,
189
+ "scr_metric_threshold_100": 0.06451625307937271,
190
+ "scr_dir2_threshold_100": 0.06451625307937271,
191
+ "scr_dir1_threshold_500": 0.2222222222222222,
192
+ "scr_metric_threshold_500": -0.005376274309519518,
193
+ "scr_dir2_threshold_500": -0.005376274309519518
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.028409063970008027,
198
+ "scr_metric_threshold_2": 0.11872154321022935,
199
+ "scr_dir2_threshold_2": 0.11872154321022935,
200
+ "scr_dir1_threshold_5": 0.04545463781711534,
201
+ "scr_metric_threshold_5": 0.1826484652079045,
202
+ "scr_dir2_threshold_5": 0.1826484652079045,
203
+ "scr_dir1_threshold_10": 0.07954544684857372,
204
+ "scr_metric_threshold_10": 0.1963469163855859,
205
+ "scr_dir2_threshold_10": 0.1963469163855859,
206
+ "scr_dir1_threshold_20": -0.011363490122900714,
207
+ "scr_metric_threshold_20": 0.30136973625085767,
208
+ "scr_dir2_threshold_20": 0.30136973625085767,
209
+ "scr_dir1_threshold_50": 0.051136382878565693,
210
+ "scr_metric_threshold_50": 0.4018264057235689,
211
+ "scr_dir2_threshold_50": 0.4018264057235689,
212
+ "scr_dir1_threshold_100": 0.10795451081858175,
213
+ "scr_metric_threshold_100": 0.43379000280604463,
214
+ "scr_dir2_threshold_100": 0.43379000280604463,
215
+ "scr_dir1_threshold_500": -0.011363490122900714,
216
+ "scr_metric_threshold_500": 0.44748845398372605,
217
+ "scr_dir2_threshold_500": 0.44748845398372605
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.17829457722520528,
222
+ "scr_metric_threshold_2": 0.036290332271825994,
223
+ "scr_dir2_threshold_2": 0.036290332271825994,
224
+ "scr_dir1_threshold_5": 0.20155055235944278,
225
+ "scr_metric_threshold_5": 0.05241939554166917,
226
+ "scr_dir2_threshold_5": 0.05241939554166917,
227
+ "scr_dir1_threshold_10": 0.23255790313645167,
228
+ "scr_metric_threshold_10": 0.08064531634921589,
229
+ "scr_dir2_threshold_10": 0.08064531634921589,
230
+ "scr_dir1_threshold_20": 0.209302390053695,
231
+ "scr_metric_threshold_20": 0.11290320254761761,
232
+ "scr_dir2_threshold_20": 0.11290320254761761,
233
+ "scr_dir1_threshold_50": 0.27131801571067443,
234
+ "scr_metric_threshold_50": 0.18145166135912996,
235
+ "scr_dir2_threshold_50": 0.18145166135912996,
236
+ "scr_dir1_threshold_100": 0.31007766623341637,
237
+ "scr_metric_threshold_100": 0.2782258006369044,
238
+ "scr_dir2_threshold_100": 0.2782258006369044,
239
+ "scr_dir1_threshold_500": 0.372092829838915,
240
+ "scr_metric_threshold_500": 0.24596791443850266,
241
+ "scr_dir2_threshold_500": 0.24596791443850266
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.1761364078934134,
246
+ "scr_metric_threshold_2": 0.2274679132647684,
247
+ "scr_dir2_threshold_2": 0.2274679132647684,
248
+ "scr_dir1_threshold_5": 0.06250019049773588,
249
+ "scr_metric_threshold_5": 0.38626617127455687,
250
+ "scr_dir2_threshold_5": 0.38626617127455687,
251
+ "scr_dir1_threshold_10": 0.09090924484665525,
252
+ "scr_metric_threshold_10": 0.41630895358819897,
253
+ "scr_dir2_threshold_10": 0.41630895358819897,
254
+ "scr_dir1_threshold_20": 0.25000008466566037,
255
+ "scr_metric_threshold_20": 0.433476367401944,
256
+ "scr_dir2_threshold_20": 0.433476367401944,
257
+ "scr_dir1_threshold_50": 0.295454707088988,
258
+ "scr_metric_threshold_50": 0.44635199171572326,
259
+ "scr_dir2_threshold_50": 0.44635199171572326,
260
+ "scr_dir1_threshold_100": 0.1988637191050772,
261
+ "scr_metric_threshold_100": 0.5193131806567868,
262
+ "scr_dir2_threshold_100": 0.5193131806567868,
263
+ "scr_dir1_threshold_500": 0.1704546647561578,
264
+ "scr_metric_threshold_500": 0.44206020221575754,
265
+ "scr_dir2_threshold_500": 0.44206020221575754
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.05670091373303265,
270
+ "scr_metric_threshold_2": 0.06030162945320077,
271
+ "scr_dir2_threshold_2": 0.06030162945320077,
272
+ "scr_dir1_threshold_5": 0.1030926821474877,
273
+ "scr_metric_threshold_5": 0.08542723351182778,
274
+ "scr_dir2_threshold_5": 0.08542723351182778,
275
+ "scr_dir1_threshold_10": 0.16494816853980915,
276
+ "scr_metric_threshold_10": 0.09547735532694795,
277
+ "scr_dir2_threshold_10": 0.09547735532694795,
278
+ "scr_dir1_threshold_20": 0.17525762109881465,
279
+ "scr_metric_threshold_20": 0.12562802029313505,
280
+ "scr_dir2_threshold_20": 0.12562802029313505,
281
+ "scr_dir1_threshold_50": 0.2113399369542642,
282
+ "scr_metric_threshold_50": 0.17587922841038905,
283
+ "scr_dir2_threshold_50": 0.17587922841038905,
284
+ "scr_dir1_threshold_100": 0.15463902322123155,
285
+ "scr_metric_threshold_100": 0.22110537562008298,
286
+ "scr_dir2_threshold_100": 0.22110537562008298,
287
+ "scr_dir1_threshold_500": 0.16494816853980915,
288
+ "scr_metric_threshold_500": 0.3165830304678575,
289
+ "scr_dir2_threshold_500": 0.3165830304678575
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_16k/average_l0_18",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_309_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732145126099,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.2562072531196681,
76
+ "scr_metric_threshold_2": 0.06296108740508702,
77
+ "scr_dir2_threshold_2": 0.06296108740508702,
78
+ "scr_dir1_threshold_5": 0.3118672116058791,
79
+ "scr_metric_threshold_5": 0.09911593535215107,
80
+ "scr_dir2_threshold_5": 0.09911593535215107,
81
+ "scr_dir1_threshold_10": 0.36376395767676833,
82
+ "scr_metric_threshold_10": 0.15987109620246212,
83
+ "scr_dir2_threshold_10": 0.15987109620246212,
84
+ "scr_dir1_threshold_20": 0.36620426588484173,
85
+ "scr_metric_threshold_20": 0.20324551833076343,
86
+ "scr_dir2_threshold_20": 0.20324551833076343,
87
+ "scr_dir1_threshold_50": 0.1735562660926766,
88
+ "scr_metric_threshold_50": 0.2607511594253647,
89
+ "scr_dir2_threshold_50": 0.2607511594253647,
90
+ "scr_dir1_threshold_100": 0.14375809878152637,
91
+ "scr_metric_threshold_100": 0.3269757705822512,
92
+ "scr_dir2_threshold_100": 0.3269757705822512,
93
+ "scr_dir1_threshold_500": -0.3834034863214657,
94
+ "scr_metric_threshold_500": 0.2083310418886983,
95
+ "scr_dir2_threshold_500": 0.2083310418886983
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.5,
102
+ "scr_metric_threshold_2": -0.004694775247114422,
103
+ "scr_dir2_threshold_2": -0.004694775247114422,
104
+ "scr_dir1_threshold_5": 0.607143617406261,
105
+ "scr_metric_threshold_5": 0.035211234104378646,
106
+ "scr_dir2_threshold_5": 0.035211234104378646,
107
+ "scr_dir1_threshold_10": 0.607143617406261,
108
+ "scr_metric_threshold_10": 0.07511738337287854,
109
+ "scr_dir2_threshold_10": 0.07511738337287854,
110
+ "scr_dir1_threshold_20": 0.6428574469625045,
111
+ "scr_metric_threshold_20": 0.1502347667457571,
112
+ "scr_dir2_threshold_20": 0.1502347667457571,
113
+ "scr_dir1_threshold_50": 0.21428510607499116,
114
+ "scr_metric_threshold_50": 0.21830984733095718,
115
+ "scr_dir2_threshold_50": 0.21830984733095718,
116
+ "scr_dir1_threshold_100": 0.39285638259373895,
117
+ "scr_metric_threshold_100": 0.20187799404904985,
118
+ "scr_dir2_threshold_100": 0.20187799404904985,
119
+ "scr_dir1_threshold_500": 0.1785712765187478,
120
+ "scr_metric_threshold_500": 0.014084465658350092,
121
+ "scr_dir2_threshold_500": 0.014084465658350092
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.44615362043210616,
126
+ "scr_metric_threshold_2": 0.020618597877583123,
127
+ "scr_dir2_threshold_2": 0.020618597877583123,
128
+ "scr_dir1_threshold_5": 0.6153849680748341,
129
+ "scr_metric_threshold_5": 0.043814482084810646,
130
+ "scr_dir2_threshold_5": 0.043814482084810646,
131
+ "scr_dir1_threshold_10": 0.630769385952927,
132
+ "scr_metric_threshold_10": 0.07989695156047413,
133
+ "scr_dir2_threshold_10": 0.07989695156047413,
134
+ "scr_dir1_threshold_20": 0.630769385952927,
135
+ "scr_metric_threshold_20": 0.08505152421976293,
136
+ "scr_dir2_threshold_20": 0.08505152421976293,
137
+ "scr_dir1_threshold_50": 0.5846152153240797,
138
+ "scr_metric_threshold_50": 0.22164954313348367,
139
+ "scr_dir2_threshold_50": 0.22164954313348367,
140
+ "scr_dir1_threshold_100": 0.5692307974459867,
141
+ "scr_metric_threshold_100": 0.2835051831460191,
142
+ "scr_dir2_threshold_100": 0.2835051831460191,
143
+ "scr_dir1_threshold_500": 0.2769231897839469,
144
+ "scr_metric_threshold_500": 0.2139175305243365,
145
+ "scr_dir2_threshold_500": 0.2139175305243365
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.45454607029550337,
150
+ "scr_metric_threshold_2": 0.025445267922115385,
151
+ "scr_dir2_threshold_2": 0.025445267922115385,
152
+ "scr_dir1_threshold_5": 0.31818157188179863,
153
+ "scr_metric_threshold_5": 0.04580154292611178,
154
+ "scr_dir2_threshold_5": 0.04580154292611178,
155
+ "scr_dir1_threshold_10": 0.5,
156
+ "scr_metric_threshold_10": 0.08905843772740385,
157
+ "scr_dir2_threshold_10": 0.08905843772740385,
158
+ "scr_dir1_threshold_20": 0.18181842811820134,
159
+ "scr_metric_threshold_20": 0.1984733021245643,
160
+ "scr_dir2_threshold_20": 0.1984733021245643,
161
+ "scr_dir1_threshold_50": -0.22727235782269797,
162
+ "scr_metric_threshold_50": 0.05343503230329027,
163
+ "scr_dir2_threshold_50": 0.05343503230329027,
164
+ "scr_dir1_threshold_100": -1.1590901086158456,
165
+ "scr_metric_threshold_100": 0.14503811815551382,
166
+ "scr_dir2_threshold_100": 0.14503811815551382,
167
+ "scr_dir1_threshold_500": -4.340905827433832,
168
+ "scr_metric_threshold_500": 0.29516538089490685,
169
+ "scr_dir2_threshold_500": 0.29516538089490685
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3209878178453297,
174
+ "scr_metric_threshold_2": 0.053763544232810594,
175
+ "scr_dir2_threshold_2": 0.053763544232810594,
176
+ "scr_dir1_threshold_5": 0.29629605100978323,
177
+ "scr_metric_threshold_5": 0.05107524685052775,
178
+ "scr_dir2_threshold_5": 0.05107524685052775,
179
+ "scr_dir1_threshold_10": 0.40740716212089434,
180
+ "scr_metric_threshold_10": 0.09139794508201646,
181
+ "scr_dir2_threshold_10": 0.09139794508201646,
182
+ "scr_dir1_threshold_20": 0.5308637887200092,
183
+ "scr_metric_threshold_20": -0.008064411464279277,
184
+ "scr_dir2_threshold_20": -0.008064411464279277,
185
+ "scr_dir1_threshold_50": -0.1358021420871184,
186
+ "scr_metric_threshold_50": 0.06720439023413247,
187
+ "scr_dir2_threshold_50": 0.06720439023413247,
188
+ "scr_dir1_threshold_100": 0.30864230235732604,
189
+ "scr_metric_threshold_100": 0.05913981854233011,
190
+ "scr_dir2_threshold_100": 0.05913981854233011,
191
+ "scr_dir1_threshold_500": 0.5555555555555556,
192
+ "scr_metric_threshold_500": -0.27956978910052277,
193
+ "scr_dir2_threshold_500": -0.27956978910052277
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.034090809031458384,
198
+ "scr_metric_threshold_2": 0.21461179012310397,
199
+ "scr_dir2_threshold_2": 0.21461179012310397,
200
+ "scr_dir1_threshold_5": 0.056818127940016054,
201
+ "scr_metric_threshold_5": 0.3378994837258938,
202
+ "scr_dir2_threshold_5": 0.3378994837258938,
203
+ "scr_dir1_threshold_10": 0.028409063970008027,
204
+ "scr_metric_threshold_10": 0.45662102693612316,
205
+ "scr_dir2_threshold_10": 0.45662102693612316,
206
+ "scr_dir1_threshold_20": 0.13636357478858976,
207
+ "scr_metric_threshold_20": 0.5296802497189192,
208
+ "scr_dir2_threshold_20": 0.5296802497189192,
209
+ "scr_dir1_threshold_50": 0.19318170272860583,
210
+ "scr_metric_threshold_50": 0.5753422979790763,
211
+ "scr_dir2_threshold_50": 0.5753422979790763,
212
+ "scr_dir1_threshold_100": 0.005681745061450357,
213
+ "scr_metric_threshold_100": 0.6484017929291486,
214
+ "scr_dir2_threshold_100": 0.6484017929291486,
215
+ "scr_dir1_threshold_500": 0.12500008466568907,
216
+ "scr_metric_threshold_500": 0.5753422979790763,
217
+ "scr_dir2_threshold_500": 0.5753422979790763
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.11627895156822583,
222
+ "scr_metric_threshold_2": 0.04435498407738989,
223
+ "scr_dir2_threshold_2": 0.04435498407738989,
224
+ "scr_dir1_threshold_5": 0.209302390053695,
225
+ "scr_metric_threshold_5": 0.08870972781349516,
226
+ "scr_dir2_threshold_5": 0.08870972781349516,
227
+ "scr_dir1_threshold_10": 0.19379825261370973,
228
+ "scr_metric_threshold_10": 0.10887099681547797,
229
+ "scr_dir2_threshold_10": 0.10887099681547797,
230
+ "scr_dir1_threshold_20": 0.12403078926247804,
231
+ "scr_metric_threshold_20": 0.20161293036111277,
232
+ "scr_dir2_threshold_20": 0.20161293036111277,
233
+ "scr_dir1_threshold_50": 0.209302390053695,
234
+ "scr_metric_threshold_50": 0.35483867091269605,
235
+ "scr_dir2_threshold_50": 0.35483867091269605,
236
+ "scr_dir1_threshold_100": 0.34108501701042526,
237
+ "scr_metric_threshold_100": 0.4717743195337379,
238
+ "scr_dir2_threshold_100": 0.4717743195337379,
239
+ "scr_dir1_threshold_500": 0.58139521989261,
240
+ "scr_metric_threshold_500": 0.5887097278134952,
241
+ "scr_dir2_threshold_500": 0.5887097278134952
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.13636386726998287,
246
+ "scr_metric_threshold_2": 0.12446368619618059,
247
+ "scr_dir2_threshold_2": 0.12446368619618059,
248
+ "scr_dir1_threshold_5": 0.27840913901457975,
249
+ "scr_metric_threshold_5": 0.15021467900985702,
250
+ "scr_dir2_threshold_5": 0.15021467900985702,
251
+ "scr_dir1_threshold_10": 0.40340918134740994,
252
+ "scr_metric_threshold_10": 0.2875537337059348,
253
+ "scr_dir2_threshold_10": 0.2875537337059348,
254
+ "scr_dir1_threshold_20": 0.4715907763197599,
255
+ "scr_metric_threshold_20": 0.34334776464713546,
256
+ "scr_dir2_threshold_20": 0.34334776464713546,
257
+ "scr_dir1_threshold_50": 0.31818167963801025,
258
+ "scr_metric_threshold_50": 0.3690987574608119,
259
+ "scr_dir2_threshold_50": 0.3690987574608119,
260
+ "scr_dir1_threshold_100": 0.45454554690799315,
261
+ "scr_metric_threshold_100": 0.4291845779019783,
262
+ "scr_dir2_threshold_100": 0.4291845779019783,
263
+ "scr_dir1_threshold_500": -0.7272722654600342,
264
+ "scr_metric_threshold_500": 0.042918406627421406,
265
+ "scr_dir2_threshold_500": 0.042918406627421406
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.04123688851473832,
270
+ "scr_metric_threshold_2": 0.02512560405862701,
271
+ "scr_dir2_threshold_2": 0.02512560405862701,
272
+ "scr_dir1_threshold_5": 0.1134018274660653,
273
+ "scr_metric_threshold_5": 0.04020108630213385,
274
+ "scr_dir2_threshold_5": 0.04020108630213385,
275
+ "scr_dir1_threshold_10": 0.13917499800293723,
276
+ "scr_metric_threshold_10": 0.09045229441938786,
277
+ "scr_dir2_threshold_10": 0.09045229441938786,
278
+ "scr_dir1_threshold_20": 0.2113399369542642,
279
+ "scr_metric_threshold_20": 0.12562802029313505,
280
+ "scr_dir2_threshold_20": 0.12562802029313505,
281
+ "scr_dir1_threshold_50": 0.2319585348318473,
282
+ "scr_metric_threshold_50": 0.22613073604846967,
283
+ "scr_dir2_threshold_50": 0.22613073604846967,
284
+ "scr_dir1_threshold_100": 0.23711310749113612,
285
+ "scr_metric_threshold_100": 0.3768843604002317,
286
+ "scr_dir2_threshold_100": 0.3768843604002317,
287
+ "scr_dir1_threshold_500": 0.28350487590559115,
288
+ "scr_metric_threshold_500": 0.2160803147125229,
289
+ "scr_dir2_threshold_500": 0.2160803147125229
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_16k/average_l0_309",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_34_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732145573889,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.18135386088396457,
76
+ "scr_metric_threshold_2": 0.06627557273791226,
77
+ "scr_dir2_threshold_2": 0.06627557273791226,
78
+ "scr_dir1_threshold_5": 0.24664837105141868,
79
+ "scr_metric_threshold_5": 0.12089547170475078,
80
+ "scr_dir2_threshold_5": 0.12089547170475078,
81
+ "scr_dir1_threshold_10": 0.32012514906622835,
82
+ "scr_metric_threshold_10": 0.15035562611707542,
83
+ "scr_dir2_threshold_10": 0.15035562611707542,
84
+ "scr_dir1_threshold_20": 0.3743550820468039,
85
+ "scr_metric_threshold_20": 0.1825275119722751,
86
+ "scr_dir2_threshold_20": 0.1825275119722751,
87
+ "scr_dir1_threshold_50": 0.33517118697270337,
88
+ "scr_metric_threshold_50": 0.22749681405167188,
89
+ "scr_dir2_threshold_50": 0.22749681405167188,
90
+ "scr_dir1_threshold_100": 0.3056121080391627,
91
+ "scr_metric_threshold_100": 0.2707270617617062,
92
+ "scr_dir2_threshold_100": 0.2707270617617062,
93
+ "scr_dir1_threshold_500": -0.1210875458041121,
94
+ "scr_metric_threshold_500": 0.33471540228809077,
95
+ "scr_dir2_threshold_500": 0.33471540228809077
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4285723408875132,
102
+ "scr_metric_threshold_2": 0.028169071233707016,
103
+ "scr_dir2_threshold_2": 0.028169071233707016,
104
+ "scr_dir1_threshold_5": 0.3571425530374956,
105
+ "scr_metric_threshold_5": 0.035211234104378646,
106
+ "scr_dir2_threshold_5": 0.035211234104378646,
107
+ "scr_dir1_threshold_10": 0.5,
108
+ "scr_metric_threshold_10": 0.05399061492684999,
109
+ "scr_dir2_threshold_10": 0.05399061492684999,
110
+ "scr_dir1_threshold_20": 0.607143617406261,
111
+ "scr_metric_threshold_20": 0.07042260812576412,
112
+ "scr_dir2_threshold_20": 0.07042260812576412,
113
+ "scr_dir1_threshold_50": 0.6428574469625045,
114
+ "scr_metric_threshold_50": 0.07511738337287854,
115
+ "scr_dir2_threshold_50": 0.07511738337287854,
116
+ "scr_dir1_threshold_100": 0.46428617044375664,
117
+ "scr_metric_threshold_100": 0.13145538592328573,
118
+ "scr_dir2_threshold_100": 0.13145538592328573,
119
+ "scr_dir1_threshold_500": -0.5714297878500176,
120
+ "scr_metric_threshold_500": 0.16901414756822844,
121
+ "scr_dir2_threshold_500": 0.16901414756822844
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.369230614047073,
126
+ "scr_metric_threshold_2": 0.06958765262168257,
127
+ "scr_dir2_threshold_2": 0.06958765262168257,
128
+ "scr_dir1_threshold_5": 0.49230779106095357,
129
+ "scr_metric_threshold_5": 0.12113414731564039,
130
+ "scr_dir2_threshold_5": 0.12113414731564039,
131
+ "scr_dir1_threshold_10": 0.5846152153240797,
132
+ "scr_metric_threshold_10": 0.1469073178525123,
133
+ "scr_dir2_threshold_10": 0.1469073178525123,
134
+ "scr_dir1_threshold_20": 0.6000005501967411,
135
+ "scr_metric_threshold_20": 0.17010320205973983,
136
+ "scr_dir2_threshold_20": 0.17010320205973983,
137
+ "scr_dir1_threshold_50": 0.47692337318286065,
138
+ "scr_metric_threshold_50": 0.25,
139
+ "scr_dir2_threshold_50": 0.25,
140
+ "scr_dir1_threshold_100": 0.44615362043210616,
141
+ "scr_metric_threshold_100": 0.22422682946312808,
142
+ "scr_dir2_threshold_100": 0.22422682946312808,
143
+ "scr_dir1_threshold_500": -0.5846152153240797,
144
+ "scr_metric_threshold_500": 0.3530928357677017,
145
+ "scr_dir2_threshold_500": 0.3530928357677017
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.22727235782269797,
150
+ "scr_metric_threshold_2": 0.01526713042011719,
151
+ "scr_dir2_threshold_2": 0.01526713042011719,
152
+ "scr_dir1_threshold_5": 0.3409098913841544,
153
+ "scr_metric_threshold_5": 0.04325689480129207,
154
+ "scr_dir2_threshold_5": 0.04325689480129207,
155
+ "scr_dir1_threshold_10": 0.4772730351477517,
156
+ "scr_metric_threshold_10": 0.07379130730728665,
157
+ "scr_dir2_threshold_10": 0.07379130730728665,
158
+ "scr_dir1_threshold_20": 0.5909092140591007,
159
+ "scr_metric_threshold_20": 0.09160308585222356,
160
+ "scr_dir2_threshold_20": 0.09160308585222356,
161
+ "scr_dir1_threshold_50": 0.4772730351477517,
162
+ "scr_metric_threshold_50": 0.12722649127633714,
163
+ "scr_dir2_threshold_50": 0.12722649127633714,
164
+ "scr_dir1_threshold_100": 0.22727235782269797,
165
+ "scr_metric_threshold_100": 0.14503811815551382,
166
+ "scr_dir2_threshold_100": 0.14503811815551382,
167
+ "scr_dir1_threshold_500": -1.2045440383203423,
168
+ "scr_metric_threshold_500": 0.29516538089490685,
169
+ "scr_dir2_threshold_500": 0.29516538089490685
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.08642008013510381,
174
+ "scr_metric_threshold_2": 0.01612914338360472,
175
+ "scr_dir2_threshold_2": 0.01612914338360472,
176
+ "scr_dir1_threshold_5": 0.14814839343466119,
177
+ "scr_metric_threshold_5": 0.04838710969576799,
178
+ "scr_dir2_threshold_5": 0.04838710969576799,
179
+ "scr_dir1_threshold_10": 0.2839505355217796,
180
+ "scr_metric_threshold_10": 0.07795709908069459,
181
+ "scr_dir2_threshold_10": 0.07795709908069459,
182
+ "scr_dir1_threshold_20": 0.3209878178453297,
183
+ "scr_metric_threshold_20": 0.053763544232810594,
184
+ "scr_dir2_threshold_20": 0.053763544232810594,
185
+ "scr_dir1_threshold_50": 0.18518493989867213,
186
+ "scr_metric_threshold_50": 0.07795709908069459,
187
+ "scr_dir2_threshold_50": 0.07795709908069459,
188
+ "scr_dir1_threshold_100": 0.19753119124621493,
189
+ "scr_metric_threshold_100": 0.09139794508201646,
190
+ "scr_dir2_threshold_100": 0.09139794508201646,
191
+ "scr_dir1_threshold_500": 0.3209878178453297,
192
+ "scr_metric_threshold_500": 0.05645168138757035,
193
+ "scr_dir2_threshold_500": 0.05645168138757035
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.028409063970008027,
198
+ "scr_metric_threshold_2": 0.13241999438791077,
199
+ "scr_dir2_threshold_2": 0.13241999438791077,
200
+ "scr_dir1_threshold_5": 0.051136382878565693,
201
+ "scr_metric_threshold_5": 0.21917794051566444,
202
+ "scr_dir2_threshold_5": 0.21917794051566444,
203
+ "scr_dir1_threshold_10": 0.07386370178712337,
204
+ "scr_metric_threshold_10": 0.27853871212077913,
205
+ "scr_dir2_threshold_10": 0.27853871212077913,
206
+ "scr_dir1_threshold_20": 0.051136382878565693,
207
+ "scr_metric_threshold_20": 0.3698630808083695,
208
+ "scr_dir2_threshold_20": 0.3698630808083695,
209
+ "scr_dir1_threshold_50": 0.011363828785656956,
210
+ "scr_metric_threshold_50": 0.4840182014587622,
211
+ "scr_dir2_threshold_50": 0.4840182014587622,
212
+ "scr_dir1_threshold_100": 0.028409063970008027,
213
+ "scr_metric_threshold_100": 0.5159817985412378,
214
+ "scr_dir2_threshold_100": 0.5159817985412378,
215
+ "scr_dir1_threshold_500": 0.056818127940016054,
216
+ "scr_metric_threshold_500": 0.5981733221091549,
217
+ "scr_dir2_threshold_500": 0.5981733221091549
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.13953492670246334,
222
+ "scr_metric_threshold_2": 0.040322778345250256,
223
+ "scr_dir2_threshold_2": 0.040322778345250256,
224
+ "scr_dir1_threshold_5": 0.23255790313645167,
225
+ "scr_metric_threshold_5": 0.07258066454365199,
226
+ "scr_dir2_threshold_5": 0.07258066454365199,
227
+ "scr_dir1_threshold_10": 0.2558138782706892,
228
+ "scr_metric_threshold_10": 0.0927419335456348,
229
+ "scr_dir2_threshold_10": 0.0927419335456348,
230
+ "scr_dir1_threshold_20": 0.31007766623341637,
231
+ "scr_metric_threshold_20": 0.12096785435318151,
232
+ "scr_dir2_threshold_20": 0.12096785435318151,
233
+ "scr_dir1_threshold_50": 0.2945735287934311,
234
+ "scr_metric_threshold_50": 0.2419354683650784,
235
+ "scr_dir2_threshold_50": 0.2419354683650784,
236
+ "scr_dir1_threshold_100": 0.34108501701042526,
237
+ "scr_metric_threshold_100": 0.31854833864087,
238
+ "scr_dir2_threshold_100": 0.31854833864087,
239
+ "scr_dir1_threshold_500": 0.41860478010739,
240
+ "scr_metric_threshold_500": 0.3870967974523824,
241
+ "scr_dir2_threshold_500": 0.3870967974523824
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.12500004233283019,
246
+ "scr_metric_threshold_2": 0.19313308563727843,
247
+ "scr_dir2_threshold_2": 0.19313308563727843,
248
+ "scr_dir1_threshold_5": 0.22727277345399657,
249
+ "scr_metric_threshold_5": 0.3218883055195427,
250
+ "scr_dir2_threshold_5": 0.3218883055195427,
251
+ "scr_dir1_threshold_10": 0.21022720537958833,
252
+ "scr_metric_threshold_10": 0.3733905469607776,
253
+ "scr_dir2_threshold_10": 0.3733905469607776,
254
+ "scr_dir1_threshold_20": 0.3238637614379074,
255
+ "scr_metric_threshold_20": 0.4377681569019097,
256
+ "scr_dir2_threshold_20": 0.4377681569019097,
257
+ "scr_dir1_threshold_50": 0.44886346510809605,
258
+ "scr_metric_threshold_50": 0.3476395541471012,
259
+ "scr_dir2_threshold_50": 0.3476395541471012,
260
+ "scr_dir1_threshold_100": 0.477272858119657,
261
+ "scr_metric_threshold_100": 0.46781119502943397,
262
+ "scr_dir2_threshold_100": 0.46781119502943397,
263
+ "scr_dir1_threshold_500": 0.30681819336349914,
264
+ "scr_metric_threshold_500": 0.5021460226569239,
265
+ "scr_dir2_threshold_500": 0.5021460226569239
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.04639146117402712,
270
+ "scr_metric_threshold_2": 0.03517572587374718,
271
+ "scr_dir2_threshold_2": 0.03517572587374718,
272
+ "scr_dir1_threshold_5": 0.12371128002507081,
273
+ "scr_metric_threshold_5": 0.10552747714206812,
274
+ "scr_dir2_threshold_5": 0.10552747714206812,
275
+ "scr_dir1_threshold_10": 0.17525762109881465,
276
+ "scr_metric_threshold_10": 0.10552747714206812,
277
+ "scr_dir2_threshold_10": 0.10552747714206812,
278
+ "scr_dir1_threshold_20": 0.190721646317109,
279
+ "scr_metric_threshold_20": 0.14572856344420196,
280
+ "scr_dir2_threshold_20": 0.14572856344420196,
281
+ "scr_dir1_threshold_50": 0.14432987790265395,
282
+ "scr_metric_threshold_50": 0.2160803147125229,
283
+ "scr_dir2_threshold_50": 0.2160803147125229,
284
+ "scr_dir1_threshold_100": 0.26288658526843595,
285
+ "scr_metric_threshold_100": 0.2713568832581636,
286
+ "scr_dir2_threshold_100": 0.2713568832581636,
287
+ "scr_dir1_threshold_500": 0.2886597558053079,
288
+ "scr_metric_threshold_500": 0.3165830304678575,
289
+ "scr_dir2_threshold_500": 0.3165830304678575
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_16k/average_l0_34",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_16k_average_l0_68_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "3267fd28-3a91-4e4d-a143-123879aef920",
72
+ "datetime_epoch_millis": 1732145987194,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.16928244116508742,
76
+ "scr_metric_threshold_2": 0.06267078848432106,
77
+ "scr_dir2_threshold_2": 0.06267078848432106,
78
+ "scr_dir1_threshold_5": 0.2870248620647946,
79
+ "scr_metric_threshold_5": 0.11518038713162128,
80
+ "scr_dir2_threshold_5": 0.11518038713162128,
81
+ "scr_dir1_threshold_10": 0.3287743105577888,
82
+ "scr_metric_threshold_10": 0.15454135593783394,
83
+ "scr_dir2_threshold_10": 0.15454135593783394,
84
+ "scr_dir1_threshold_20": 0.3626534857288454,
85
+ "scr_metric_threshold_20": 0.19719591244516196,
86
+ "scr_dir2_threshold_20": 0.19719591244516196,
87
+ "scr_dir1_threshold_50": 0.3106852319157752,
88
+ "scr_metric_threshold_50": 0.2532404087435781,
89
+ "scr_dir2_threshold_50": 0.2532404087435781,
90
+ "scr_dir1_threshold_100": 0.2507945280047912,
91
+ "scr_metric_threshold_100": 0.296202247869508,
92
+ "scr_dir2_threshold_100": 0.296202247869508,
93
+ "scr_dir1_threshold_500": 0.02937688466119481,
94
+ "scr_metric_threshold_500": 0.3549711286709597,
95
+ "scr_dir2_threshold_500": 0.3549711286709597
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.32142872348125223,
102
+ "scr_metric_threshold_2": 0.01643199319891413,
103
+ "scr_dir2_threshold_2": 0.01643199319891413,
104
+ "scr_dir1_threshold_5": 0.5357138295562434,
105
+ "scr_metric_threshold_5": 0.028169071233707016,
106
+ "scr_dir2_threshold_5": 0.028169071233707016,
107
+ "scr_dir1_threshold_10": 0.46428617044375664,
108
+ "scr_metric_threshold_10": 0.04694831213917153,
109
+ "scr_dir2_threshold_10": 0.04694831213917153,
110
+ "scr_dir1_threshold_20": 0.5357138295562434,
111
+ "scr_metric_threshold_20": 0.05164322730329278,
112
+ "scr_dir2_threshold_20": 0.05164322730329278,
113
+ "scr_dir1_threshold_50": 0.46428617044375664,
114
+ "scr_metric_threshold_50": 0.09389676419534988,
115
+ "scr_dir2_threshold_50": 0.09389676419534988,
116
+ "scr_dir1_threshold_100": 0.6785712765187478,
117
+ "scr_metric_threshold_100": 0.1267606106761713,
118
+ "scr_dir2_threshold_100": 0.1267606106761713,
119
+ "scr_dir1_threshold_500": -0.2857148939250088,
120
+ "scr_metric_threshold_500": 0.23943661577698572,
121
+ "scr_dir2_threshold_500": 0.23943661577698572
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3538461961689801,
126
+ "scr_metric_threshold_2": 0.05154649469395781,
127
+ "scr_dir2_threshold_2": 0.05154649469395781,
128
+ "scr_dir1_threshold_5": 0.5230766268171394,
129
+ "scr_metric_threshold_5": 0.0902062504992657,
130
+ "scr_dir2_threshold_5": 0.0902062504992657,
131
+ "scr_dir1_threshold_10": 0.5846152153240797,
132
+ "scr_metric_threshold_10": 0.11597942103613762,
133
+ "scr_dir2_threshold_10": 0.11597942103613762,
134
+ "scr_dir1_threshold_20": 0.6153849680748341,
135
+ "scr_metric_threshold_20": 0.13917530524336513,
136
+ "scr_dir2_threshold_20": 0.13917530524336513,
137
+ "scr_dir1_threshold_50": 0.5692307974459867,
138
+ "scr_metric_threshold_50": 0.21907225680383927,
139
+ "scr_dir2_threshold_50": 0.21907225680383927,
140
+ "scr_dir1_threshold_100": 0.5384619616898009,
141
+ "scr_metric_threshold_100": 0.296391768414455,
142
+ "scr_dir2_threshold_100": 0.296391768414455,
143
+ "scr_dir1_threshold_500": 0.21538460127700665,
144
+ "scr_metric_threshold_500": 0.34020625049926567,
145
+ "scr_dir2_threshold_500": 0.34020625049926567
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.29545460702955034,
150
+ "scr_metric_threshold_2": 0.02290077146305589,
151
+ "scr_dir2_threshold_2": 0.02290077146305589,
152
+ "scr_dir1_threshold_5": 0.3636368562364027,
153
+ "scr_metric_threshold_5": 0.04325689480129207,
154
+ "scr_dir2_threshold_5": 0.04325689480129207,
155
+ "scr_dir1_threshold_10": 0.5,
156
+ "scr_metric_threshold_10": 0.06870231438916767,
157
+ "scr_dir2_threshold_10": 0.06870231438916767,
158
+ "scr_dir1_threshold_20": 0.5681822492068523,
159
+ "scr_metric_threshold_20": 0.09669207877034255,
160
+ "scr_dir2_threshold_20": 0.09669207877034255,
161
+ "scr_dir1_threshold_50": 0.3636368562364027,
162
+ "scr_metric_threshold_50": 0.12213734669245793,
163
+ "scr_dir2_threshold_50": 0.12213734669245793,
164
+ "scr_dir1_threshold_100": 0.15909146326595303,
165
+ "scr_metric_threshold_100": 0.15012711107363283,
166
+ "scr_dir2_threshold_100": 0.15012711107363283,
167
+ "scr_dir1_threshold_500": -0.31818157188179863,
168
+ "scr_metric_threshold_500": 0.25699747901173375,
169
+ "scr_dir2_threshold_500": 0.25699747901173375
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.06172831329955738,
174
+ "scr_metric_threshold_2": 0.01612914338360472,
175
+ "scr_dir2_threshold_2": 0.01612914338360472,
176
+ "scr_dir1_threshold_5": 0.27160502003377596,
177
+ "scr_metric_threshold_5": 0.03763440084920587,
178
+ "scr_dir2_threshold_5": 0.03763440084920587,
179
+ "scr_dir1_threshold_10": 0.2839505355217796,
180
+ "scr_metric_threshold_10": 0.07258066454365199,
181
+ "scr_dir2_threshold_10": 0.07258066454365199,
182
+ "scr_dir1_threshold_20": 0.2839505355217796,
183
+ "scr_metric_threshold_20": 0.032258126539686356,
184
+ "scr_dir2_threshold_20": 0.032258126539686356,
185
+ "scr_dir1_threshold_50": 0.1728394244106685,
186
+ "scr_metric_threshold_50": 0.07795709908069459,
187
+ "scr_dir2_threshold_50": 0.07795709908069459,
188
+ "scr_dir1_threshold_100": -0.24691325319822952,
189
+ "scr_metric_threshold_100": 0.11021506539285786,
190
+ "scr_dir2_threshold_100": 0.11021506539285786,
191
+ "scr_dir1_threshold_500": -0.3209878178453297,
192
+ "scr_metric_threshold_500": 0.1424731919325442,
193
+ "scr_dir2_threshold_500": 0.1424731919325442
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.051136382878565693,
198
+ "scr_metric_threshold_2": 0.1689497418629469,
199
+ "scr_dir2_threshold_2": 0.1689497418629469,
200
+ "scr_dir1_threshold_5": 0.06250021166422265,
201
+ "scr_metric_threshold_5": 0.2648402609430977,
202
+ "scr_dir2_threshold_5": 0.2648402609430977,
203
+ "scr_dir1_threshold_10": 0.09659102069568104,
204
+ "scr_metric_threshold_10": 0.3333333333333333,
205
+ "scr_dir2_threshold_10": 0.3333333333333333,
206
+ "scr_dir1_threshold_20": 0.051136382878565693,
207
+ "scr_metric_threshold_20": 0.46118717732868364,
208
+ "scr_dir2_threshold_20": 0.46118717732868364,
209
+ "scr_dir1_threshold_50": -0.028409063970008027,
210
+ "scr_metric_threshold_50": 0.5388128226713164,
211
+ "scr_dir2_threshold_50": 0.5388128226713164,
212
+ "scr_dir1_threshold_100": 0.04545463781711534,
213
+ "scr_metric_threshold_100": 0.5433789730638768,
214
+ "scr_dir2_threshold_100": 0.5433789730638768,
215
+ "scr_dir1_threshold_500": 0.034090809031458384,
216
+ "scr_metric_threshold_500": 0.5525112738489978,
217
+ "scr_dir2_threshold_500": 0.5525112738489978
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.13178308900821112,
222
+ "scr_metric_threshold_2": 0.056451601273808806,
223
+ "scr_dir2_threshold_2": 0.056451601273808806,
224
+ "scr_dir1_threshold_5": 0.1860464149194575,
225
+ "scr_metric_threshold_5": 0.07661287027579163,
226
+ "scr_dir2_threshold_5": 0.07661287027579163,
227
+ "scr_dir1_threshold_10": 0.23255790313645167,
228
+ "scr_metric_threshold_10": 0.11693564862104187,
229
+ "scr_dir2_threshold_10": 0.11693564862104187,
230
+ "scr_dir1_threshold_20": 0.2635657159649414,
231
+ "scr_metric_threshold_20": 0.16935480382142643,
232
+ "scr_dir2_threshold_20": 0.16935480382142643,
233
+ "scr_dir1_threshold_50": 0.23255790313645167,
234
+ "scr_metric_threshold_50": 0.26209673736706124,
235
+ "scr_dir2_threshold_50": 0.26209673736706124,
236
+ "scr_dir1_threshold_100": 0.2868216910991789,
237
+ "scr_metric_threshold_100": 0.3750001802559635,
238
+ "scr_dir2_threshold_100": 0.3750001802559635,
239
+ "scr_dir1_threshold_500": 0.42635661780164225,
240
+ "scr_metric_threshold_500": 0.4758065252658775,
241
+ "scr_dir2_threshold_500": 0.4758065252658775
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.10795447425842195,
246
+ "scr_metric_threshold_2": 0.12875547569614632,
247
+ "scr_dir2_threshold_2": 0.12875547569614632,
248
+ "scr_dir1_threshold_5": 0.25568182780291593,
249
+ "scr_metric_threshold_5": 0.3304721403333562,
250
+ "scr_dir2_threshold_5": 0.3304721403333562,
251
+ "scr_dir1_threshold_10": 0.3238637614379074,
252
+ "scr_metric_threshold_10": 0.35622313314703263,
253
+ "scr_dir2_threshold_10": 0.35622313314703263,
254
+ "scr_dir1_threshold_20": 0.3977274382101544,
255
+ "scr_metric_threshold_20": 0.44635199171572326,
256
+ "scr_dir2_threshold_20": 0.44635199171572326,
257
+ "scr_dir1_threshold_50": 0.4999998306686792,
258
+ "scr_metric_threshold_50": 0.450643781215689,
259
+ "scr_dir2_threshold_50": 0.450643781215689,
260
+ "scr_dir1_threshold_100": 0.26136357094017154,
261
+ "scr_metric_threshold_100": 0.5064378121568897,
262
+ "scr_dir2_threshold_100": 0.5064378121568897,
263
+ "scr_dir1_threshold_500": 0.2727273958773242,
264
+ "scr_metric_threshold_500": 0.5107296016568553,
265
+ "scr_dir2_threshold_500": 0.5107296016568553
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.030927743196160724,
270
+ "scr_metric_threshold_2": 0.04020108630213385,
271
+ "scr_dir2_threshold_2": 0.04020108630213385,
272
+ "scr_dir1_threshold_5": 0.0979381094881989,
273
+ "scr_metric_threshold_5": 0.05025120811725402,
274
+ "scr_dir2_threshold_5": 0.05025120811725402,
275
+ "scr_dir1_threshold_10": 0.14432987790265395,
276
+ "scr_metric_threshold_10": 0.12562802029313505,
277
+ "scr_dir2_threshold_10": 0.12562802029313505,
278
+ "scr_dir1_threshold_20": 0.18556676641739225,
279
+ "scr_metric_threshold_20": 0.18090458883877572,
280
+ "scr_dir2_threshold_20": 0.18090458883877572,
281
+ "scr_dir1_threshold_50": 0.2113399369542642,
282
+ "scr_metric_threshold_50": 0.26130646192221685,
283
+ "scr_dir2_threshold_50": 0.26130646192221685,
284
+ "scr_dir1_threshold_100": 0.28350487590559115,
285
+ "scr_metric_threshold_100": 0.26130646192221685,
286
+ "scr_dir2_threshold_100": 0.26130646192221685,
287
+ "scr_dir1_threshold_500": 0.2113399369542642,
288
+ "scr_metric_threshold_500": 0.3216080913754176,
289
+ "scr_dir2_threshold_500": 0.3216080913754176
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_16k/average_l0_68",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_105_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732181248702,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.17054927434795833,
76
+ "scr_metric_threshold_2": 0.03513970463569734,
77
+ "scr_dir2_threshold_2": 0.03513970463569734,
78
+ "scr_dir1_threshold_5": 0.26385538968135,
79
+ "scr_metric_threshold_5": 0.06124801734514245,
80
+ "scr_dir2_threshold_5": 0.06124801734514245,
81
+ "scr_dir1_threshold_10": 0.32588406598056496,
82
+ "scr_metric_threshold_10": 0.10303706088162391,
83
+ "scr_dir2_threshold_10": 0.10303706088162391,
84
+ "scr_dir1_threshold_20": 0.3943029178328679,
85
+ "scr_metric_threshold_20": 0.13371773341360824,
86
+ "scr_dir2_threshold_20": 0.13371773341360824,
87
+ "scr_dir1_threshold_50": 0.35571585777011394,
88
+ "scr_metric_threshold_50": 0.19406306100937196,
89
+ "scr_dir2_threshold_50": 0.19406306100937196,
90
+ "scr_dir1_threshold_100": 0.3685569080790775,
91
+ "scr_metric_threshold_100": 0.249219545805795,
92
+ "scr_dir2_threshold_100": 0.249219545805795,
93
+ "scr_dir1_threshold_500": 0.27959893209126135,
94
+ "scr_metric_threshold_500": 0.3602012624266308,
95
+ "scr_dir2_threshold_500": 0.3602012624266308
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2857148939250088,
102
+ "scr_metric_threshold_2": 0.014084465658350092,
103
+ "scr_dir2_threshold_2": 0.014084465658350092,
104
+ "scr_dir1_threshold_5": 0.4285723408875132,
105
+ "scr_metric_threshold_5": 0.014084465658350092,
106
+ "scr_dir2_threshold_5": 0.014084465658350092,
107
+ "scr_dir1_threshold_10": 0.46428617044375664,
108
+ "scr_metric_threshold_10": 0.028169071233707016,
109
+ "scr_dir2_threshold_10": 0.028169071233707016,
110
+ "scr_dir1_threshold_20": 0.607143617406261,
111
+ "scr_metric_threshold_20": 0.05164322730329278,
112
+ "scr_dir2_threshold_20": 0.05164322730329278,
113
+ "scr_dir1_threshold_50": 0.6428574469625045,
114
+ "scr_metric_threshold_50": 0.07042260812576412,
115
+ "scr_dir2_threshold_50": 0.07042260812576412,
116
+ "scr_dir1_threshold_100": 0.7499989356312345,
117
+ "scr_metric_threshold_100": 0.09389676419534988,
118
+ "scr_dir2_threshold_100": 0.09389676419534988,
119
+ "scr_dir1_threshold_500": 0.32142872348125223,
120
+ "scr_metric_threshold_500": 0.22300476249507842,
121
+ "scr_dir2_threshold_500": 0.22300476249507842
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.3384617782908872,
126
+ "scr_metric_threshold_2": 0.03608246947566349,
127
+ "scr_dir2_threshold_2": 0.03608246947566349,
128
+ "scr_dir1_threshold_5": 0.44615362043210616,
129
+ "scr_metric_threshold_5": 0.07474237890118533,
130
+ "scr_dir2_threshold_5": 0.07474237890118533,
131
+ "scr_dir1_threshold_10": 0.5076922089390464,
132
+ "scr_metric_threshold_10": 0.10309283576770166,
133
+ "scr_dir2_threshold_10": 0.10309283576770166,
134
+ "scr_dir1_threshold_20": 0.6000005501967411,
135
+ "scr_metric_threshold_20": 0.11855670736578201,
136
+ "scr_dir2_threshold_20": 0.11855670736578201,
137
+ "scr_dir1_threshold_50": 0.5692307974459867,
138
+ "scr_metric_threshold_50": 0.15721661679130386,
139
+ "scr_dir2_threshold_50": 0.15721661679130386,
140
+ "scr_dir1_threshold_100": 0.5692307974459867,
141
+ "scr_metric_threshold_100": 0.20103094525590054,
142
+ "scr_dir2_threshold_100": 0.20103094525590054,
143
+ "scr_dir1_threshold_500": 0.6153849680748341,
144
+ "scr_metric_threshold_500": 0.3195876526216826,
145
+ "scr_dir2_threshold_500": 0.3195876526216826
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.18181842811820134,
150
+ "scr_metric_threshold_2": 0.012722633961057692,
151
+ "scr_dir2_threshold_2": 0.012722633961057692,
152
+ "scr_dir1_threshold_5": 0.3409098913841544,
153
+ "scr_metric_threshold_5": 0.017811626879176687,
154
+ "scr_dir2_threshold_5": 0.017811626879176687,
155
+ "scr_dir1_threshold_10": 0.45454607029550337,
156
+ "scr_metric_threshold_10": 0.04071239834223257,
157
+ "scr_dir2_threshold_10": 0.04071239834223257,
158
+ "scr_dir1_threshold_20": 0.545455284354604,
159
+ "scr_metric_threshold_20": 0.06361316980528846,
160
+ "scr_dir2_threshold_20": 0.06361316980528846,
161
+ "scr_dir1_threshold_50": 0.4772730351477517,
162
+ "scr_metric_threshold_50": 0.10941471273140024,
163
+ "scr_dir2_threshold_50": 0.10941471273140024,
164
+ "scr_dir1_threshold_100": 0.386363821088651,
165
+ "scr_metric_threshold_100": 0.13231548419445613,
166
+ "scr_dir2_threshold_100": 0.13231548419445613,
167
+ "scr_dir1_threshold_500": -0.022726964852248312,
168
+ "scr_metric_threshold_500": 0.20101779858362379,
169
+ "scr_dir2_threshold_500": 0.20101779858362379
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.23456773771022588,
174
+ "scr_metric_threshold_2": 0.013440846001321878,
175
+ "scr_dir2_threshold_2": 0.013440846001321878,
176
+ "scr_dir1_threshold_5": 0.345678848821337,
177
+ "scr_metric_threshold_5": 0.01612914338360472,
178
+ "scr_dir2_threshold_5": 0.01612914338360472,
179
+ "scr_dir1_threshold_10": 0.38271613114488706,
180
+ "scr_metric_threshold_10": 0.043010835386248475,
181
+ "scr_dir2_threshold_10": 0.043010835386248475,
182
+ "scr_dir1_threshold_20": 0.4567899599324481,
183
+ "scr_metric_threshold_20": 0.010752708846562119,
184
+ "scr_dir2_threshold_20": 0.010752708846562119,
185
+ "scr_dir1_threshold_50": 0.4567899599324481,
186
+ "scr_metric_threshold_50": 0.07526880169841174,
187
+ "scr_dir2_threshold_50": 0.07526880169841174,
188
+ "scr_dir1_threshold_100": 0.27160502003377596,
189
+ "scr_metric_threshold_100": 0.08602151054497387,
190
+ "scr_dir2_threshold_100": 0.08602151054497387,
191
+ "scr_dir1_threshold_500": 0.14814839343466119,
192
+ "scr_metric_threshold_500": 0.11021506539285786,
193
+ "scr_dir2_threshold_500": 0.11021506539285786
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.028409063970008027,
198
+ "scr_metric_threshold_2": 0.12328769360278982,
199
+ "scr_dir2_threshold_2": 0.12328769360278982,
200
+ "scr_dir1_threshold_5": 0.056818127940016054,
201
+ "scr_metric_threshold_5": 0.16438359147038642,
202
+ "scr_dir2_threshold_5": 0.16438359147038642,
203
+ "scr_dir1_threshold_10": 0.051136382878565693,
204
+ "scr_metric_threshold_10": 0.21917794051566444,
205
+ "scr_dir2_threshold_10": 0.21917794051566444,
206
+ "scr_dir1_threshold_20": 0.051136382878565693,
207
+ "scr_metric_threshold_20": 0.31506845959581525,
208
+ "scr_dir2_threshold_20": 0.31506845959581525,
209
+ "scr_dir1_threshold_50": -0.07954544684857372,
210
+ "scr_metric_threshold_50": 0.41552512906852657,
211
+ "scr_dir2_threshold_50": 0.41552512906852657,
212
+ "scr_dir1_threshold_100": -0.056818127940016054,
213
+ "scr_metric_threshold_100": 0.5479451234564373,
214
+ "scr_dir2_threshold_100": 0.5479451234564373,
215
+ "scr_dir1_threshold_500": -0.051136382878565693,
216
+ "scr_metric_threshold_500": 0.62557076879907,
217
+ "scr_dir2_threshold_500": 0.62557076879907
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.12403078926247804,
222
+ "scr_metric_threshold_2": 0.012096857537703539,
223
+ "scr_dir2_threshold_2": 0.012096857537703539,
224
+ "scr_dir1_threshold_5": 0.21705422774794722,
225
+ "scr_metric_threshold_5": 0.028225920807546715,
226
+ "scr_dir2_threshold_5": 0.028225920807546715,
227
+ "scr_dir1_threshold_10": 0.2635657159649414,
228
+ "scr_metric_threshold_10": 0.07661287027579163,
229
+ "scr_dir2_threshold_10": 0.07661287027579163,
230
+ "scr_dir1_threshold_20": 0.2868216910991789,
231
+ "scr_metric_threshold_20": 0.0927419335456348,
232
+ "scr_dir2_threshold_20": 0.0927419335456348,
233
+ "scr_dir1_threshold_50": 0.16279043978521998,
234
+ "scr_metric_threshold_50": 0.2056451360932524,
235
+ "scr_dir2_threshold_50": 0.2056451360932524,
236
+ "scr_dir1_threshold_100": 0.3023253664876833,
237
+ "scr_metric_threshold_100": 0.29032265817460795,
238
+ "scr_dir2_threshold_100": 0.29032265817460795,
239
+ "scr_dir1_threshold_500": 0.36434099214466276,
240
+ "scr_metric_threshold_500": 0.5040322057321397,
241
+ "scr_dir2_threshold_500": 0.5040322057321397
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.12500004233283019,
246
+ "scr_metric_threshold_2": 0.0643776099411321,
247
+ "scr_dir2_threshold_2": 0.0643776099411321,
248
+ "scr_dir1_threshold_5": 0.19318197596782163,
249
+ "scr_metric_threshold_5": 0.15450646850982275,
250
+ "scr_dir2_threshold_5": 0.15450646850982275,
251
+ "scr_dir1_threshold_10": 0.31818167963801025,
252
+ "scr_metric_threshold_10": 0.2532189060784448,
253
+ "scr_dir2_threshold_10": 0.2532189060784448,
254
+ "scr_dir1_threshold_20": 0.4318182356963293,
255
+ "scr_metric_threshold_20": 0.3218883055195427,
256
+ "scr_dir2_threshold_20": 0.3218883055195427,
257
+ "scr_dir1_threshold_50": 0.42045441075917667,
258
+ "scr_metric_threshold_50": 0.40343358508830185,
259
+ "scr_dir2_threshold_50": 0.40343358508830185,
260
+ "scr_dir1_threshold_100": 0.4886363443941681,
261
+ "scr_metric_threshold_100": 0.47639477402936536,
262
+ "scr_dir2_threshold_100": 0.47639477402936536,
263
+ "scr_dir1_threshold_500": 0.5056819124685763,
264
+ "scr_metric_threshold_500": 0.566523632598056,
265
+ "scr_dir2_threshold_500": 0.566523632598056
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.04639146117402712,
270
+ "scr_metric_threshold_2": 0.005025060907560086,
271
+ "scr_dir2_threshold_2": 0.005025060907560086,
272
+ "scr_dir1_threshold_5": 0.08247408426990457,
273
+ "scr_metric_threshold_5": 0.020100543151066925,
274
+ "scr_dir2_threshold_5": 0.020100543151066925,
275
+ "scr_dir1_threshold_10": 0.16494816853980915,
276
+ "scr_metric_threshold_10": 0.06030162945320077,
277
+ "scr_dir2_threshold_10": 0.06030162945320077,
278
+ "scr_dir1_threshold_20": 0.17525762109881465,
279
+ "scr_metric_threshold_20": 0.09547735532694795,
280
+ "scr_dir2_threshold_20": 0.09547735532694795,
281
+ "scr_dir1_threshold_50": 0.1958762189763978,
282
+ "scr_metric_threshold_50": 0.11557789847801488,
283
+ "scr_dir2_threshold_50": 0.11557789847801488,
284
+ "scr_dir1_threshold_100": 0.23711310749113612,
285
+ "scr_metric_threshold_100": 0.1658291065952689,
286
+ "scr_dir2_threshold_100": 0.1658291065952689,
287
+ "scr_dir1_threshold_500": 0.3556698148569181,
288
+ "scr_metric_threshold_500": 0.3316582131905378,
289
+ "scr_dir2_threshold_500": 0.3316582131905378
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_65k/average_l0_105",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_17_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732182110801,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.1363566988136543,
76
+ "scr_metric_threshold_2": 0.027699553886629015,
77
+ "scr_dir2_threshold_2": 0.027699553886629015,
78
+ "scr_dir1_threshold_5": 0.21525531878102952,
79
+ "scr_metric_threshold_5": 0.054384170644885904,
80
+ "scr_dir2_threshold_5": 0.054384170644885904,
81
+ "scr_dir1_threshold_10": 0.26847766723592825,
82
+ "scr_metric_threshold_10": 0.08222623654566083,
83
+ "scr_dir2_threshold_10": 0.08222623654566083,
84
+ "scr_dir1_threshold_20": 0.29060488816610186,
85
+ "scr_metric_threshold_20": 0.10800636434940818,
86
+ "scr_dir2_threshold_20": 0.10800636434940818,
87
+ "scr_dir1_threshold_50": 0.3112266281659473,
88
+ "scr_metric_threshold_50": 0.1526981295168183,
89
+ "scr_dir2_threshold_50": 0.1526981295168183,
90
+ "scr_dir1_threshold_100": 0.34056954753457025,
91
+ "scr_metric_threshold_100": 0.18874049831802828,
92
+ "scr_dir2_threshold_100": 0.18874049831802828,
93
+ "scr_dir1_threshold_500": 0.09997839042526786,
94
+ "scr_metric_threshold_500": 0.2523438351104831,
95
+ "scr_dir2_threshold_500": 0.2523438351104831
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.3571425530374956,
102
+ "scr_metric_threshold_2": 0.021126768446028555,
103
+ "scr_dir2_threshold_2": 0.021126768446028555,
104
+ "scr_dir1_threshold_5": 0.39285638259373895,
105
+ "scr_metric_threshold_5": 0.03286384648082143,
106
+ "scr_dir2_threshold_5": 0.03286384648082143,
107
+ "scr_dir1_threshold_10": 0.46428617044375664,
108
+ "scr_metric_threshold_10": 0.04694831213917153,
109
+ "scr_dir2_threshold_10": 0.04694831213917153,
110
+ "scr_dir1_threshold_20": 0.5357138295562434,
111
+ "scr_metric_threshold_20": 0.05868553009097124,
112
+ "scr_dir2_threshold_20": 0.05868553009097124,
113
+ "scr_dir1_threshold_50": 0.6785712765187478,
114
+ "scr_metric_threshold_50": 0.07981215861999297,
115
+ "scr_dir2_threshold_50": 0.07981215861999297,
116
+ "scr_dir1_threshold_100": 0.6428574469625045,
117
+ "scr_metric_threshold_100": 0.09154937657179267,
118
+ "scr_dir2_threshold_100": 0.09154937657179267,
119
+ "scr_dir1_threshold_500": -0.6428574469625045,
120
+ "scr_metric_threshold_500": 0.2464789185646642,
121
+ "scr_dir2_threshold_500": 0.2464789185646642
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.29230760766203984,
126
+ "scr_metric_threshold_2": 0.06443307996239377,
127
+ "scr_dir2_threshold_2": 0.06443307996239377,
128
+ "scr_dir1_threshold_5": 0.46153895530476774,
129
+ "scr_metric_threshold_5": 0.08505152421976293,
130
+ "scr_dir2_threshold_5": 0.08505152421976293,
131
+ "scr_dir1_threshold_10": 0.5230766268171394,
132
+ "scr_metric_threshold_10": 0.10567012209734605,
133
+ "scr_dir2_threshold_10": 0.10567012209734605,
134
+ "scr_dir1_threshold_20": 0.5384619616898009,
135
+ "scr_metric_threshold_20": 0.11597942103613762,
136
+ "scr_dir2_threshold_20": 0.11597942103613762,
137
+ "scr_dir1_threshold_50": 0.46153895530476774,
138
+ "scr_metric_threshold_50": 0.16237118945059267,
139
+ "scr_dir2_threshold_50": 0.16237118945059267,
140
+ "scr_dir1_threshold_100": 0.49230779106095357,
141
+ "scr_metric_threshold_100": 0.20360823158554495,
142
+ "scr_dir2_threshold_100": 0.20360823158554495,
143
+ "scr_dir1_threshold_500": 0.261538771905854,
144
+ "scr_metric_threshold_500": 0.12113414731564039,
145
+ "scr_dir2_threshold_500": 0.12113414731564039
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.11363617891134899,
150
+ "scr_metric_threshold_2": 0.005088992918118993,
151
+ "scr_dir2_threshold_2": 0.005088992918118993,
152
+ "scr_dir1_threshold_5": 0.3409098913841544,
153
+ "scr_metric_threshold_5": 0.025445267922115385,
154
+ "scr_dir2_threshold_5": 0.025445267922115385,
155
+ "scr_dir1_threshold_10": 0.4772730351477517,
156
+ "scr_metric_threshold_10": 0.04325689480129207,
157
+ "scr_dir2_threshold_10": 0.04325689480129207,
158
+ "scr_dir1_threshold_20": 0.5,
159
+ "scr_metric_threshold_20": 0.05597952876234976,
160
+ "scr_dir2_threshold_20": 0.05597952876234976,
161
+ "scr_dir1_threshold_50": 0.545455284354604,
162
+ "scr_metric_threshold_50": 0.08905843772740385,
163
+ "scr_dir2_threshold_50": 0.08905843772740385,
164
+ "scr_dir1_threshold_100": 0.5909092140591007,
165
+ "scr_metric_threshold_100": 0.11195920919045974,
166
+ "scr_dir2_threshold_100": 0.11195920919045974,
167
+ "scr_dir1_threshold_500": -0.36363550158629526,
168
+ "scr_metric_threshold_500": 0.23155205942385818,
169
+ "scr_dir2_threshold_500": 0.23155205942385818
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.06172831329955738,
174
+ "scr_metric_threshold_2": 0.0026882973822828417,
175
+ "scr_dir2_threshold_2": 0.0026882973822828417,
176
+ "scr_dir1_threshold_5": 0.16049390892266485,
177
+ "scr_metric_threshold_5": 0.013440846001321878,
178
+ "scr_dir2_threshold_5": 0.013440846001321878,
179
+ "scr_dir1_threshold_10": 0.27160502003377596,
180
+ "scr_metric_threshold_10": 0.024193554847883995,
181
+ "scr_dir2_threshold_10": 0.024193554847883995,
182
+ "scr_dir1_threshold_20": 0.30864230235732604,
183
+ "scr_metric_threshold_20": 0.00806457169180236,
184
+ "scr_dir2_threshold_20": 0.00806457169180236,
185
+ "scr_dir1_threshold_50": 0.3209878178453297,
186
+ "scr_metric_threshold_50": 0.04838710969576799,
187
+ "scr_dir2_threshold_50": 0.04838710969576799,
188
+ "scr_dir1_threshold_100": 0.30864230235732604,
189
+ "scr_metric_threshold_100": 0.032258126539686356,
190
+ "scr_dir2_threshold_100": 0.032258126539686356,
191
+ "scr_dir1_threshold_500": 0.6049383533671093,
192
+ "scr_metric_threshold_500": 0.00806457169180236,
193
+ "scr_dir2_threshold_500": 0.00806457169180236
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.005681745061450357,
198
+ "scr_metric_threshold_2": 0.06392692199767515,
199
+ "scr_dir2_threshold_2": 0.06392692199767515,
200
+ "scr_dir1_threshold_5": 0.02272731890855767,
201
+ "scr_metric_threshold_5": 0.10045666947271129,
202
+ "scr_dir2_threshold_5": 0.10045666947271129,
203
+ "scr_dir1_threshold_10": -0.02272731890855767,
204
+ "scr_metric_threshold_10": 0.1141551206503927,
205
+ "scr_dir2_threshold_10": 0.1141551206503927,
206
+ "scr_dir1_threshold_20": -0.034090809031458384,
207
+ "scr_metric_threshold_20": 0.15068486812542883,
208
+ "scr_dir2_threshold_20": 0.15068486812542883,
209
+ "scr_dir1_threshold_50": -0.06249987300146641,
210
+ "scr_metric_threshold_50": 0.24200923681301917,
211
+ "scr_dir2_threshold_50": 0.24200923681301917,
212
+ "scr_dir1_threshold_100": -0.034090809031458384,
213
+ "scr_metric_threshold_100": 0.3105023092032548,
214
+ "scr_dir2_threshold_100": 0.3105023092032548,
215
+ "scr_dir1_threshold_500": 0.11363625588003211,
216
+ "scr_metric_threshold_500": 0.4109589786759661,
217
+ "scr_dir2_threshold_500": 0.4109589786759661
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.15503860209096776,
222
+ "scr_metric_threshold_2": 0.012096857537703539,
223
+ "scr_dir2_threshold_2": 0.012096857537703539,
224
+ "scr_dir1_threshold_5": 0.17054273953095306,
225
+ "scr_metric_threshold_5": 0.028225920807546715,
226
+ "scr_dir2_threshold_5": 0.028225920807546715,
227
+ "scr_dir1_threshold_10": 0.17054273953095306,
228
+ "scr_metric_threshold_10": 0.032258126539686356,
229
+ "scr_dir2_threshold_10": 0.032258126539686356,
230
+ "scr_dir1_threshold_20": 0.1860464149194575,
231
+ "scr_metric_threshold_20": 0.06451625307937271,
232
+ "scr_dir2_threshold_20": 0.06451625307937271,
233
+ "scr_dir1_threshold_50": 0.209302390053695,
234
+ "scr_metric_threshold_50": 0.14112912335516434,
235
+ "scr_dir2_threshold_50": 0.14112912335516434,
236
+ "scr_dir1_threshold_100": 0.27131801571067443,
237
+ "scr_metric_threshold_100": 0.2137097878988163,
238
+ "scr_dir2_threshold_100": 0.2137097878988163,
239
+ "scr_dir1_threshold_500": 0.34108501701042526,
240
+ "scr_metric_threshold_500": 0.32661299044643394,
241
+ "scr_dir2_threshold_500": 0.32661299044643394
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.07954541990950256,
246
+ "scr_metric_threshold_2": 0.04721045194126922,
247
+ "scr_dir2_threshold_2": 0.04721045194126922,
248
+ "scr_dir1_threshold_5": 0.14204561040723843,
249
+ "scr_metric_threshold_5": 0.12446368619618059,
250
+ "scr_dir2_threshold_5": 0.12446368619618059,
251
+ "scr_dir1_threshold_10": 0.1761364078934134,
252
+ "scr_metric_threshold_10": 0.23605149226469982,
253
+ "scr_dir2_threshold_10": 0.23605149226469982,
254
+ "scr_dir1_threshold_20": 0.18181815103066895,
255
+ "scr_metric_threshold_20": 0.3347639298333219,
256
+ "scr_dir2_threshold_20": 0.3347639298333219,
257
+ "scr_dir1_threshold_50": 0.18181815103066895,
258
+ "scr_metric_threshold_50": 0.3733905469607776,
259
+ "scr_dir2_threshold_50": 0.3733905469607776,
260
+ "scr_dir1_threshold_100": 0.26704565274006864,
261
+ "scr_metric_threshold_100": 0.3905579607745226,
262
+ "scr_dir2_threshold_100": 0.3905579607745226,
263
+ "scr_dir1_threshold_500": 0.2840908821518353,
264
+ "scr_metric_threshold_500": 0.4377681569019097,
265
+ "scr_dir2_threshold_500": 0.4377681569019097
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.025773170536871923,
270
+ "scr_metric_threshold_2": 0.005025060907560086,
271
+ "scr_dir2_threshold_2": 0.005025060907560086,
272
+ "scr_dir1_threshold_5": 0.030927743196160724,
273
+ "scr_metric_threshold_5": 0.02512560405862701,
274
+ "scr_dir2_threshold_5": 0.02512560405862701,
275
+ "scr_dir1_threshold_10": 0.08762865692919337,
276
+ "scr_metric_threshold_10": 0.055276269024814105,
277
+ "scr_dir2_threshold_10": 0.055276269024814105,
278
+ "scr_dir1_threshold_20": 0.1082472548067765,
279
+ "scr_metric_threshold_20": 0.07537681217588102,
280
+ "scr_dir2_threshold_20": 0.07537681217588102,
281
+ "scr_dir1_threshold_50": 0.15463902322123155,
282
+ "scr_metric_threshold_50": 0.08542723351182778,
283
+ "scr_dir2_threshold_50": 0.08542723351182778,
284
+ "scr_dir1_threshold_100": 0.18556676641739225,
285
+ "scr_metric_threshold_100": 0.1557789847801487,
286
+ "scr_dir2_threshold_100": 0.1557789847801487,
287
+ "scr_dir1_threshold_500": 0.2010307916356866,
288
+ "scr_metric_threshold_500": 0.23618085786358983,
289
+ "scr_dir2_threshold_500": 0.23618085786358983
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_65k/average_l0_17",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_211_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732182975189,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.2431506277308686,
76
+ "scr_metric_threshold_2": 0.032838664182140556,
77
+ "scr_dir2_threshold_2": 0.032838664182140556,
78
+ "scr_dir1_threshold_5": 0.31559304664372506,
79
+ "scr_metric_threshold_5": 0.06218591081129843,
80
+ "scr_dir2_threshold_5": 0.06218591081129843,
81
+ "scr_dir1_threshold_10": 0.36046783404271493,
82
+ "scr_metric_threshold_10": 0.09567561786477598,
83
+ "scr_dir2_threshold_10": 0.09567561786477598,
84
+ "scr_dir1_threshold_20": 0.3853676689817936,
85
+ "scr_metric_threshold_20": 0.11647712754150044,
86
+ "scr_dir2_threshold_20": 0.11647712754150044,
87
+ "scr_dir1_threshold_50": 0.35434951299848444,
88
+ "scr_metric_threshold_50": 0.1740255918070966,
89
+ "scr_dir2_threshold_50": 0.1740255918070966,
90
+ "scr_dir1_threshold_100": 0.37086910546898705,
91
+ "scr_metric_threshold_100": 0.2352365407593985,
92
+ "scr_dir2_threshold_100": 0.2352365407593985,
93
+ "scr_dir1_threshold_500": 0.3263093840703489,
94
+ "scr_metric_threshold_500": 0.3823823102100171,
95
+ "scr_dir2_threshold_500": 0.3823823102100171
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.5357138295562434,
102
+ "scr_metric_threshold_2": 0.01643199319891413,
103
+ "scr_dir2_threshold_2": 0.01643199319891413,
104
+ "scr_dir1_threshold_5": 0.5714276591124867,
105
+ "scr_metric_threshold_5": 0.023474156069585764,
106
+ "scr_dir2_threshold_5": 0.023474156069585764,
107
+ "scr_dir1_threshold_10": 0.607143617406261,
108
+ "scr_metric_threshold_10": 0.0399061492684999,
109
+ "scr_dir2_threshold_10": 0.0399061492684999,
110
+ "scr_dir1_threshold_20": 0.6428574469625045,
111
+ "scr_metric_threshold_20": 0.05868553009097124,
112
+ "scr_dir2_threshold_20": 0.05868553009097124,
113
+ "scr_dir1_threshold_50": 0.607143617406261,
114
+ "scr_metric_threshold_50": 0.07981215861999297,
115
+ "scr_dir2_threshold_50": 0.07981215861999297,
116
+ "scr_dir1_threshold_100": 0.7857148939250088,
117
+ "scr_metric_threshold_100": 0.04694831213917153,
118
+ "scr_dir2_threshold_100": 0.04694831213917153,
119
+ "scr_dir1_threshold_500": 0.6785712765187478,
120
+ "scr_metric_threshold_500": 0.22535215011863563,
121
+ "scr_dir2_threshold_500": 0.22535215011863563
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.4153847846759204,
126
+ "scr_metric_threshold_2": 0.030927896816374686,
127
+ "scr_dir2_threshold_2": 0.030927896816374686,
128
+ "scr_dir1_threshold_5": 0.5230766268171394,
129
+ "scr_metric_threshold_5": 0.05154649469395781,
130
+ "scr_dir2_threshold_5": 0.05154649469395781,
131
+ "scr_dir1_threshold_10": 0.5846152153240797,
132
+ "scr_metric_threshold_10": 0.06443307996239377,
133
+ "scr_dir2_threshold_10": 0.06443307996239377,
134
+ "scr_dir1_threshold_20": 0.6153849680748341,
135
+ "scr_metric_threshold_20": 0.12371143364528478,
136
+ "scr_dir2_threshold_20": 0.12371143364528478,
137
+ "scr_dir1_threshold_50": 0.6000005501967411,
138
+ "scr_metric_threshold_50": 0.16237118945059267,
139
+ "scr_dir2_threshold_50": 0.16237118945059267,
140
+ "scr_dir1_threshold_100": 0.5538463795678938,
141
+ "scr_metric_threshold_100": 0.20876295786504773,
142
+ "scr_dir2_threshold_100": 0.20876295786504773,
143
+ "scr_dir1_threshold_500": 0.630769385952927,
144
+ "scr_metric_threshold_500": 0.23195884207227524,
145
+ "scr_dir2_threshold_500": 0.23195884207227524
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3636368562364027,
150
+ "scr_metric_threshold_2": 0.010178137501998197,
151
+ "scr_dir2_threshold_2": 0.010178137501998197,
152
+ "scr_dir1_threshold_5": 0.5227269648522483,
153
+ "scr_metric_threshold_5": 0.017811626879176687,
154
+ "scr_dir2_threshold_5": 0.017811626879176687,
155
+ "scr_dir1_threshold_10": 0.5681822492068523,
156
+ "scr_metric_threshold_10": 0.03053426084023438,
157
+ "scr_dir2_threshold_10": 0.03053426084023438,
158
+ "scr_dir1_threshold_20": 0.545455284354604,
159
+ "scr_metric_threshold_20": 0.05343503230329027,
160
+ "scr_dir2_threshold_20": 0.05343503230329027,
161
+ "scr_dir1_threshold_50": 0.5227269648522483,
162
+ "scr_metric_threshold_50": 0.08905843772740385,
163
+ "scr_dir2_threshold_50": 0.08905843772740385,
164
+ "scr_dir1_threshold_100": 0.272727642177302,
165
+ "scr_metric_threshold_100": 0.11959285023339844,
166
+ "scr_dir2_threshold_100": 0.11959285023339844,
167
+ "scr_dir1_threshold_500": -0.340908536734047,
168
+ "scr_metric_threshold_500": 0.32315514527608175,
169
+ "scr_dir2_threshold_500": 0.32315514527608175
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.2839505355217796,
174
+ "scr_metric_threshold_2": 0.00806457169180236,
175
+ "scr_dir2_threshold_2": 0.00806457169180236,
176
+ "scr_dir1_threshold_5": 0.35802436430934065,
177
+ "scr_metric_threshold_5": 0.013440846001321878,
178
+ "scr_dir2_threshold_5": 0.013440846001321878,
179
+ "scr_dir1_threshold_10": 0.3703706156568834,
180
+ "scr_metric_threshold_10": 0.04569897254100823,
181
+ "scr_dir2_threshold_10": 0.04569897254100823,
182
+ "scr_dir1_threshold_20": 0.3703706156568834,
183
+ "scr_metric_threshold_20": -0.013440846001321878,
184
+ "scr_dir2_threshold_20": -0.013440846001321878,
185
+ "scr_dir1_threshold_50": 0.29629605100978323,
186
+ "scr_metric_threshold_50": 0.05107524685052775,
187
+ "scr_dir2_threshold_50": 0.05107524685052775,
188
+ "scr_dir1_threshold_100": 0.3209878178453297,
189
+ "scr_metric_threshold_100": 0.11290320254761761,
190
+ "scr_dir2_threshold_100": 0.11290320254761761,
191
+ "scr_dir1_threshold_500": 0.23456773771022588,
192
+ "scr_metric_threshold_500": 0.11827963708466022,
193
+ "scr_dir2_threshold_500": 0.11827963708466022
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.02272731890855767,
198
+ "scr_metric_threshold_2": 0.10958897025783222,
199
+ "scr_dir2_threshold_2": 0.10958897025783222,
200
+ "scr_dir1_threshold_5": 0.034090809031458384,
201
+ "scr_metric_threshold_5": 0.14611871773286836,
202
+ "scr_dir2_threshold_5": 0.14611871773286836,
203
+ "scr_dir1_threshold_10": 0.056818127940016054,
204
+ "scr_metric_threshold_10": 0.18721461560046498,
205
+ "scr_dir2_threshold_10": 0.18721461560046498,
206
+ "scr_dir1_threshold_20": 0.04545463781711534,
207
+ "scr_metric_threshold_20": 0.26027383838326107,
208
+ "scr_dir2_threshold_20": 0.26027383838326107,
209
+ "scr_dir1_threshold_50": -0.03977255409290874,
210
+ "scr_metric_threshold_50": 0.38812768237861134,
211
+ "scr_dir2_threshold_50": 0.38812768237861134,
212
+ "scr_dir1_threshold_100": 0.011363828785656956,
213
+ "scr_metric_threshold_100": 0.49771692480371976,
214
+ "scr_dir2_threshold_100": 0.49771692480371976,
215
+ "scr_dir1_threshold_500": 0.005681745061450357,
216
+ "scr_metric_threshold_500": 0.6438356425365881,
217
+ "scr_dir2_threshold_500": 0.6438356425365881
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.10852711387397361,
222
+ "scr_metric_threshold_2": 0.008064651805563901,
223
+ "scr_dir2_threshold_2": 0.008064651805563901,
224
+ "scr_dir1_threshold_5": 0.20155055235944278,
225
+ "scr_metric_threshold_5": 0.07258066454365199,
226
+ "scr_dir2_threshold_5": 0.07258066454365199,
227
+ "scr_dir1_threshold_10": 0.24031020288218471,
228
+ "scr_metric_threshold_10": 0.07258066454365199,
229
+ "scr_dir2_threshold_10": 0.07258066454365199,
230
+ "scr_dir1_threshold_20": 0.2790698534049267,
231
+ "scr_metric_threshold_20": 0.1290322658174608,
232
+ "scr_dir2_threshold_20": 0.1290322658174608,
233
+ "scr_dir1_threshold_50": 0.24806204057643694,
234
+ "scr_metric_threshold_50": 0.2056451360932524,
235
+ "scr_dir2_threshold_50": 0.2056451360932524,
236
+ "scr_dir1_threshold_100": 0.24806204057643694,
237
+ "scr_metric_threshold_100": 0.2943548639067476,
238
+ "scr_dir2_threshold_100": 0.2943548639067476,
239
+ "scr_dir1_threshold_500": 0.41860478010739,
240
+ "scr_metric_threshold_500": 0.5,
241
+ "scr_dir2_threshold_500": 0.5
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.15340909668174957,
246
+ "scr_metric_threshold_2": 0.0643776099411321,
247
+ "scr_dir2_threshold_2": 0.0643776099411321,
248
+ "scr_dir1_threshold_5": 0.21590928717948543,
249
+ "scr_metric_threshold_5": 0.13733905469607777,
250
+ "scr_dir2_threshold_5": 0.13733905469607777,
251
+ "scr_dir1_threshold_10": 0.30681819336349914,
252
+ "scr_metric_threshold_10": 0.24463532707851338,
253
+ "scr_dir2_threshold_10": 0.24463532707851338,
254
+ "scr_dir1_threshold_20": 0.40909092448466555,
255
+ "scr_metric_threshold_20": 0.21459228895098914,
256
+ "scr_dir2_threshold_20": 0.21459228895098914,
257
+ "scr_dir1_threshold_50": 0.4147726676219211,
258
+ "scr_metric_threshold_50": 0.2703863198921898,
259
+ "scr_dir2_threshold_50": 0.2703863198921898,
260
+ "scr_dir1_threshold_100": 0.5113636556058319,
261
+ "scr_metric_threshold_100": 0.3905579607745226,
262
+ "scr_dir2_threshold_100": 0.3905579607745226,
263
+ "scr_dir1_threshold_500": 0.59659081865259,
264
+ "scr_metric_threshold_500": 0.6094420392254775,
265
+ "scr_dir2_threshold_500": 0.6094420392254775
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.06185548639232145,
270
+ "scr_metric_threshold_2": 0.015075482243506837,
271
+ "scr_dir2_threshold_2": 0.015075482243506837,
272
+ "scr_dir1_threshold_5": 0.0979381094881989,
273
+ "scr_metric_threshold_5": 0.03517572587374718,
274
+ "scr_dir2_threshold_5": 0.03517572587374718,
275
+ "scr_dir1_threshold_10": 0.14948445056194273,
276
+ "scr_metric_threshold_10": 0.08040187308344111,
277
+ "scr_dir2_threshold_10": 0.08040187308344111,
278
+ "scr_dir1_threshold_20": 0.17525762109881465,
279
+ "scr_metric_threshold_20": 0.10552747714206812,
280
+ "scr_dir2_threshold_20": 0.10552747714206812,
281
+ "scr_dir1_threshold_50": 0.18556676641739225,
282
+ "scr_metric_threshold_50": 0.14572856344420196,
283
+ "scr_dir2_threshold_50": 0.14572856344420196,
284
+ "scr_dir1_threshold_100": 0.26288658526843595,
285
+ "scr_metric_threshold_100": 0.21105525380496282,
286
+ "scr_dir2_threshold_100": 0.21105525380496282,
287
+ "scr_dir1_threshold_500": 0.38659786529350676,
288
+ "scr_metric_threshold_500": 0.4070350253664188,
289
+ "scr_dir2_threshold_500": 0.4070350253664188
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_65k/average_l0_211",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_29_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732183839297,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.12658742924426053,
76
+ "scr_metric_threshold_2": 0.03912358746152851,
77
+ "scr_dir2_threshold_2": 0.03912358746152851,
78
+ "scr_dir1_threshold_5": 0.2622713484137316,
79
+ "scr_metric_threshold_5": 0.06965970096930328,
80
+ "scr_dir2_threshold_5": 0.06965970096930328,
81
+ "scr_dir1_threshold_10": 0.29048837753058215,
82
+ "scr_metric_threshold_10": 0.09600597286096252,
83
+ "scr_dir2_threshold_10": 0.09600597286096252,
84
+ "scr_dir1_threshold_20": 0.35511036133696916,
85
+ "scr_metric_threshold_20": 0.1228530604583989,
86
+ "scr_dir2_threshold_20": 0.1228530604583989,
87
+ "scr_dir1_threshold_50": 0.3236488763443772,
88
+ "scr_metric_threshold_50": 0.16770420737256775,
89
+ "scr_dir2_threshold_50": 0.16770420737256775,
90
+ "scr_dir1_threshold_100": 0.34120698479036793,
91
+ "scr_metric_threshold_100": 0.22610119684910493,
92
+ "scr_dir2_threshold_100": 0.22610119684910493,
93
+ "scr_dir1_threshold_500": 0.0956535059733725,
94
+ "scr_metric_threshold_500": 0.2996043450056894,
95
+ "scr_dir2_threshold_500": 0.2996043450056894
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.24999893563123454,
102
+ "scr_metric_threshold_2": 0.023474156069585764,
103
+ "scr_dir2_threshold_2": 0.023474156069585764,
104
+ "scr_dir1_threshold_5": 0.5357138295562434,
105
+ "scr_metric_threshold_5": 0.037558761644942686,
106
+ "scr_dir2_threshold_5": 0.037558761644942686,
107
+ "scr_dir1_threshold_10": 0.5357138295562434,
108
+ "scr_metric_threshold_10": 0.04694831213917153,
109
+ "scr_dir2_threshold_10": 0.04694831213917153,
110
+ "scr_dir1_threshold_20": 0.6428574469625045,
111
+ "scr_metric_threshold_20": 0.05164322730329278,
112
+ "scr_dir2_threshold_20": 0.05164322730329278,
113
+ "scr_dir1_threshold_50": 0.6428574469625045,
114
+ "scr_metric_threshold_50": 0.07042260812576412,
115
+ "scr_dir2_threshold_50": 0.07042260812576412,
116
+ "scr_dir1_threshold_100": 0.7142851060749912,
117
+ "scr_metric_threshold_100": 0.10093892706602152,
118
+ "scr_dir2_threshold_100": 0.10093892706602152,
119
+ "scr_dir1_threshold_500": -0.7857148939250088,
120
+ "scr_metric_threshold_500": 0.21830984733095718,
121
+ "scr_dir2_threshold_500": 0.21830984733095718
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.2769231897839469,
126
+ "scr_metric_threshold_2": 0.04639176841445505,
127
+ "scr_dir2_threshold_2": 0.04639176841445505,
128
+ "scr_dir1_threshold_5": 0.49230779106095357,
129
+ "scr_metric_threshold_5": 0.08762896416962129,
130
+ "scr_dir2_threshold_5": 0.08762896416962129,
131
+ "scr_dir1_threshold_10": 0.5538463795678938,
132
+ "scr_metric_threshold_10": 0.10824740842699046,
133
+ "scr_dir2_threshold_10": 0.10824740842699046,
134
+ "scr_dir1_threshold_20": 0.5846152153240797,
135
+ "scr_metric_threshold_20": 0.13402073258407635,
136
+ "scr_dir2_threshold_20": 0.13402073258407635,
137
+ "scr_dir1_threshold_50": 0.5076922089390464,
138
+ "scr_metric_threshold_50": 0.16237118945059267,
139
+ "scr_dir2_threshold_50": 0.16237118945059267,
140
+ "scr_dir1_threshold_100": 0.5846152153240797,
141
+ "scr_metric_threshold_100": 0.25,
142
+ "scr_dir2_threshold_100": 0.25,
143
+ "scr_dir1_threshold_500": 0.5076922089390464,
144
+ "scr_metric_threshold_500": 0.2757733241570859,
145
+ "scr_dir2_threshold_500": 0.2757733241570859
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.18181842811820134,
150
+ "scr_metric_threshold_2": 0.01526713042011719,
151
+ "scr_dir2_threshold_2": 0.01526713042011719,
152
+ "scr_dir1_threshold_5": 0.386363821088651,
153
+ "scr_metric_threshold_5": 0.03816790188317308,
154
+ "scr_dir2_threshold_5": 0.03816790188317308,
155
+ "scr_dir1_threshold_10": 0.43181775079314766,
156
+ "scr_metric_threshold_10": 0.05597952876234976,
157
+ "scr_dir2_threshold_10": 0.05597952876234976,
158
+ "scr_dir1_threshold_20": 0.5227269648522483,
159
+ "scr_metric_threshold_20": 0.07124681084822716,
160
+ "scr_dir2_threshold_20": 0.07124681084822716,
161
+ "scr_dir1_threshold_50": 0.5227269648522483,
162
+ "scr_metric_threshold_50": 0.10178107168846154,
163
+ "scr_dir2_threshold_50": 0.10178107168846154,
164
+ "scr_dir1_threshold_100": 0.3409098913841544,
165
+ "scr_metric_threshold_100": 0.10941471273140024,
166
+ "scr_dir2_threshold_100": 0.10941471273140024,
167
+ "scr_dir1_threshold_500": -0.2954532523794429,
168
+ "scr_metric_threshold_500": 0.2595419754707933,
169
+ "scr_dir2_threshold_500": 0.2595419754707933
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.08642008013510381,
174
+ "scr_metric_threshold_2": 0.013440846001321878,
175
+ "scr_dir2_threshold_2": 0.013440846001321878,
176
+ "scr_dir1_threshold_5": 0.2592595045457723,
177
+ "scr_metric_threshold_5": 0.01612914338360472,
178
+ "scr_dir2_threshold_5": 0.01612914338360472,
179
+ "scr_dir1_threshold_10": 0.24691325319822952,
180
+ "scr_metric_threshold_10": 0.05107524685052775,
181
+ "scr_dir2_threshold_10": 0.05107524685052775,
182
+ "scr_dir1_threshold_20": 0.3703706156568834,
183
+ "scr_metric_threshold_20": 0.010752708846562119,
184
+ "scr_dir2_threshold_20": 0.010752708846562119,
185
+ "scr_dir1_threshold_50": 0.2839505355217796,
186
+ "scr_metric_threshold_50": 0.04838710969576799,
187
+ "scr_dir2_threshold_50": 0.04838710969576799,
188
+ "scr_dir1_threshold_100": 0.3209878178453297,
189
+ "scr_metric_threshold_100": 0.09946235654629575,
190
+ "scr_dir2_threshold_100": 0.09946235654629575,
191
+ "scr_dir1_threshold_500": 0.41975341346843714,
192
+ "scr_metric_threshold_500": -0.021505257465601155,
193
+ "scr_dir2_threshold_500": -0.021505257465601155
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.02272731890855767,
198
+ "scr_metric_threshold_2": 0.08219179573519321,
199
+ "scr_dir2_threshold_2": 0.08219179573519321,
200
+ "scr_dir1_threshold_5": 0.034090809031458384,
201
+ "scr_metric_threshold_5": 0.10502281986527176,
202
+ "scr_dir2_threshold_5": 0.10502281986527176,
203
+ "scr_dir1_threshold_10": 0.051136382878565693,
204
+ "scr_metric_threshold_10": 0.1369864169477474,
205
+ "scr_dir2_threshold_10": 0.1369864169477474,
206
+ "scr_dir1_threshold_20": 0.056818127940016054,
207
+ "scr_metric_threshold_20": 0.18721461560046498,
208
+ "scr_dir2_threshold_20": 0.18721461560046498,
209
+ "scr_dir1_threshold_50": -0.02272731890855767,
210
+ "scr_metric_threshold_50": 0.27853871212077913,
211
+ "scr_dir2_threshold_50": 0.27853871212077913,
212
+ "scr_dir1_threshold_100": -0.02272731890855767,
213
+ "scr_metric_threshold_100": 0.39269410493844803,
214
+ "scr_dir2_threshold_100": 0.39269410493844803,
215
+ "scr_dir1_threshold_500": -0.017045573847107313,
216
+ "scr_metric_threshold_500": 0.5022830751962802,
217
+ "scr_dir2_threshold_500": 0.5022830751962802
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.15503860209096776,
222
+ "scr_metric_threshold_2": 0.024193715075407077,
223
+ "scr_dir2_threshold_2": 0.024193715075407077,
224
+ "scr_dir1_threshold_5": 0.17054273953095306,
225
+ "scr_metric_threshold_5": 0.05241939554166917,
226
+ "scr_dir2_threshold_5": 0.05241939554166917,
227
+ "scr_dir1_threshold_10": 0.14728676439671556,
228
+ "scr_metric_threshold_10": 0.040322778345250256,
229
+ "scr_dir2_threshold_10": 0.040322778345250256,
230
+ "scr_dir1_threshold_20": 0.1860464149194575,
231
+ "scr_metric_threshold_20": 0.08064531634921589,
232
+ "scr_dir2_threshold_20": 0.08064531634921589,
233
+ "scr_dir1_threshold_50": 0.22480606544219944,
234
+ "scr_metric_threshold_50": 0.14112912335516434,
235
+ "scr_dir2_threshold_50": 0.14112912335516434,
236
+ "scr_dir1_threshold_100": 0.27131801571067443,
237
+ "scr_metric_threshold_100": 0.21774199363095595,
238
+ "scr_dir2_threshold_100": 0.21774199363095595,
239
+ "scr_dir1_threshold_500": 0.3255813416219208,
240
+ "scr_metric_threshold_500": 0.39112900318452204,
241
+ "scr_dir2_threshold_500": 0.39112900318452204
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.03977287928607206,
246
+ "scr_metric_threshold_2": 0.10300422706858779,
247
+ "scr_dir2_threshold_2": 0.10300422706858779,
248
+ "scr_dir1_threshold_5": 0.147727353544494,
249
+ "scr_metric_threshold_5": 0.21030049945102341,
250
+ "scr_dir2_threshold_5": 0.21030049945102341,
251
+ "scr_dir1_threshold_10": 0.23863625972850772,
252
+ "scr_metric_threshold_10": 0.28326194420596906,
253
+ "scr_dir2_threshold_10": 0.28326194420596906,
254
+ "scr_dir1_threshold_20": 0.3124999365007547,
255
+ "scr_metric_threshold_20": 0.38197438177459114,
256
+ "scr_dir2_threshold_20": 0.38197438177459114,
257
+ "scr_dir1_threshold_50": 0.24431834152840481,
258
+ "scr_metric_threshold_50": 0.433476367401944,
259
+ "scr_dir2_threshold_50": 0.433476367401944,
260
+ "scr_dir1_threshold_100": 0.32954550457516296,
261
+ "scr_metric_threshold_100": 0.49785397734307607,
262
+ "scr_dir2_threshold_100": 0.49785397734307607,
263
+ "scr_dir1_threshold_500": 0.3011364502262436,
264
+ "scr_metric_threshold_500": 0.5150213911568211,
265
+ "scr_dir2_threshold_500": 0.5150213911568211
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.0,
270
+ "scr_metric_threshold_2": 0.005025060907560086,
271
+ "scr_dir2_threshold_2": 0.005025060907560086,
272
+ "scr_dir1_threshold_5": 0.07216493895132697,
273
+ "scr_metric_threshold_5": 0.010050121815120171,
274
+ "scr_dir2_threshold_5": 0.010050121815120171,
275
+ "scr_dir1_threshold_10": 0.1185564001253541,
276
+ "scr_metric_threshold_10": 0.04522614720969393,
277
+ "scr_dir2_threshold_10": 0.04522614720969393,
278
+ "scr_dir1_threshold_20": 0.16494816853980915,
279
+ "scr_metric_threshold_20": 0.06532669036076086,
280
+ "scr_dir2_threshold_20": 0.06532669036076086,
281
+ "scr_dir1_threshold_50": 0.18556676641739225,
282
+ "scr_metric_threshold_50": 0.10552747714206812,
283
+ "scr_dir2_threshold_50": 0.10552747714206812,
284
+ "scr_dir1_threshold_100": 0.190721646317109,
285
+ "scr_metric_threshold_100": 0.14070350253664188,
286
+ "scr_dir2_threshold_100": 0.14070350253664188,
287
+ "scr_dir1_threshold_500": 0.309278353682891,
288
+ "scr_metric_threshold_500": 0.25628140101465674,
289
+ "scr_dir2_threshold_500": 0.25628140101465674
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_65k/average_l0_29",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-2b-pt-res/scr/gemma-scope-2b-pt-res_layer_5_width_65k_average_l0_53_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "da3131d3-09a1-442d-afc4-33375e189fb3",
72
+ "datetime_epoch_millis": 1732184710096,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.12499654838232388,
76
+ "scr_metric_threshold_2": 0.04484837503889204,
77
+ "scr_dir2_threshold_2": 0.04484837503889204,
78
+ "scr_dir1_threshold_5": 0.27662244157358035,
79
+ "scr_metric_threshold_5": 0.07063996356616027,
80
+ "scr_dir2_threshold_5": 0.07063996356616027,
81
+ "scr_dir1_threshold_10": 0.3276114650682804,
82
+ "scr_metric_threshold_10": 0.10157113666110139,
83
+ "scr_dir2_threshold_10": 0.10157113666110139,
84
+ "scr_dir1_threshold_20": 0.353666344812546,
85
+ "scr_metric_threshold_20": 0.13351585732035592,
86
+ "scr_dir2_threshold_20": 0.13351585732035592,
87
+ "scr_dir1_threshold_50": 0.3239730777531515,
88
+ "scr_metric_threshold_50": 0.18248176907808852,
89
+ "scr_dir2_threshold_50": 0.18248176907808852,
90
+ "scr_dir1_threshold_100": 0.3076813700072183,
91
+ "scr_metric_threshold_100": 0.23710683179675363,
92
+ "scr_dir2_threshold_100": 0.23710683179675363,
93
+ "scr_dir1_threshold_500": 0.11648298893798527,
94
+ "scr_metric_threshold_500": 0.32567596566074314,
95
+ "scr_dir2_threshold_500": 0.32567596566074314
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.24999893563123454,
102
+ "scr_metric_threshold_2": 0.018779380822471343,
103
+ "scr_dir2_threshold_2": 0.018779380822471343,
104
+ "scr_dir1_threshold_5": 0.5,
105
+ "scr_metric_threshold_5": 0.023474156069585764,
106
+ "scr_dir2_threshold_5": 0.023474156069585764,
107
+ "scr_dir1_threshold_10": 0.5,
108
+ "scr_metric_threshold_10": 0.04225353689205711,
109
+ "scr_dir2_threshold_10": 0.04225353689205711,
110
+ "scr_dir1_threshold_20": 0.6428574469625045,
111
+ "scr_metric_threshold_20": 0.05164322730329278,
112
+ "scr_dir2_threshold_20": 0.05164322730329278,
113
+ "scr_dir1_threshold_50": 0.7499989356312345,
114
+ "scr_metric_threshold_50": 0.07042260812576412,
115
+ "scr_dir2_threshold_50": 0.07042260812576412,
116
+ "scr_dir1_threshold_100": 0.7142851060749912,
117
+ "scr_metric_threshold_100": 0.10093892706602152,
118
+ "scr_dir2_threshold_100": 0.10093892706602152,
119
+ "scr_dir1_threshold_500": -0.3571425530374956,
120
+ "scr_metric_threshold_500": 0.21596245970739997,
121
+ "scr_dir2_threshold_500": 0.21596245970739997
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.261538771905854,
126
+ "scr_metric_threshold_2": 0.04896905474409945,
127
+ "scr_dir2_threshold_2": 0.04896905474409945,
128
+ "scr_dir1_threshold_5": 0.5076922089390464,
129
+ "scr_metric_threshold_5": 0.0902062504992657,
130
+ "scr_dir2_threshold_5": 0.0902062504992657,
131
+ "scr_dir1_threshold_10": 0.5538463795678938,
132
+ "scr_metric_threshold_10": 0.11597942103613762,
133
+ "scr_dir2_threshold_10": 0.11597942103613762,
134
+ "scr_dir1_threshold_20": 0.6000005501967411,
135
+ "scr_metric_threshold_20": 0.14432987790265395,
136
+ "scr_dir2_threshold_20": 0.14432987790265395,
137
+ "scr_dir1_threshold_50": 0.44615362043210616,
138
+ "scr_metric_threshold_50": 0.17525777471902862,
139
+ "scr_dir2_threshold_50": 0.17525777471902862,
140
+ "scr_dir1_threshold_100": 0.49230779106095357,
141
+ "scr_metric_threshold_100": 0.2654640252182943,
142
+ "scr_dir2_threshold_100": 0.2654640252182943,
143
+ "scr_dir1_threshold_500": 0.47692337318286065,
144
+ "scr_metric_threshold_500": 0.29123719575516627,
145
+ "scr_dir2_threshold_500": 0.29123719575516627
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.13636449841370474,
150
+ "scr_metric_threshold_2": 0.012722633961057692,
151
+ "scr_dir2_threshold_2": 0.012722633961057692,
152
+ "scr_dir1_threshold_5": 0.4090907859408993,
153
+ "scr_metric_threshold_5": 0.017811626879176687,
154
+ "scr_dir2_threshold_5": 0.017811626879176687,
155
+ "scr_dir1_threshold_10": 0.5681822492068523,
156
+ "scr_metric_threshold_10": 0.05343503230329027,
157
+ "scr_dir2_threshold_10": 0.05343503230329027,
158
+ "scr_dir1_threshold_20": 0.545455284354604,
159
+ "scr_metric_threshold_20": 0.06870231438916767,
160
+ "scr_dir2_threshold_20": 0.06870231438916767,
161
+ "scr_dir1_threshold_50": 0.43181775079314766,
162
+ "scr_metric_threshold_50": 0.10178107168846154,
163
+ "scr_dir2_threshold_50": 0.10178107168846154,
164
+ "scr_dir1_threshold_100": 0.272727642177302,
165
+ "scr_metric_threshold_100": 0.12468184315151744,
166
+ "scr_dir2_threshold_100": 0.12468184315151744,
167
+ "scr_dir1_threshold_500": -0.045453929704496625,
168
+ "scr_metric_threshold_500": 0.1933841575406851,
169
+ "scr_dir2_threshold_500": 0.1933841575406851
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.04938279781155373,
174
+ "scr_metric_threshold_2": 0.010752708846562119,
175
+ "scr_dir2_threshold_2": 0.010752708846562119,
176
+ "scr_dir1_threshold_5": 0.30864230235732604,
177
+ "scr_metric_threshold_5": 0.021505417693124237,
178
+ "scr_dir2_threshold_5": 0.021505417693124237,
179
+ "scr_dir1_threshold_10": 0.345678848821337,
180
+ "scr_metric_threshold_10": 0.04569897254100823,
181
+ "scr_dir2_threshold_10": 0.04569897254100823,
182
+ "scr_dir1_threshold_20": 0.38271613114488706,
183
+ "scr_metric_threshold_20": 0.021505417693124237,
184
+ "scr_dir2_threshold_20": 0.021505417693124237,
185
+ "scr_dir1_threshold_50": 0.29629605100978323,
186
+ "scr_metric_threshold_50": 0.053763544232810594,
187
+ "scr_dir2_threshold_50": 0.053763544232810594,
188
+ "scr_dir1_threshold_100": 0.12345662659911476,
189
+ "scr_metric_threshold_100": 0.06720439023413247,
190
+ "scr_dir2_threshold_100": 0.06720439023413247,
191
+ "scr_dir1_threshold_500": -0.14814765757512205,
192
+ "scr_metric_threshold_500": -0.002688137154759759,
193
+ "scr_dir2_threshold_500": -0.002688137154759759
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.011363828785656956,
198
+ "scr_metric_threshold_2": 0.10958897025783222,
199
+ "scr_dir2_threshold_2": 0.10958897025783222,
200
+ "scr_dir1_threshold_5": 0.03977289275566498,
201
+ "scr_metric_threshold_5": 0.14611871773286836,
202
+ "scr_dir2_threshold_5": 0.14611871773286836,
203
+ "scr_dir1_threshold_10": 0.06818195672567301,
204
+ "scr_metric_threshold_10": 0.17351589225550737,
205
+ "scr_dir2_threshold_10": 0.17351589225550737,
206
+ "scr_dir1_threshold_20": 0.051136382878565693,
207
+ "scr_metric_threshold_20": 0.24200923681301917,
208
+ "scr_dir2_threshold_20": 0.24200923681301917,
209
+ "scr_dir1_threshold_50": -0.056818127940016054,
210
+ "scr_metric_threshold_50": 0.34703205667829096,
211
+ "scr_dir2_threshold_50": 0.34703205667829096,
212
+ "scr_dir1_threshold_100": -0.02272731890855767,
213
+ "scr_metric_threshold_100": 0.4520546043762865,
214
+ "scr_dir2_threshold_100": 0.4520546043762865,
215
+ "scr_dir1_threshold_500": -0.04545463781711534,
216
+ "scr_metric_threshold_500": 0.5662099971939554,
217
+ "scr_dir2_threshold_500": 0.5662099971939554
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.14728676439671556,
222
+ "scr_metric_threshold_2": 0.024193715075407077,
223
+ "scr_dir2_threshold_2": 0.024193715075407077,
224
+ "scr_dir1_threshold_5": 0.19379825261370973,
225
+ "scr_metric_threshold_5": 0.024193715075407077,
226
+ "scr_dir2_threshold_5": 0.024193715075407077,
227
+ "scr_dir1_threshold_10": 0.16279043978521998,
228
+ "scr_metric_threshold_10": 0.040322778345250256,
229
+ "scr_dir2_threshold_10": 0.040322778345250256,
230
+ "scr_dir1_threshold_20": 0.19379825261370973,
231
+ "scr_metric_threshold_20": 0.08467752208135552,
232
+ "scr_dir2_threshold_20": 0.08467752208135552,
233
+ "scr_dir1_threshold_50": 0.20155055235944278,
234
+ "scr_metric_threshold_50": 0.15322598089286787,
235
+ "scr_dir2_threshold_50": 0.15322598089286787,
236
+ "scr_dir1_threshold_100": 0.2790698534049267,
237
+ "scr_metric_threshold_100": 0.23790326263293876,
238
+ "scr_dir2_threshold_100": 0.23790326263293876,
239
+ "scr_dir1_threshold_500": 0.3875969672789003,
240
+ "scr_metric_threshold_500": 0.427419335456348,
241
+ "scr_dir2_threshold_500": 0.427419335456348
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.10795447425842195,
246
+ "scr_metric_threshold_2": 0.12875547569614632,
247
+ "scr_dir2_threshold_2": 0.12875547569614632,
248
+ "scr_dir1_threshold_5": 0.18181815103066895,
249
+ "scr_metric_threshold_5": 0.23175970276473412,
250
+ "scr_dir2_threshold_5": 0.23175970276473412,
251
+ "scr_dir1_threshold_10": 0.2727273958773242,
252
+ "scr_metric_threshold_10": 0.29613731270586624,
253
+ "scr_dir2_threshold_10": 0.29613731270586624,
254
+ "scr_dir1_threshold_20": 0.23295451659125213,
255
+ "scr_metric_threshold_20": 0.36480696796084616,
256
+ "scr_dir2_threshold_20": 0.36480696796084616,
257
+ "scr_dir1_threshold_50": 0.3011364502262436,
258
+ "scr_metric_threshold_50": 0.4377681569019097,
259
+ "scr_dir2_threshold_50": 0.4377681569019097,
260
+ "scr_dir1_threshold_100": 0.38068187013574617,
261
+ "scr_metric_threshold_100": 0.49785397734307607,
262
+ "scr_dir2_threshold_100": 0.49785397734307607,
263
+ "scr_dir1_threshold_500": 0.31818167963801025,
264
+ "scr_metric_threshold_500": 0.5922746254117325,
265
+ "scr_dir2_threshold_500": 0.5922746254117325
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.03608231585544952,
270
+ "scr_metric_threshold_2": 0.005025060907560086,
271
+ "scr_dir2_threshold_2": 0.005025060907560086,
272
+ "scr_dir1_threshold_5": 0.07216493895132697,
273
+ "scr_metric_threshold_5": 0.010050121815120171,
274
+ "scr_dir2_threshold_5": 0.010050121815120171,
275
+ "scr_dir1_threshold_10": 0.14948445056194273,
276
+ "scr_metric_threshold_10": 0.04522614720969393,
277
+ "scr_dir2_threshold_10": 0.04522614720969393,
278
+ "scr_dir1_threshold_20": 0.18041219375810347,
279
+ "scr_metric_threshold_20": 0.09045229441938786,
280
+ "scr_dir2_threshold_20": 0.09045229441938786,
281
+ "scr_dir1_threshold_50": 0.2216493895132697,
282
+ "scr_metric_threshold_50": 0.12060295938557496,
283
+ "scr_dir2_threshold_50": 0.12060295938557496,
284
+ "scr_dir1_threshold_100": 0.2216493895132697,
285
+ "scr_metric_threshold_100": 0.15075362435176204,
286
+ "scr_dir2_threshold_100": 0.15075362435176204,
287
+ "scr_dir1_threshold_500": 0.34536066953834055,
288
+ "scr_metric_threshold_500": 0.3216080913754176,
289
+ "scr_dir2_threshold_500": 0.3216080913754176
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_5/width_65k/average_l0_53",
294
+ "sae_lens_release_id": "gemma-scope-2b-pt-res",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/gemma-scope-9b-pt-res-canonical/scr/gemma-scope-9b-pt-res-canonical_layer_9_width_16k_canonical_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-9b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5599316a-fde9-4d6e-b55c-b4f23f213dab",
72
+ "datetime_epoch_millis": 1732195931804,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.26844469170294294,
76
+ "scr_metric_threshold_2": 0.06402471270682004,
77
+ "scr_dir2_threshold_2": 0.06402471270682004,
78
+ "scr_dir1_threshold_5": 0.28711018731101595,
79
+ "scr_metric_threshold_5": 0.14511459094346388,
80
+ "scr_dir2_threshold_5": 0.14511459094346388,
81
+ "scr_dir1_threshold_10": 0.3205183392570567,
82
+ "scr_metric_threshold_10": 0.22141205397572933,
83
+ "scr_dir2_threshold_10": 0.22141205397572933,
84
+ "scr_dir1_threshold_20": 0.2951365439287145,
85
+ "scr_metric_threshold_20": 0.31002086392023087,
86
+ "scr_dir2_threshold_20": 0.31002086392023087,
87
+ "scr_dir1_threshold_50": 0.18900283075811042,
88
+ "scr_metric_threshold_50": 0.4256189398348345,
89
+ "scr_dir2_threshold_50": 0.4256189398348345,
90
+ "scr_dir1_threshold_100": -0.03827424458802126,
91
+ "scr_metric_threshold_100": 0.4953786935319285,
92
+ "scr_dir2_threshold_100": 0.4953786935319285,
93
+ "scr_dir1_threshold_500": -0.3595947719409785,
94
+ "scr_metric_threshold_500": 0.48917779254464344,
95
+ "scr_dir2_threshold_500": 0.48917779254464344
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.511627745796144,
102
+ "scr_metric_threshold_2": 0.016627147324558472,
103
+ "scr_dir2_threshold_2": 0.016627147324558472,
104
+ "scr_dir1_threshold_5": 0.5581401151338816,
105
+ "scr_metric_threshold_5": 0.06413303983119285,
106
+ "scr_dir2_threshold_5": 0.06413303983119285,
107
+ "scr_dir1_threshold_10": 0.5813956067261696,
108
+ "scr_metric_threshold_10": 0.0973871929015824,
109
+ "scr_dir2_threshold_10": 0.0973871929015824,
110
+ "scr_dir1_threshold_20": 0.534883237388432,
111
+ "scr_metric_threshold_20": 0.10688839971865476,
112
+ "scr_dir2_threshold_20": 0.10688839971865476,
113
+ "scr_dir1_threshold_50": 0.534883237388432,
114
+ "scr_metric_threshold_50": 0.1710214395498476,
115
+ "scr_dir2_threshold_50": 0.1710214395498476,
116
+ "scr_dir1_threshold_100": -0.4883708680506944,
117
+ "scr_metric_threshold_100": 0.2137767994373095,
118
+ "scr_dir2_threshold_100": 0.2137767994373095,
119
+ "scr_dir1_threshold_500": -0.6744175730953216,
120
+ "scr_metric_threshold_500": 0.4180522504788193,
121
+ "scr_dir2_threshold_500": 0.4180522504788193
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.47474780922908344,
126
+ "scr_metric_threshold_2": 0.2259888013995304,
127
+ "scr_dir2_threshold_2": 0.2259888013995304,
128
+ "scr_dir1_threshold_5": 0.5252527928378122,
129
+ "scr_metric_threshold_5": 0.3361582115914186,
130
+ "scr_dir2_threshold_5": 0.3361582115914186,
131
+ "scr_dir1_threshold_10": 0.5858586527549076,
132
+ "scr_metric_threshold_10": 0.42372887634310386,
133
+ "scr_dir2_threshold_10": 0.42372887634310386,
134
+ "scr_dir1_threshold_20": 0.5959595290632742,
135
+ "scr_metric_threshold_20": 0.4971750936794668,
136
+ "scr_dir2_threshold_20": 0.4971750936794668,
137
+ "scr_dir1_threshold_50": 0.6161618837469032,
138
+ "scr_metric_threshold_50": 0.655367322009013,
139
+ "scr_dir2_threshold_50": 0.655367322009013,
140
+ "scr_dir1_threshold_100": 0.5959595290632742,
141
+ "scr_metric_threshold_100": 0.7853108238826024,
142
+ "scr_dir2_threshold_100": 0.7853108238826024,
143
+ "scr_dir1_threshold_500": 0.44444457823708794,
144
+ "scr_metric_threshold_500": 0.06497175093679466,
145
+ "scr_dir2_threshold_500": 0.06497175093679466
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.476922456188292,
150
+ "scr_metric_threshold_2": 0.042821206340913034,
151
+ "scr_dir2_threshold_2": 0.042821206340913034,
152
+ "scr_dir1_threshold_5": 0.5538463795678938,
153
+ "scr_metric_threshold_5": 0.07304781658585509,
154
+ "scr_dir2_threshold_5": 0.07304781658585509,
155
+ "scr_dir1_threshold_10": 0.64615380383102,
156
+ "scr_metric_threshold_10": 0.17632239355429088,
157
+ "scr_dir2_threshold_10": 0.17632239355429088,
158
+ "scr_dir1_threshold_20": 0.5999996332021725,
159
+ "scr_metric_threshold_20": 0.3274558951919172,
160
+ "scr_dir2_threshold_20": 0.3274558951919172,
161
+ "scr_dir1_threshold_50": 0.5846152153240797,
162
+ "scr_metric_threshold_50": 0.4282116129962143,
163
+ "scr_dir2_threshold_50": 0.4282116129962143,
164
+ "scr_dir1_threshold_100": 0.4615380383101991,
165
+ "scr_metric_threshold_100": 0.5465994950871211,
166
+ "scr_dir2_threshold_100": 0.5465994950871211,
167
+ "scr_dir1_threshold_500": -0.046154170628847364,
168
+ "scr_metric_threshold_500": 0.3148614492335849,
169
+ "scr_dir2_threshold_500": 0.3148614492335849
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.05384637956789382,
174
+ "scr_metric_threshold_2": 0.029673561113860433,
175
+ "scr_dir2_threshold_2": 0.029673561113860433,
176
+ "scr_dir1_threshold_5": 0.08461521532407963,
177
+ "scr_metric_threshold_5": 0.1275963835369501,
178
+ "scr_dir2_threshold_5": 0.1275963835369501,
179
+ "scr_dir1_threshold_10": 0.1692308891454436,
180
+ "scr_metric_threshold_10": 0.1899110033707575,
181
+ "scr_dir2_threshold_10": 0.1899110033707575,
182
+ "scr_dir1_threshold_20": -0.015384417878092908,
183
+ "scr_metric_threshold_20": 0.2640949061554086,
184
+ "scr_dir2_threshold_20": 0.2640949061554086,
185
+ "scr_dir1_threshold_50": -0.17692309808449005,
186
+ "scr_metric_threshold_50": 0.3946587872984452,
187
+ "scr_dir2_threshold_50": 0.3946587872984452,
188
+ "scr_dir1_threshold_100": -0.28461539872299335,
189
+ "scr_metric_threshold_100": 0.545994090473834,
190
+ "scr_dir2_threshold_100": 0.545994090473834,
191
+ "scr_dir1_threshold_500": -0.007692208939046454,
192
+ "scr_metric_threshold_500": 0.8071216758915316,
193
+ "scr_dir2_threshold_500": 0.8071216758915316
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.022900926602102543,
198
+ "scr_metric_threshold_2": 0.05923351749853221,
199
+ "scr_dir2_threshold_2": 0.05923351749853221,
200
+ "scr_dir1_threshold_5": 0.022900926602102543,
201
+ "scr_metric_threshold_5": 0.10801400023800321,
202
+ "scr_dir2_threshold_5": 0.10801400023800321,
203
+ "scr_dir1_threshold_10": -0.061068834274040355,
204
+ "scr_metric_threshold_10": 0.18815337957296907,
205
+ "scr_dir2_threshold_10": 0.18815337957296907,
206
+ "scr_dir1_threshold_20": -0.06870232480895799,
207
+ "scr_metric_threshold_20": 0.3728222757717919,
208
+ "scr_dir2_threshold_20": 0.3728222757717919,
209
+ "scr_dir1_threshold_50": -0.08396930587879325,
210
+ "scr_metric_threshold_50": 0.5679444144113647,
211
+ "scr_dir2_threshold_50": 0.5679444144113647,
212
+ "scr_dir1_threshold_100": -0.36641209564954286,
213
+ "scr_metric_threshold_100": 0.6898955174191979,
214
+ "scr_dir2_threshold_100": 0.6898955174191979,
215
+ "scr_dir1_threshold_500": -0.34351162404478996,
216
+ "scr_metric_threshold_500": 0.7665506210617061,
217
+ "scr_dir2_threshold_500": 0.7665506210617061
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.1568624815634649,
222
+ "scr_metric_threshold_2": 0.018987286856797948,
223
+ "scr_dir2_threshold_2": 0.018987286856797948,
224
+ "scr_dir1_threshold_5": 0.12745051061313312,
225
+ "scr_metric_threshold_5": 0.06962030330466275,
226
+ "scr_dir2_threshold_5": 0.06962030330466275,
227
+ "scr_dir1_threshold_10": 0.1568624815634649,
228
+ "scr_metric_threshold_10": 0.11075939339067452,
229
+ "scr_dir2_threshold_10": 0.11075939339067452,
230
+ "scr_dir1_threshold_20": 0.10784291955242754,
231
+ "scr_metric_threshold_20": 0.21518994265881997,
232
+ "scr_dir2_threshold_20": 0.21518994265881997,
233
+ "scr_dir1_threshold_50": 0.13725489050275935,
234
+ "scr_metric_threshold_50": 0.28481005734118003,
235
+ "scr_dir2_threshold_50": 0.28481005734118003,
236
+ "scr_dir1_threshold_100": 0.05882335754139017,
237
+ "scr_metric_threshold_100": 0.3512658442734269,
238
+ "scr_dir2_threshold_100": 0.3512658442734269,
239
+ "scr_dir1_threshold_500": 0.08823532849172197,
240
+ "scr_metric_threshold_500": 0.5632910819375283,
241
+ "scr_dir2_threshold_500": 0.5632910819375283
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.31428595757000355,
246
+ "scr_metric_threshold_2": 0.07458565309804979,
247
+ "scr_dir2_threshold_2": 0.07458565309804979,
248
+ "scr_dir1_threshold_5": 0.24285744696250441,
249
+ "scr_metric_threshold_5": 0.2679557733594606,
250
+ "scr_dir2_threshold_5": 0.2679557733594606,
251
+ "scr_dir1_threshold_10": 0.2714285106074991,
252
+ "scr_metric_threshold_10": 0.41160228954253114,
253
+ "scr_dir2_threshold_10": 0.41160228954253114,
254
+ "scr_dir1_threshold_20": 0.31428595757000355,
255
+ "scr_metric_threshold_20": 0.4751381979608299,
256
+ "scr_dir2_threshold_20": 0.4751381979608299,
257
+ "scr_dir1_threshold_50": -0.45714297878500176,
258
+ "scr_metric_threshold_50": 0.6077348048104564,
259
+ "scr_dir2_threshold_50": 0.6077348048104564,
260
+ "scr_dir1_threshold_100": -0.6857140424299965,
261
+ "scr_metric_threshold_100": 0.5386741063791286,
262
+ "scr_dir2_threshold_100": 0.5386741063791286,
263
+ "scr_dir1_threshold_500": -2.7142859575700036,
264
+ "scr_metric_threshold_500": 0.6132597594771784,
265
+ "scr_dir2_threshold_500": 0.6132597594771784
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.13636377710655914,
270
+ "scr_metric_threshold_2": 0.04428052802231809,
271
+ "scr_dir2_threshold_2": 0.04428052802231809,
272
+ "scr_dir1_threshold_5": 0.18181811144672044,
273
+ "scr_metric_threshold_5": 0.1143911991001679,
274
+ "scr_dir2_threshold_5": 0.1143911991001679,
275
+ "scr_dir1_threshold_10": 0.21428560370198924,
276
+ "scr_metric_threshold_10": 0.17343190312992535,
277
+ "scr_dir2_threshold_10": 0.17343190312992535,
278
+ "scr_dir1_threshold_20": 0.292207817340457,
279
+ "scr_metric_threshold_20": 0.22140220022495802,
280
+ "scr_dir2_threshold_20": 0.22140220022495802,
281
+ "scr_dir1_threshold_50": 0.3571428018509946,
282
+ "scr_metric_threshold_50": 0.29520308026215486,
283
+ "scr_dir2_threshold_50": 0.29520308026215486,
284
+ "scr_dir1_threshold_100": 0.40259752323419357,
285
+ "scr_metric_threshold_100": 0.29151287130280784,
286
+ "scr_dir2_threshold_100": 0.29151287130280784,
287
+ "scr_dir1_threshold_500": 0.376623452021371,
288
+ "scr_metric_threshold_500": 0.36531375134000466,
289
+ "scr_dir2_threshold_500": 0.36531375134000466
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "layer_9/width_16k/canonical",
294
+ "sae_lens_release_id": "gemma-scope-9b-pt-res-canonical",
295
+ "sae_lens_version": "4.4.1",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_0_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732134073194,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.17329168329391084,
76
+ "scr_metric_threshold_2": 0.11726044418487562,
77
+ "scr_dir2_threshold_2": 0.12249238234682713,
78
+ "scr_dir1_threshold_5": 0.14842141092496283,
79
+ "scr_metric_threshold_5": 0.18384289673701756,
80
+ "scr_dir2_threshold_5": 0.19878262433834434,
81
+ "scr_dir1_threshold_10": 0.1759351377995283,
82
+ "scr_metric_threshold_10": 0.22227076969727985,
83
+ "scr_dir2_threshold_10": 0.23720284034100605,
84
+ "scr_dir1_threshold_20": 0.07933174794043227,
85
+ "scr_metric_threshold_20": 0.2427395707952018,
86
+ "scr_dir2_threshold_20": 0.25886210208624566,
87
+ "scr_dir1_threshold_50": 0.0273018900453712,
88
+ "scr_metric_threshold_50": 0.2627190044020786,
89
+ "scr_dir2_threshold_50": 0.2862858496545046,
90
+ "scr_dir1_threshold_100": -0.09154199900133624,
91
+ "scr_metric_threshold_100": 0.2117411679323499,
92
+ "scr_dir2_threshold_100": 0.2367922605151227,
93
+ "scr_dir1_threshold_500": -0.39134487982042554,
94
+ "scr_metric_threshold_500": 0.1364773204976103,
95
+ "scr_dir2_threshold_500": 0.179459641694154
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.39062549476503666,
102
+ "scr_metric_threshold_2": 0.019704325740837254,
103
+ "scr_dir2_threshold_2": 0.019704325740837254,
104
+ "scr_dir1_threshold_5": 0.39062549476503666,
105
+ "scr_metric_threshold_5": 0.05418707929913162,
106
+ "scr_dir2_threshold_5": 0.05418707929913162,
107
+ "scr_dir1_threshold_10": 0.42187508731147705,
108
+ "scr_metric_threshold_10": 0.06650233794070366,
109
+ "scr_dir2_threshold_10": 0.06650233794070366,
110
+ "scr_dir1_threshold_20": 0.2656252619344312,
111
+ "scr_metric_threshold_20": 0.10098509149899802,
112
+ "scr_dir2_threshold_20": 0.10098509149899802,
113
+ "scr_dir1_threshold_50": 0.18750034924590825,
114
+ "scr_metric_threshold_50": 0.08620681049100425,
115
+ "scr_dir2_threshold_50": 0.08620681049100425,
116
+ "scr_dir1_threshold_100": 0.21874994179234863,
117
+ "scr_metric_threshold_100": 0.10098509149899802,
118
+ "scr_dir2_threshold_100": 0.10098509149899802,
119
+ "scr_dir1_threshold_500": -0.6406250291038257,
120
+ "scr_metric_threshold_500": 0.022167494916722333,
121
+ "scr_dir2_threshold_500": 0.022167494916722333
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.18811873007892008,
126
+ "scr_metric_threshold_2": 0.19373224066079303,
127
+ "scr_dir2_threshold_2": 0.19373224066079303,
128
+ "scr_dir1_threshold_5": 0.0693072111579883,
129
+ "scr_metric_threshold_5": 0.24786327543984746,
130
+ "scr_dir2_threshold_5": 0.24786327543984746,
131
+ "scr_dir1_threshold_10": 0.0693072111579883,
132
+ "scr_metric_threshold_10": 0.29059827608429084,
133
+ "scr_dir2_threshold_10": 0.29059827608429084,
134
+ "scr_dir1_threshold_20": -0.6138610881446375,
135
+ "scr_metric_threshold_20": 0.3504274128375495,
136
+ "scr_dir2_threshold_20": 0.3504274128375495,
137
+ "scr_dir1_threshold_50": -0.59405936503946,
138
+ "scr_metric_threshold_50": 0.38461551524138266,
139
+ "scr_dir2_threshold_50": 0.38461551524138266,
140
+ "scr_dir1_threshold_100": -0.6138610881446375,
141
+ "scr_metric_threshold_100": 0.14529913804214542,
142
+ "scr_dir2_threshold_100": 0.14529913804214542,
143
+ "scr_dir1_threshold_500": -1.2376236279867974,
144
+ "scr_metric_threshold_500": 0.07977206912847994,
145
+ "scr_dir2_threshold_500": 0.07977206912847994
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5396829601737049,
150
+ "scr_metric_threshold_2": 0.03291141800038962,
151
+ "scr_dir2_threshold_2": 0.03291141800038962,
152
+ "scr_dir1_threshold_5": 0.5555557658011382,
153
+ "scr_metric_threshold_5": 0.10886085576265946,
154
+ "scr_dir2_threshold_5": 0.10886085576265946,
155
+ "scr_dir1_threshold_10": 0.49206359718628334,
156
+ "scr_metric_threshold_10": 0.13164567200155847,
157
+ "scr_dir2_threshold_10": 0.13164567200155847,
158
+ "scr_dir1_threshold_20": 0.33333364870170723,
159
+ "scr_metric_threshold_20": 0.15696210095637528,
160
+ "scr_dir2_threshold_20": 0.15696210095637528,
161
+ "scr_dir1_threshold_50": 0.14285714285714285,
162
+ "scr_metric_threshold_50": 0.2101265715819267,
163
+ "scr_dir2_threshold_50": 0.2101265715819267,
164
+ "scr_dir1_threshold_100": -0.6984119625531593,
165
+ "scr_metric_threshold_100": 0.08860765223967824,
166
+ "scr_dir2_threshold_100": 0.08860765223967824,
167
+ "scr_dir1_threshold_500": -1.126983391124588,
168
+ "scr_metric_threshold_500": 0.06582283600077923,
169
+ "scr_dir2_threshold_500": 0.06582283600077923
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.14960628073511784,
174
+ "scr_metric_threshold_2": 0.16568045667738668,
175
+ "scr_dir2_threshold_2": 0.16568045667738668,
176
+ "scr_dir1_threshold_5": 0.0787400983523771,
177
+ "scr_metric_threshold_5": 0.19526640366557985,
178
+ "scr_dir2_threshold_5": 0.19526640366557985,
179
+ "scr_dir1_threshold_10": 0.10236184626128617,
180
+ "scr_metric_threshold_10": 0.0917160300696616,
181
+ "scr_dir2_threshold_10": 0.0917160300696616,
182
+ "scr_dir1_threshold_20": 0.1102362315589293,
183
+ "scr_metric_threshold_20": 0.1301776024437199,
184
+ "scr_dir2_threshold_20": 0.1301776024437199,
185
+ "scr_dir1_threshold_50": 0.19685024588094274,
186
+ "scr_metric_threshold_50": 0.1686391748177782,
187
+ "scr_dir2_threshold_50": 0.1686391748177782,
188
+ "scr_dir1_threshold_100": 0.1574801967047542,
189
+ "scr_metric_threshold_100": -0.0059170835905767525,
190
+ "scr_dir2_threshold_100": -0.0059170835905767525,
191
+ "scr_dir1_threshold_500": -0.2047246311785859,
192
+ "scr_metric_threshold_500": -0.0502957395552118,
193
+ "scr_dir2_threshold_500": -0.0502957395552118
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.016393554752069144,
198
+ "scr_metric_threshold_2": 0.1914063154836078,
199
+ "scr_dir2_threshold_2": 0.1914063154836078,
200
+ "scr_dir1_threshold_5": -0.021857964433295084,
201
+ "scr_metric_threshold_5": 0.4609375436557385,
202
+ "scr_dir2_threshold_5": 0.4609375436557385,
203
+ "scr_dir1_threshold_10": -0.07103830298111119,
204
+ "scr_metric_threshold_10": 0.5703125145519129,
205
+ "scr_dir2_threshold_10": 0.5703125145519129,
206
+ "scr_dir1_threshold_20": 0.016393554752069144,
207
+ "scr_metric_threshold_20": 0.61718760186339,
208
+ "scr_dir2_threshold_20": 0.61718760186339,
209
+ "scr_dir1_threshold_50": -0.22404372830578545,
210
+ "scr_metric_threshold_50": 0.625,
211
+ "scr_dir2_threshold_50": 0.625,
212
+ "scr_dir1_threshold_100": -0.15300542532467426,
213
+ "scr_metric_threshold_100": 0.628906199068305,
214
+ "scr_dir2_threshold_100": 0.628906199068305,
215
+ "scr_dir1_threshold_500": -0.12568305121015325,
216
+ "scr_metric_threshold_500": 0.5703125145519129,
217
+ "scr_dir2_threshold_500": 0.5703125145519129
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.03589728071373967,
222
+ "scr_metric_threshold_2": 0.09677413927777444,
223
+ "scr_dir2_threshold_2": 0.09677413927777444,
224
+ "scr_dir1_threshold_5": 0.05641014354938642,
225
+ "scr_metric_threshold_5": 0.12500006008532116,
226
+ "scr_dir2_threshold_5": 0.12500006008532116,
227
+ "scr_dir1_threshold_10": 0.1692307363130155,
228
+ "scr_metric_threshold_10": 0.2217741993630956,
229
+ "scr_dir2_threshold_10": 0.2217741993630956,
230
+ "scr_dir1_threshold_20": 0.158974152062764,
231
+ "scr_metric_threshold_20": 0.14516132908730398,
232
+ "scr_dir2_threshold_20": 0.14516132908730398,
233
+ "scr_dir1_threshold_50": 0.14871787347736873,
234
+ "scr_metric_threshold_50": 0.1854838670912696,
235
+ "scr_dir2_threshold_50": 0.1854838670912696,
236
+ "scr_dir1_threshold_100": 0.09230742426312609,
237
+ "scr_metric_threshold_100": 0.2419354683650784,
238
+ "scr_dir2_threshold_100": 0.2419354683650784,
239
+ "scr_dir1_threshold_500": -0.00512844495755385,
240
+ "scr_metric_threshold_500": 0.13306447154960044,
241
+ "scr_dir2_threshold_500": 0.13306447154960044
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.03167433750520866,
246
+ "scr_metric_threshold_2": 0.2035398300107262,
247
+ "scr_dir2_threshold_2": 0.2035398300107262,
248
+ "scr_dir1_threshold_5": 0.05429874870710522,
249
+ "scr_metric_threshold_5": 0.2743361664878967,
250
+ "scr_dir2_threshold_5": 0.2743361664878967,
251
+ "scr_dir1_threshold_10": 0.17647073101990013,
252
+ "scr_metric_threshold_10": 0.3584069314396288,
253
+ "scr_dir2_threshold_10": 0.3584069314396288,
254
+ "scr_dir1_threshold_20": 0.31674202653080763,
255
+ "scr_metric_threshold_20": 0.3938052315468908,
256
+ "scr_dir2_threshold_20": 0.3938052315468908,
257
+ "scr_dir1_threshold_50": 0.33484160943317604,
258
+ "scr_metric_threshold_50": 0.41592910317959114,
259
+ "scr_dir2_threshold_50": 0.41592910317959114,
260
+ "scr_dir1_threshold_100": 0.2171947251241651,
261
+ "scr_metric_threshold_100": 0.44690268170778374,
262
+ "scr_dir2_threshold_100": 0.44690268170778374,
263
+ "scr_dir1_threshold_500": 0.2443439646255898,
264
+ "scr_metric_threshold_500": 0.3053097450160893,
265
+ "scr_dir2_threshold_500": 0.3053097450160893
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.03433482762748996,
270
+ "scr_metric_threshold_2": 0.03433482762748996,
271
+ "scr_dir2_threshold_2": 0.07619033292310208,
272
+ "scr_dir1_threshold_5": 0.004291789499965721,
273
+ "scr_metric_threshold_5": 0.004291789499965721,
274
+ "scr_dir2_threshold_5": 0.12380961031057988,
275
+ "scr_dir1_threshold_10": 0.047210196127387125,
276
+ "scr_metric_threshold_10": 0.047210196127387125,
277
+ "scr_dir2_threshold_10": 0.16666676127719673,
278
+ "scr_dir1_threshold_20": 0.047210196127387125,
279
+ "scr_metric_threshold_20": 0.047210196127387125,
280
+ "scr_dir2_threshold_20": 0.17619044645573817,
281
+ "scr_dir1_threshold_50": 0.025750992813676425,
282
+ "scr_metric_threshold_50": 0.025750992813676425,
283
+ "scr_dir2_threshold_50": 0.21428575483308432,
284
+ "scr_dir1_threshold_100": 0.047210196127387125,
285
+ "scr_metric_threshold_100": 0.047210196127387125,
286
+ "scr_dir2_threshold_100": 0.24761893678956953,
287
+ "scr_dir1_threshold_500": -0.03433482762748996,
288
+ "scr_metric_threshold_500": -0.03433482762748996,
289
+ "scr_dir2_threshold_500": 0.3095237419448595
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.12.hook_resid_post__trainer_0",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_1_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732135077139,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.15328536408530907,
76
+ "scr_metric_threshold_2": 0.16292342139780525,
77
+ "scr_dir2_threshold_2": 0.1651715155049132,
78
+ "scr_dir1_threshold_5": 0.17482115694237454,
79
+ "scr_metric_threshold_5": 0.2454072217954903,
80
+ "scr_dir2_threshold_5": 0.254496742582721,
81
+ "scr_dir1_threshold_10": 0.16148527949345035,
82
+ "scr_metric_threshold_10": 0.3093565772292812,
83
+ "scr_dir2_threshold_10": 0.3183796844227482,
84
+ "scr_dir1_threshold_20": 0.14585798288747992,
85
+ "scr_metric_threshold_20": 0.33435565989097665,
86
+ "scr_dir2_threshold_20": 0.34206310072825036,
87
+ "scr_dir1_threshold_50": 0.07712511272463139,
88
+ "scr_metric_threshold_50": 0.356395015846649,
89
+ "scr_dir2_threshold_50": 0.3632645038768485,
90
+ "scr_dir1_threshold_100": -0.16362451586289034,
91
+ "scr_metric_threshold_100": 0.23968141983697314,
92
+ "scr_dir2_threshold_100": 0.23737965957097715,
93
+ "scr_dir1_threshold_500": -0.5740101909736451,
94
+ "scr_metric_threshold_500": 0.18137654398778683,
95
+ "scr_dir2_threshold_500": 0.19058357804734374
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.39062549476503666,
102
+ "scr_metric_threshold_2": 0.049261034566288144,
103
+ "scr_dir2_threshold_2": 0.049261034566288144,
104
+ "scr_dir1_threshold_5": 0.3749997671693945,
105
+ "scr_metric_threshold_5": 0.05418707929913162,
106
+ "scr_dir2_threshold_5": 0.05418707929913162,
107
+ "scr_dir1_threshold_10": 0.3749997671693945,
108
+ "scr_metric_threshold_10": 0.06157629320786018,
109
+ "scr_dir2_threshold_10": 0.06157629320786018,
110
+ "scr_dir1_threshold_20": 0.3749997671693945,
111
+ "scr_metric_threshold_20": 0.0935960243997328,
112
+ "scr_dir2_threshold_20": 0.0935960243997328,
113
+ "scr_dir1_threshold_50": 0.3749997671693945,
114
+ "scr_metric_threshold_50": 0.13793101423317747,
115
+ "scr_dir2_threshold_50": 0.13793101423317747,
116
+ "scr_dir1_threshold_100": -0.3906245634426147,
117
+ "scr_metric_threshold_100": 0.16256153151632155,
118
+ "scr_dir2_threshold_100": 0.16256153151632155,
119
+ "scr_dir1_threshold_500": -0.8906245634426148,
120
+ "scr_metric_threshold_500": 0.18226600406662213,
121
+ "scr_dir2_threshold_500": 0.18226600406662213
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.019802313250111094,
126
+ "scr_metric_threshold_2": 0.23931637719923726,
127
+ "scr_dir2_threshold_2": 0.23931637719923726,
128
+ "scr_dir1_threshold_5": 0.0594057594604659,
129
+ "scr_metric_threshold_5": 0.3076924121931061,
130
+ "scr_dir2_threshold_5": 0.3076924121931061,
131
+ "scr_dir1_threshold_10": 0.009900861552588701,
132
+ "scr_metric_threshold_10": 0.33333344654253166,
133
+ "scr_dir2_threshold_10": 0.33333344654253166,
134
+ "scr_dir1_threshold_20": 0.0594057594604659,
135
+ "scr_metric_threshold_20": 0.39601137956219623,
136
+ "scr_dir2_threshold_20": 0.39601137956219623,
137
+ "scr_dir1_threshold_50": -0.8118812699210799,
138
+ "scr_metric_threshold_50": 0.41310551567101156,
139
+ "scr_dir2_threshold_50": 0.41310551567101156,
140
+ "scr_dir1_threshold_100": -0.9603959636447115,
141
+ "scr_metric_threshold_100": 0.0056981019742042656,
142
+ "scr_dir2_threshold_100": 0.0056981019742042656,
143
+ "scr_dir1_threshold_500": -1.5049504307762944,
144
+ "scr_metric_threshold_500": -0.07407396715427567,
145
+ "scr_dir2_threshold_500": -0.07407396715427567
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5396829601737049,
150
+ "scr_metric_threshold_2": 0.08860765223967824,
151
+ "scr_dir2_threshold_2": 0.08860765223967824,
152
+ "scr_dir1_threshold_5": 0.49206359718628334,
153
+ "scr_metric_threshold_5": 0.1443038864789669,
154
+ "scr_dir2_threshold_5": 0.1443038864789669,
155
+ "scr_dir1_threshold_10": 0.2539686744594192,
156
+ "scr_metric_threshold_10": 0.24050637686639853,
157
+ "scr_dir2_threshold_10": 0.24050637686639853,
158
+ "scr_dir1_threshold_20": 0.11111153160227633,
159
+ "scr_metric_threshold_20": 0.25569620405972476,
160
+ "scr_dir2_threshold_20": 0.25569620405972476,
161
+ "scr_dir1_threshold_50": 0.1587299484845761,
162
+ "scr_metric_threshold_50": 0.2632911931052975,
163
+ "scr_dir2_threshold_50": 0.2632911931052975,
164
+ "scr_dir1_threshold_100": -1.1428561967520212,
165
+ "scr_metric_threshold_100": 0.025316579852636207,
166
+ "scr_dir2_threshold_100": 0.025316579852636207,
167
+ "scr_dir1_threshold_500": -1.8888884683977236,
168
+ "scr_metric_threshold_500": 0.04303801976188022,
169
+ "scr_dir2_threshold_500": 0.04303801976188022
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.04724396514582491,
174
+ "scr_metric_threshold_2": 0.24556214322079165,
175
+ "scr_dir2_threshold_2": 0.24556214322079165,
176
+ "scr_dir1_threshold_5": 0.08661401432201346,
177
+ "scr_metric_threshold_5": 0.28698225739013833,
178
+ "scr_dir2_threshold_5": 0.28698225739013833,
179
+ "scr_dir1_threshold_10": 0.0,
180
+ "scr_metric_threshold_10": 0.3964497145766333,
181
+ "scr_dir2_threshold_10": 0.3964497145766333,
182
+ "scr_dir1_threshold_20": -0.26771689759169026,
183
+ "scr_metric_threshold_20": 0.233727799694535,
184
+ "scr_dir2_threshold_20": 0.233727799694535,
185
+ "scr_dir1_threshold_50": 0.0,
186
+ "scr_metric_threshold_50": 0.30769240264736325,
187
+ "scr_dir2_threshold_50": 0.30769240264736325,
188
+ "scr_dir1_threshold_100": 0.5511811577946465,
189
+ "scr_metric_threshold_100": 0.0591717176312832,
190
+ "scr_dir2_threshold_100": 0.0591717176312832,
191
+ "scr_dir1_threshold_500": -0.28346472953096297,
192
+ "scr_metric_threshold_500": 0.029585770643090033,
193
+ "scr_dir2_threshold_500": 0.029585770643090033
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.03825119347697291,
198
+ "scr_metric_threshold_2": 0.21093754365573852,
199
+ "scr_dir2_threshold_2": 0.21093754365573852,
200
+ "scr_dir1_threshold_5": -0.016393554752069144,
201
+ "scr_metric_threshold_5": 0.5195312281721307,
202
+ "scr_dir2_threshold_5": 0.5195312281721307,
203
+ "scr_dir1_threshold_10": 0.021857964433295084,
204
+ "scr_metric_threshold_10": 0.628906199068305,
205
+ "scr_dir2_threshold_10": 0.628906199068305,
206
+ "scr_dir1_threshold_20": 0.05464474822904205,
207
+ "scr_metric_threshold_20": 0.6875001164153027,
208
+ "scr_dir2_threshold_20": 0.6875001164153027,
209
+ "scr_dir1_threshold_50": -0.04918033854781611,
210
+ "scr_metric_threshold_50": 0.7148437427240436,
211
+ "scr_dir2_threshold_50": 0.7148437427240436,
212
+ "scr_dir1_threshold_100": -0.00546440968122594,
213
+ "scr_metric_threshold_100": 0.703124912688523,
214
+ "scr_dir2_threshold_100": 0.703124912688523,
215
+ "scr_dir1_threshold_500": -0.46448068565524875,
216
+ "scr_metric_threshold_500": 0.6093749708961743,
217
+ "scr_dir2_threshold_500": 0.6093749708961743
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.0410254200064373,
222
+ "scr_metric_threshold_2": 0.10483879108333834,
223
+ "scr_dir2_threshold_2": 0.10483879108333834,
224
+ "scr_dir1_threshold_5": 0.15384601277006638,
225
+ "scr_metric_threshold_5": 0.1491935348194436,
226
+ "scr_dir2_threshold_5": 0.1491935348194436,
227
+ "scr_dir1_threshold_10": 0.2358974641126534,
228
+ "scr_metric_threshold_10": 0.22983885116865949,
229
+ "scr_dir2_threshold_10": 0.22983885116865949,
230
+ "scr_dir1_threshold_20": 0.28717946836934216,
231
+ "scr_metric_threshold_20": 0.3145161329087304,
232
+ "scr_dir2_threshold_20": 0.3145161329087304,
233
+ "scr_dir1_threshold_50": 0.27179474482639304,
234
+ "scr_metric_threshold_50": 0.29032265817460795,
235
+ "scr_dir2_threshold_50": 0.29032265817460795,
236
+ "scr_dir1_threshold_100": 0.18974359914866223,
237
+ "scr_metric_threshold_100": 0.2782258006369044,
238
+ "scr_dir2_threshold_100": 0.2782258006369044,
239
+ "scr_dir1_threshold_500": 0.13846128922711723,
240
+ "scr_metric_threshold_500": 0.17741945562699032,
241
+ "scr_dir2_threshold_500": 0.17741945562699032
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.07239833160947363,
246
+ "scr_metric_threshold_2": 0.28761059496245833,
247
+ "scr_dir2_threshold_2": 0.28761059496245833,
248
+ "scr_dir1_threshold_5": 0.1493212218142196,
249
+ "scr_metric_threshold_5": 0.40265493844238304,
250
+ "scr_dir2_threshold_5": 0.40265493844238304,
251
+ "scr_dir1_threshold_10": 0.25791844952417425,
252
+ "scr_metric_threshold_10": 0.44690268170778374,
253
+ "scr_dir2_threshold_10": 0.44690268170778374,
254
+ "scr_dir1_threshold_20": 0.375565603537441,
255
+ "scr_metric_threshold_20": 0.5221237397640236,
256
+ "scr_dir2_threshold_20": 0.5221237397640236,
257
+ "scr_dir1_threshold_50": 0.47511317464833935,
258
+ "scr_metric_threshold_50": 0.5265487250804465,
259
+ "scr_dir2_threshold_50": 0.5265487250804465,
260
+ "scr_dir1_threshold_100": 0.2262443817232214,
261
+ "scr_metric_threshold_100": 0.4601768464449919,
262
+ "scr_dir2_threshold_100": 0.4601768464449919,
263
+ "scr_dir1_threshold_500": 0.19457004421801274,
264
+ "scr_metric_threshold_500": 0.3761060814932598,
265
+ "scr_dir2_threshold_500": 0.3761060814932598
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.07725323425491137,
270
+ "scr_metric_threshold_2": 0.07725323425491137,
271
+ "scr_dir2_threshold_2": 0.09523798711177515,
272
+ "scr_dir1_threshold_5": 0.09871243756862208,
273
+ "scr_metric_threshold_5": 0.09871243756862208,
274
+ "scr_dir2_threshold_5": 0.17142860386646747,
275
+ "scr_dir1_threshold_10": 0.13733905469607777,
276
+ "scr_metric_threshold_10": 0.13733905469607777,
277
+ "scr_dir2_threshold_10": 0.20952391224381361,
278
+ "scr_dir1_threshold_20": 0.17167388232356773,
279
+ "scr_metric_threshold_20": 0.17167388232356773,
280
+ "scr_dir2_threshold_20": 0.2333334090217574,
281
+ "scr_dir1_threshold_50": 0.19742487513724416,
282
+ "scr_metric_threshold_50": 0.19742487513724416,
283
+ "scr_dir2_threshold_50": 0.25238077937884024,
284
+ "scr_dir1_threshold_100": 0.22317586795092056,
285
+ "scr_metric_threshold_100": 0.22317586795092056,
286
+ "scr_dir2_threshold_100": 0.20476178582295265,
287
+ "scr_dir1_threshold_500": 0.10729601656855352,
288
+ "scr_metric_threshold_500": 0.10729601656855352,
289
+ "scr_dir2_threshold_500": 0.18095228904500887
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.12.hook_resid_post__trainer_1",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_2_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732136101039,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.1854654315889088,
76
+ "scr_metric_threshold_2": 0.17093176293289183,
77
+ "scr_dir2_threshold_2": 0.1781257329322068,
78
+ "scr_dir1_threshold_5": 0.19851885725434026,
79
+ "scr_metric_threshold_5": 0.25324158161302546,
80
+ "scr_dir2_threshold_5": 0.2604866193141677,
81
+ "scr_dir1_threshold_10": 0.2237944025793623,
82
+ "scr_metric_threshold_10": 0.2980554190262721,
83
+ "scr_dir2_threshold_10": 0.3076149999072348,
84
+ "scr_dir1_threshold_20": 0.07237468031861494,
85
+ "scr_metric_threshold_20": 0.3526616959156621,
86
+ "scr_dir2_threshold_20": 0.3513077127947307,
87
+ "scr_dir1_threshold_50": -0.14615840249674983,
88
+ "scr_metric_threshold_50": 0.3895407941312633,
89
+ "scr_dir2_threshold_50": 0.3912140977861374,
90
+ "scr_dir1_threshold_100": -0.34628283464955045,
91
+ "scr_metric_threshold_100": 0.32985918113329155,
92
+ "scr_dir2_threshold_100": 0.3327382948296333,
93
+ "scr_dir1_threshold_500": -0.6573857042479849,
94
+ "scr_metric_threshold_500": 0.2712778574447724,
95
+ "scr_dir2_threshold_500": 0.28767882952690904
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4531256111803394,
102
+ "scr_metric_threshold_2": 0.039408798291137845,
103
+ "scr_dir2_threshold_2": 0.039408798291137845,
104
+ "scr_dir1_threshold_5": 0.4687504074535596,
105
+ "scr_metric_threshold_5": 0.06896550711658873,
106
+ "scr_dir2_threshold_5": 0.06896550711658873,
107
+ "scr_dir1_threshold_10": 0.4687504074535596,
108
+ "scr_metric_threshold_10": 0.088669832857426,
109
+ "scr_dir2_threshold_10": 0.088669832857426,
110
+ "scr_dir1_threshold_20": 0.42187508731147705,
111
+ "scr_metric_threshold_20": 0.12068956404929862,
112
+ "scr_dir2_threshold_20": 0.12068956404929862,
113
+ "scr_dir1_threshold_50": 0.07812491268852294,
114
+ "scr_metric_threshold_50": 0.19458126270819417,
115
+ "scr_dir2_threshold_50": 0.19458126270819417,
116
+ "scr_dir1_threshold_100": -1.0312495925464404,
117
+ "scr_metric_threshold_100": 0.1871920487994656,
118
+ "scr_dir2_threshold_100": 0.1871920487994656,
119
+ "scr_dir1_threshold_500": -0.921875087311477,
120
+ "scr_metric_threshold_500": 0.32019701829979963,
121
+ "scr_dir2_threshold_500": 0.32019701829979963
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.14851469372363157,
126
+ "scr_metric_threshold_2": 0.25925930957445853,
127
+ "scr_dir2_threshold_2": 0.25925930957445853,
128
+ "scr_dir1_threshold_5": 0.049504897907877196,
129
+ "scr_metric_threshold_5": 0.31623931043371634,
130
+ "scr_dir2_threshold_5": 0.31623931043371634,
131
+ "scr_dir1_threshold_10": 0.08910893426316568,
132
+ "scr_metric_threshold_10": 0.3931624134819929,
133
+ "scr_dir2_threshold_10": 0.3931624134819929,
134
+ "scr_dir1_threshold_20": -0.9405936503946004,
135
+ "scr_metric_threshold_20": 0.4387465500204371,
136
+ "scr_dir2_threshold_20": 0.4387465500204371,
137
+ "scr_dir1_threshold_50": -1.21782190488162,
138
+ "scr_metric_threshold_50": 0.45299155023525156,
139
+ "scr_dir2_threshold_50": 0.45299155023525156,
140
+ "scr_dir1_threshold_100": -1.029702584657766,
141
+ "scr_metric_threshold_100": 0.48148155066488046,
142
+ "scr_dir2_threshold_100": 0.48148155066488046,
143
+ "scr_dir1_threshold_500": -1.6633659860525147,
144
+ "scr_metric_threshold_500": -0.22222224109042193,
145
+ "scr_dir2_threshold_500": -0.22222224109042193
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5396829601737049,
150
+ "scr_metric_threshold_2": 0.037974794330044616,
151
+ "scr_dir2_threshold_2": 0.037974794330044616,
152
+ "scr_dir1_threshold_5": 0.6031751287885597,
153
+ "scr_metric_threshold_5": 0.10126586671708666,
154
+ "scr_dir2_threshold_5": 0.10126586671708666,
155
+ "scr_dir1_threshold_10": 0.42857142857142855,
156
+ "scr_metric_threshold_10": 0.13670889743339407,
157
+ "scr_dir2_threshold_10": 0.13670889743339407,
158
+ "scr_dir1_threshold_20": 0.33333364870170723,
159
+ "scr_metric_threshold_20": 0.20759495886600893,
160
+ "scr_dir2_threshold_20": 0.20759495886600893,
161
+ "scr_dir1_threshold_50": -0.5873013770560047,
162
+ "scr_metric_threshold_50": 0.25822796767346196,
163
+ "scr_dir2_threshold_50": 0.25822796767346196,
164
+ "scr_dir1_threshold_100": -1.7619041311680141,
165
+ "scr_metric_threshold_100": 0.010126601761490606,
166
+ "scr_dir2_threshold_100": 0.010126601761490606,
167
+ "scr_dir1_threshold_500": -2.6507925995657375,
168
+ "scr_metric_threshold_500": 0.058227846955206435,
169
+ "scr_dir2_threshold_500": 0.058227846955206435
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.12598406349820201,
174
+ "scr_metric_threshold_2": 0.19822494546086822,
175
+ "scr_dir2_threshold_2": 0.19822494546086822,
176
+ "scr_dir1_threshold_5": 0.05511788111546126,
177
+ "scr_metric_threshold_5": 0.2751479138638817,
178
+ "scr_dir2_threshold_5": 0.2751479138638817,
179
+ "scr_dir1_threshold_10": 0.04724396514582491,
180
+ "scr_metric_threshold_10": 0.33136091335477336,
181
+ "scr_dir2_threshold_10": 0.33136091335477336,
182
+ "scr_dir1_threshold_20": -0.13385844879584513,
183
+ "scr_metric_threshold_20": 0.40828405810289,
184
+ "scr_dir2_threshold_20": 0.40828405810289,
185
+ "scr_dir1_threshold_50": -0.31496086273751517,
186
+ "scr_metric_threshold_50": 0.31656802803322837,
187
+ "scr_dir2_threshold_50": 0.31656802803322837,
188
+ "scr_dir1_threshold_100": 0.4960628073511784,
189
+ "scr_metric_threshold_100": 0.020710145257224904,
190
+ "scr_dir2_threshold_100": 0.020710145257224904,
191
+ "scr_dir1_threshold_500": -0.12598453282620878,
192
+ "scr_metric_threshold_500": 0.06213025942657158,
193
+ "scr_dir2_threshold_500": 0.06213025942657158
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.04371592886659017,
198
+ "scr_metric_threshold_2": 0.3281251455191284,
199
+ "scr_dir2_threshold_2": 0.3281251455191284,
200
+ "scr_dir1_threshold_5": 0.021857964433295084,
201
+ "scr_metric_threshold_5": 0.5507812863797821,
202
+ "scr_dir2_threshold_5": 0.5507812863797821,
203
+ "scr_dir1_threshold_10": 0.016393554752069144,
204
+ "scr_metric_threshold_10": 0.61718760186339,
205
+ "scr_dir2_threshold_10": 0.61718760186339,
206
+ "scr_dir1_threshold_20": -0.1202186415289273,
207
+ "scr_metric_threshold_20": 0.6914063154836078,
208
+ "scr_dir2_threshold_20": 0.6914063154836078,
209
+ "scr_dir1_threshold_50": -0.08196712234356307,
210
+ "scr_metric_threshold_50": 0.7109375436557386,
211
+ "scr_dir2_threshold_50": 0.7109375436557386,
212
+ "scr_dir1_threshold_100": -0.20218576387249038,
213
+ "scr_metric_threshold_100": 0.7187499417923486,
214
+ "scr_dir2_threshold_100": 0.7187499417923486,
215
+ "scr_dir1_threshold_500": -0.28415288621605345,
216
+ "scr_metric_threshold_500": 0.7656250291038257,
217
+ "scr_dir2_threshold_500": 0.7656250291038257
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.06666642213478169,
222
+ "scr_metric_threshold_2": 0.12096785435318151,
223
+ "scr_dir2_threshold_2": 0.12096785435318151,
224
+ "scr_dir1_threshold_5": 0.16410229135546164,
225
+ "scr_metric_threshold_5": 0.1854838670912696,
226
+ "scr_dir2_threshold_5": 0.1854838670912696,
227
+ "scr_dir1_threshold_10": 0.2769228841190907,
228
+ "scr_metric_threshold_10": 0.19758072462897314,
229
+ "scr_dir2_threshold_10": 0.19758072462897314,
230
+ "scr_dir1_threshold_20": 0.27179474482639304,
231
+ "scr_metric_threshold_20": 0.2056451360932524,
232
+ "scr_dir2_threshold_20": 0.2056451360932524,
233
+ "scr_dir1_threshold_50": 0.4051282004256689,
234
+ "scr_metric_threshold_50": 0.2983870696388872,
235
+ "scr_dir2_threshold_50": 0.2983870696388872,
236
+ "scr_dir1_threshold_100": 0.2769228841190907,
237
+ "scr_metric_threshold_100": 0.43951619299405154,
238
+ "scr_dir2_threshold_100": 0.43951619299405154,
239
+ "scr_dir1_threshold_500": 0.09230742426312609,
240
+ "scr_metric_threshold_500": 0.4919355885357207,
241
+ "scr_dir2_threshold_500": 0.4919355885357207
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.058823577006633376,
246
+ "scr_metric_threshold_2": 0.3362830598069284,
247
+ "scr_dir2_threshold_2": 0.3362830598069284,
248
+ "scr_dir1_threshold_5": 0.1312216389118512,
249
+ "scr_metric_threshold_5": 0.4336282532332221,
250
+ "scr_dir2_threshold_5": 0.4336282532332221,
251
+ "scr_dir1_threshold_10": 0.3303167811336479,
252
+ "scr_metric_threshold_10": 0.48672570339411514,
253
+ "scr_dir2_threshold_10": 0.48672570339411514,
254
+ "scr_dir1_threshold_20": 0.4977375858502359,
255
+ "scr_metric_threshold_20": 0.49999986813132324,
256
+ "scr_dir2_threshold_20": 0.49999986813132324,
257
+ "scr_dir1_threshold_50": 0.2533936212246461,
258
+ "scr_metric_threshold_50": 0.5884956183994782,
259
+ "scr_dir2_threshold_50": 0.5884956183994782,
260
+ "scr_dir1_threshold_100": 0.2714932041270145,
261
+ "scr_metric_threshold_100": 0.5707964683458472,
262
+ "scr_dir2_threshold_100": 0.5707964683458472,
263
+ "scr_dir1_threshold_500": 0.14027156521516332,
264
+ "scr_metric_threshold_500": 0.5398228898176546,
265
+ "scr_dir2_threshold_500": 0.5398228898176546
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.047210196127387125,
270
+ "scr_metric_threshold_2": 0.047210196127387125,
271
+ "scr_dir2_threshold_2": 0.10476195612190681,
272
+ "scr_dir1_threshold_5": 0.09442064806865635,
273
+ "scr_metric_threshold_5": 0.09442064806865635,
274
+ "scr_dir2_threshold_5": 0.1523809496777944,
275
+ "scr_dir1_threshold_10": 0.13304726519611204,
276
+ "scr_metric_threshold_10": 0.13304726519611204,
277
+ "scr_dir2_threshold_10": 0.20952391224381361,
278
+ "scr_dir1_threshold_20": 0.2489271165784791,
279
+ "scr_metric_threshold_20": 0.2489271165784791,
280
+ "scr_dir2_threshold_20": 0.2380952516110281,
281
+ "scr_dir1_threshold_50": 0.29613731270586624,
282
+ "scr_metric_threshold_50": 0.29613731270586624,
283
+ "scr_dir2_threshold_50": 0.3095237419448595,
284
+ "scr_dir1_threshold_100": 0.21030049945102341,
285
+ "scr_metric_threshold_100": 0.21030049945102341,
286
+ "scr_dir2_threshold_100": 0.2333334090217574,
287
+ "scr_dir1_threshold_500": 0.15450646850982275,
288
+ "scr_metric_threshold_500": 0.15450646850982275,
289
+ "scr_dir2_threshold_500": 0.2857142451669157
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.12.hook_resid_post__trainer_2",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_3_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732137131242,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.179756064224718,
76
+ "scr_metric_threshold_2": 0.16381999896711635,
77
+ "scr_dir2_threshold_2": 0.17417408643197774,
78
+ "scr_dir1_threshold_5": 0.22157635440933438,
79
+ "scr_metric_threshold_5": 0.24003586975792113,
80
+ "scr_dir2_threshold_5": 0.2517490343232025,
81
+ "scr_dir1_threshold_10": 0.24993600347761916,
82
+ "scr_metric_threshold_10": 0.309608090767111,
83
+ "scr_dir2_threshold_10": 0.321431111647118,
84
+ "scr_dir1_threshold_20": 0.12428561353352907,
85
+ "scr_metric_threshold_20": 0.3575686941642805,
86
+ "scr_dir2_threshold_20": 0.36354916518779207,
87
+ "scr_dir1_threshold_50": 0.18973613137388726,
88
+ "scr_metric_threshold_50": 0.41424327732612376,
89
+ "scr_dir2_threshold_50": 0.41360203780117744,
90
+ "scr_dir1_threshold_100": 0.03270314453087406,
91
+ "scr_metric_threshold_100": 0.4821064893344713,
92
+ "scr_dir2_threshold_100": 0.4795620100872735,
93
+ "scr_dir1_threshold_500": -0.4675678519702532,
94
+ "scr_metric_threshold_500": 0.29513905041410365,
95
+ "scr_dir2_threshold_500": 0.31828690327462494
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4687504074535596,
102
+ "scr_metric_threshold_2": 0.04187182065755959,
103
+ "scr_dir2_threshold_2": 0.04187182065755959,
104
+ "scr_dir1_threshold_5": 0.4687504074535596,
105
+ "scr_metric_threshold_5": 0.0566502484750167,
106
+ "scr_dir2_threshold_5": 0.0566502484750167,
107
+ "scr_dir1_threshold_10": 0.5468753201420825,
108
+ "scr_metric_threshold_10": 0.08620681049100425,
109
+ "scr_dir2_threshold_10": 0.08620681049100425,
110
+ "scr_dir1_threshold_20": 0.43749988358469727,
111
+ "scr_metric_threshold_20": 0.0935960243997328,
112
+ "scr_dir2_threshold_20": 0.0935960243997328,
113
+ "scr_dir1_threshold_50": 0.4687504074535596,
114
+ "scr_metric_threshold_50": 0.16748757624916502,
115
+ "scr_dir2_threshold_50": 0.16748757624916502,
116
+ "scr_dir1_threshold_100": 0.43749988358469727,
117
+ "scr_metric_threshold_100": 0.22906401626648853,
118
+ "scr_dir2_threshold_100": 0.22906401626648853,
119
+ "scr_dir1_threshold_500": -1.249999534338789,
120
+ "scr_metric_threshold_500": 0.27586202846635494,
121
+ "scr_dir2_threshold_500": 0.27586202846635494
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.15841614542115398,
126
+ "scr_metric_threshold_2": 0.17663827436577517,
127
+ "scr_dir2_threshold_2": 0.17663827436577517,
128
+ "scr_dir1_threshold_5": 0.2574259412369084,
129
+ "scr_metric_threshold_5": 0.22792034306462622,
130
+ "scr_dir2_threshold_5": 0.22792034306462622,
131
+ "scr_dir1_threshold_10": 0.14851469372363157,
132
+ "scr_metric_threshold_10": 0.324786378488124,
133
+ "scr_dir2_threshold_10": 0.324786378488124,
134
+ "scr_dir1_threshold_20": 0.029703174802699794,
135
+ "scr_metric_threshold_20": 0.3931624134819929,
136
+ "scr_dir2_threshold_20": 0.3931624134819929,
137
+ "scr_dir1_threshold_50": -0.0693066210130546,
138
+ "scr_metric_threshold_50": 0.4558405163154549,
139
+ "scr_dir2_threshold_50": 0.4558405163154549,
140
+ "scr_dir1_threshold_100": -0.9603959636447115,
141
+ "scr_metric_threshold_100": 0.5014244830401017,
142
+ "scr_dir2_threshold_100": 0.5014244830401017,
143
+ "scr_dir1_threshold_500": -1.1980195916315088,
144
+ "scr_metric_threshold_500": -0.03703706848403657,
145
+ "scr_dir2_threshold_500": -0.03703706848403657
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5555557658011382,
150
+ "scr_metric_threshold_2": 0.04050640704596242,
151
+ "scr_dir2_threshold_2": 0.04050640704596242,
152
+ "scr_dir1_threshold_5": 0.5714285714285714,
153
+ "scr_metric_threshold_5": 0.12911405928564068,
154
+ "scr_dir2_threshold_5": 0.12911405928564068,
155
+ "scr_dir1_threshold_10": 0.5714285714285714,
156
+ "scr_metric_threshold_10": 0.1949367443886005,
157
+ "scr_dir2_threshold_10": 0.1949367443886005,
158
+ "scr_dir1_threshold_20": 0.1269843372297096,
159
+ "scr_metric_threshold_20": 0.23544315143456293,
160
+ "scr_dir2_threshold_20": 0.23544315143456293,
161
+ "scr_dir1_threshold_50": 0.09523872597484306,
162
+ "scr_metric_threshold_50": 0.2860760093441966,
163
+ "scr_dir2_threshold_50": 0.2860760093441966,
164
+ "scr_dir1_threshold_100": -0.6190469883108712,
165
+ "scr_metric_threshold_100": 0.28860762206011437,
166
+ "scr_dir2_threshold_100": 0.28860762206011437,
167
+ "scr_dir1_threshold_500": -1.8888884683977236,
168
+ "scr_metric_threshold_500": -0.11392393029667568,
169
+ "scr_dir2_threshold_500": -0.11392393029667568
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.14173189543747472,
174
+ "scr_metric_threshold_2": 0.21005928898712486,
175
+ "scr_dir2_threshold_2": 0.21005928898712486,
176
+ "scr_dir1_threshold_5": 0.1102362315589293,
177
+ "scr_metric_threshold_5": 0.26035502854233666,
178
+ "scr_dir2_threshold_5": 0.26035502854233666,
179
+ "scr_dir1_threshold_10": 0.1102362315589293,
180
+ "scr_metric_threshold_10": 0.30769240264736325,
181
+ "scr_dir2_threshold_10": 0.30769240264736325,
182
+ "scr_dir1_threshold_20": -0.4881893607095489,
183
+ "scr_metric_threshold_20": 0.3609468603429665,
184
+ "scr_dir2_threshold_20": 0.3609468603429665,
185
+ "scr_dir1_threshold_50": -0.02362221723691584,
186
+ "scr_metric_threshold_50": 0.4437870886816599,
187
+ "scr_dir2_threshold_50": 0.4437870886816599,
188
+ "scr_dir1_threshold_100": 0.6692913053232121,
189
+ "scr_metric_threshold_100": 0.5710059729849882,
190
+ "scr_dir2_threshold_100": 0.5710059729849882,
191
+ "scr_dir1_threshold_500": 0.22834637908749494,
192
+ "scr_metric_threshold_500": 0.14201194596997654,
193
+ "scr_dir2_threshold_500": 0.14201194596997654
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": -0.016393554752069144,
198
+ "scr_metric_threshold_2": 0.3554687718278693,
199
+ "scr_dir2_threshold_2": 0.3554687718278693,
200
+ "scr_dir1_threshold_5": 0.016393554752069144,
201
+ "scr_metric_threshold_5": 0.542968888243172,
202
+ "scr_dir2_threshold_5": 0.542968888243172,
203
+ "scr_dir1_threshold_10": -0.01092881936245188,
204
+ "scr_metric_threshold_10": 0.6445312281721307,
205
+ "scr_dir2_threshold_10": 0.6445312281721307,
206
+ "scr_dir1_threshold_20": 0.016393554752069144,
207
+ "scr_metric_threshold_20": 0.671875087311477,
208
+ "scr_dir2_threshold_20": 0.671875087311477,
209
+ "scr_dir1_threshold_50": -0.11475423184770137,
210
+ "scr_metric_threshold_50": 0.7226561408606537,
211
+ "scr_dir2_threshold_50": 0.7226561408606537,
212
+ "scr_dir1_threshold_100": -0.09836067709563222,
213
+ "scr_metric_threshold_100": 0.753906199068305,
214
+ "scr_dir2_threshold_100": 0.753906199068305,
215
+ "scr_dir1_threshold_500": -0.09836067709563222,
216
+ "scr_metric_threshold_500": 0.828124912688523,
217
+ "scr_dir2_threshold_500": 0.828124912688523
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.03589728071373967,
222
+ "scr_metric_threshold_2": 0.0927419335456348,
223
+ "scr_dir2_threshold_2": 0.0927419335456348,
224
+ "scr_dir1_threshold_5": 0.14871787347736873,
225
+ "scr_metric_threshold_5": 0.16129039235714715,
226
+ "scr_dir2_threshold_5": 0.16129039235714715,
227
+ "scr_dir1_threshold_10": 0.2102561563194528,
228
+ "scr_metric_threshold_10": 0.2500001201706423,
229
+ "scr_dir2_threshold_10": 0.2500001201706423,
230
+ "scr_dir1_threshold_20": 0.2615384662409978,
231
+ "scr_metric_threshold_20": 0.3145161329087304,
232
+ "scr_dir2_threshold_20": 0.3145161329087304,
233
+ "scr_dir1_threshold_50": 0.338461472626031,
234
+ "scr_metric_threshold_50": 0.29032265817460795,
235
+ "scr_dir2_threshold_50": 0.29032265817460795,
236
+ "scr_dir1_threshold_100": 0.38461533759002214,
237
+ "scr_metric_threshold_100": 0.5161290632698432,
238
+ "scr_dir2_threshold_100": 0.5161290632698432,
239
+ "scr_dir1_threshold_500": 0.3282048883757795,
240
+ "scr_metric_threshold_500": 0.572580664543652,
241
+ "scr_dir2_threshold_500": 0.572580664543652
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.07692315990900178,
246
+ "scr_metric_threshold_2": 0.3761060814932598,
247
+ "scr_dir2_threshold_2": 0.3761060814932598,
248
+ "scr_dir1_threshold_5": 0.12669681061232305,
249
+ "scr_metric_threshold_5": 0.46902655334048415,
250
+ "scr_dir2_threshold_5": 0.46902655334048415,
251
+ "scr_dir1_threshold_10": 0.2986424436284392,
252
+ "scr_metric_threshold_10": 0.5442476113967241,
253
+ "scr_dir2_threshold_10": 0.5442476113967241,
254
+ "scr_dir1_threshold_20": 0.4343891805440744,
255
+ "scr_metric_threshold_20": 0.615044211611248,
256
+ "scr_dir2_threshold_20": 0.615044211611248,
257
+ "scr_dir1_threshold_50": 0.5656108194559256,
258
+ "scr_metric_threshold_50": 0.6902655334048413,
259
+ "scr_dir2_threshold_50": 0.6902655334048413,
260
+ "scr_dir1_threshold_100": 0.1990951422217967,
261
+ "scr_metric_threshold_100": 0.7477874414074502,
262
+ "scr_dir2_threshold_100": 0.7477874414074502,
263
+ "scr_dir1_threshold_500": 0.09954757111089835,
264
+ "scr_metric_threshold_500": 0.6548672332975793,
265
+ "scr_dir2_threshold_500": 0.6548672332975793
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.01716741381374498,
270
+ "scr_metric_threshold_2": 0.01716741381374498,
271
+ "scr_dir2_threshold_2": 0.10000011353263609,
272
+ "scr_dir1_threshold_5": 0.07296144475494565,
273
+ "scr_metric_threshold_5": 0.07296144475494565,
274
+ "scr_dir2_threshold_5": 0.16666676127719673,
275
+ "scr_dir1_threshold_10": 0.1244634303822985,
276
+ "scr_metric_threshold_10": 0.1244634303822985,
277
+ "scr_dir2_threshold_10": 0.21904759742235502,
278
+ "scr_dir1_threshold_20": 0.17596567182353345,
279
+ "scr_metric_threshold_20": 0.17596567182353345,
280
+ "scr_dir2_threshold_20": 0.22380944001162575,
281
+ "scr_dir1_threshold_50": 0.25751069557841055,
282
+ "scr_metric_threshold_50": 0.25751069557841055,
283
+ "scr_dir2_threshold_50": 0.25238077937884024,
284
+ "scr_dir1_threshold_100": 0.2489271165784791,
285
+ "scr_metric_threshold_100": 0.2489271165784791,
286
+ "scr_dir2_threshold_100": 0.22857128260089646,
287
+ "scr_dir1_threshold_500": 0.03862661712745569,
288
+ "scr_metric_threshold_500": 0.03862661712745569,
289
+ "scr_dir2_threshold_500": 0.22380944001162575
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.12.hook_resid_post__trainer_3",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_4_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732138142410,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.24308931258340039,
76
+ "scr_metric_threshold_2": 0.1678481937993198,
77
+ "scr_dir2_threshold_2": 0.17069921066663596,
78
+ "scr_dir1_threshold_5": 0.28168484095648305,
79
+ "scr_metric_threshold_5": 0.2404423317094678,
80
+ "scr_dir2_threshold_5": 0.24936323604359617,
81
+ "scr_dir1_threshold_10": 0.328359605466729,
82
+ "scr_metric_threshold_10": 0.3017122876477117,
83
+ "scr_dir2_threshold_10": 0.3047242285315808,
84
+ "scr_dir1_threshold_20": 0.3698277437441625,
85
+ "scr_metric_threshold_20": 0.3421139070292803,
86
+ "scr_dir2_threshold_20": 0.34357515151009016,
87
+ "scr_dir1_threshold_50": 0.25896548346477694,
88
+ "scr_metric_threshold_50": 0.3607845429772905,
89
+ "scr_dir2_threshold_50": 0.35734338976580415,
90
+ "scr_dir1_threshold_100": 0.2597908683841579,
91
+ "scr_metric_threshold_100": 0.3951372867871868,
92
+ "scr_dir2_threshold_100": 0.40230059697936427,
93
+ "scr_dir1_threshold_500": 0.05705960789232327,
94
+ "scr_metric_threshold_500": 0.3154367326261756,
95
+ "scr_dir2_threshold_500": 0.29689495977354474
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.5312505238688624,
102
+ "scr_metric_threshold_2": 0.039408798291137845,
103
+ "scr_dir2_threshold_2": 0.039408798291137845,
104
+ "scr_dir1_threshold_5": 0.578124912688523,
105
+ "scr_metric_threshold_5": 0.07142852948301047,
106
+ "scr_dir2_threshold_5": 0.07142852948301047,
107
+ "scr_dir1_threshold_10": 0.5468753201420825,
108
+ "scr_metric_threshold_10": 0.07881774339173903,
109
+ "scr_dir2_threshold_10": 0.07881774339173903,
110
+ "scr_dir1_threshold_20": 0.6250002328306055,
111
+ "scr_metric_threshold_20": 0.11576351931645514,
112
+ "scr_dir2_threshold_20": 0.11576351931645514,
113
+ "scr_dir1_threshold_50": 0.39062549476503666,
114
+ "scr_metric_threshold_50": 0.24876848881678912,
115
+ "scr_dir2_threshold_50": 0.24876848881678912,
116
+ "scr_dir1_threshold_100": 0.3125005820765137,
117
+ "scr_metric_threshold_100": 0.2807880731991984,
118
+ "scr_dir2_threshold_100": 0.2807880731991984,
119
+ "scr_dir1_threshold_500": 0.04687532014208255,
120
+ "scr_metric_threshold_500": 0.20197032980745938,
121
+ "scr_dir2_threshold_500": 0.20197032980745938
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.32673256224996294,
126
+ "scr_metric_threshold_2": 0.19088327458058965,
127
+ "scr_dir2_threshold_2": 0.19088327458058965,
128
+ "scr_dir1_threshold_5": 0.31683170069737426,
129
+ "scr_metric_threshold_5": 0.19943017282119982,
130
+ "scr_dir2_threshold_5": 0.19943017282119982,
131
+ "scr_dir1_threshold_10": 0.47524784611852827,
132
+ "scr_metric_threshold_10": 0.2535613774140517,
133
+ "scr_dir2_threshold_10": 0.2535613774140517,
134
+ "scr_dir1_threshold_20": 0.5445544671315828,
135
+ "scr_metric_threshold_20": 0.3675213791325673,
136
+ "scr_dir2_threshold_20": 0.3675213791325673,
137
+ "scr_dir1_threshold_50": 0.5643567803816939,
138
+ "scr_metric_threshold_50": 0.3276353445683274,
139
+ "scr_dir2_threshold_50": 0.3276353445683274,
140
+ "scr_dir1_threshold_100": 0.6039602265920487,
141
+ "scr_metric_threshold_100": 0.4330484480462328,
142
+ "scr_dir2_threshold_100": 0.4330484480462328,
143
+ "scr_dir1_threshold_500": -0.37623746015784015,
144
+ "scr_metric_threshold_500": 0.23931637719923726,
145
+ "scr_dir2_threshold_500": 0.23931637719923726
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5555557658011382,
150
+ "scr_metric_threshold_2": 0.06329122328486143,
151
+ "scr_dir2_threshold_2": 0.06329122328486143,
152
+ "scr_dir1_threshold_5": 0.5873013770560047,
153
+ "scr_metric_threshold_5": 0.11139246847857727,
154
+ "scr_dir2_threshold_5": 0.11139246847857727,
155
+ "scr_dir1_threshold_10": 0.4444442341988618,
156
+ "scr_metric_threshold_10": 0.253164591343807,
157
+ "scr_dir2_threshold_10": 0.253164591343807,
158
+ "scr_dir1_threshold_20": 0.5873013770560047,
159
+ "scr_metric_threshold_20": 0.25822796767346196,
160
+ "scr_dir2_threshold_20": 0.25822796767346196,
161
+ "scr_dir1_threshold_50": -1.0793640281371664,
162
+ "scr_metric_threshold_50": 0.3164558146286684,
163
+ "scr_dir2_threshold_50": 0.3164558146286684,
164
+ "scr_dir1_threshold_100": -0.12698339112458784,
165
+ "scr_metric_threshold_100": 0.389873488777201,
166
+ "scr_dir2_threshold_100": 0.389873488777201,
167
+ "scr_dir1_threshold_500": -0.1111105854971546,
168
+ "scr_metric_threshold_500": 0.08607603952376044,
169
+ "scr_dir2_threshold_500": 0.08607603952376044
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.3779526598226128,
174
+ "scr_metric_threshold_2": 0.21301783078241324,
175
+ "scr_dir2_threshold_2": 0.21301783078241324,
176
+ "scr_dir1_threshold_5": 0.409448793029165,
177
+ "scr_metric_threshold_5": 0.28402371559484996,
178
+ "scr_dir2_threshold_5": 0.28402371559484996,
179
+ "scr_dir1_threshold_10": 0.5118111086184579,
180
+ "scr_metric_threshold_10": 0.33136091335477336,
181
+ "scr_dir2_threshold_10": 0.33136091335477336,
182
+ "scr_dir1_threshold_20": 0.24409421102676765,
183
+ "scr_metric_threshold_20": 0.1804733419989317,
184
+ "scr_dir2_threshold_20": 0.1804733419989317,
185
+ "scr_dir1_threshold_50": 0.7559053196452256,
186
+ "scr_metric_threshold_50": -0.00887562538586513,
187
+ "scr_dir2_threshold_50": -0.00887562538586513,
188
+ "scr_dir1_threshold_100": 0.19685024588094274,
189
+ "scr_metric_threshold_100": -0.04437865596463505,
190
+ "scr_dir2_threshold_100": -0.04437865596463505,
191
+ "scr_dir1_threshold_500": 0.5039367233208147,
192
+ "scr_metric_threshold_500": -0.11242599898178338,
193
+ "scr_dir2_threshold_500": -0.11242599898178338
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.00546440968122594,
198
+ "scr_metric_threshold_2": 0.4335936845163922,
199
+ "scr_dir2_threshold_2": 0.4335936845163922,
200
+ "scr_dir1_threshold_5": 0.0,
201
+ "scr_metric_threshold_5": 0.6484374272404357,
202
+ "scr_dir2_threshold_5": 0.6484374272404357,
203
+ "scr_dir1_threshold_10": 0.08196712234356307,
204
+ "scr_metric_threshold_10": 0.671875087311477,
205
+ "scr_dir2_threshold_10": 0.671875087311477,
206
+ "scr_dir1_threshold_20": 0.08196712234356307,
207
+ "scr_metric_threshold_20": 0.7070313445874334,
208
+ "scr_dir2_threshold_20": 0.7070313445874334,
209
+ "scr_dir1_threshold_50": 0.23497254766823733,
210
+ "scr_metric_threshold_50": 0.75781239813661,
211
+ "scr_dir2_threshold_50": 0.75781239813661,
212
+ "scr_dir1_threshold_100": 0.06557389329988525,
213
+ "scr_metric_threshold_100": 0.8242187136202179,
214
+ "scr_dir2_threshold_100": 0.8242187136202179,
215
+ "scr_dir1_threshold_500": -0.3825135633116857,
216
+ "scr_metric_threshold_500": 0.9374998835846973,
217
+ "scr_dir2_threshold_500": 0.9374998835846973
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.0410254200064373,
222
+ "scr_metric_threshold_2": 0.08064531634921589,
223
+ "scr_dir2_threshold_2": 0.08064531634921589,
224
+ "scr_dir1_threshold_5": 0.1333331499344196,
225
+ "scr_metric_threshold_5": 0.14112912335516434,
226
+ "scr_dir2_threshold_5": 0.14112912335516434,
227
+ "scr_dir1_threshold_10": 0.18461515419110838,
228
+ "scr_metric_threshold_10": 0.1653225980892868,
229
+ "scr_dir2_threshold_10": 0.1653225980892868,
230
+ "scr_dir1_threshold_20": 0.28205102341178834,
231
+ "scr_metric_threshold_20": 0.2983870696388872,
232
+ "scr_dir2_threshold_20": 0.2983870696388872,
233
+ "scr_dir1_threshold_50": 0.369230614047073,
234
+ "scr_metric_threshold_50": 0.2661291834404855,
235
+ "scr_dir2_threshold_50": 0.2661291834404855,
236
+ "scr_dir1_threshold_100": 0.36410247475437535,
237
+ "scr_metric_threshold_100": 0.3064517214444511,
238
+ "scr_dir2_threshold_100": 0.3064517214444511,
239
+ "scr_dir1_threshold_500": 0.24615374269804866,
240
+ "scr_metric_threshold_500": 0.18951607282340924,
241
+ "scr_dir2_threshold_500": 0.18951607282340924
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.07239833160947363,
246
+ "scr_metric_threshold_2": 0.28761059496245833,
247
+ "scr_dir2_threshold_2": 0.28761059496245833,
248
+ "scr_dir1_threshold_5": 0.18552038761895642,
249
+ "scr_metric_threshold_5": 0.4247788100750834,
250
+ "scr_dir2_threshold_5": 0.4247788100750834,
251
+ "scr_dir1_threshold_10": 0.24886879292511796,
252
+ "scr_metric_threshold_10": 0.5265487250804465,
253
+ "scr_dir2_threshold_10": 0.5265487250804465,
254
+ "scr_dir1_threshold_20": 0.4434388371431307,
255
+ "scr_metric_threshold_20": 0.6592919548766487,
256
+ "scr_dir2_threshold_20": 0.6592919548766487,
257
+ "scr_dir1_threshold_50": 0.5656108194559256,
258
+ "scr_metric_threshold_50": 0.7079644197211189,
259
+ "scr_dir2_threshold_50": 0.7079644197211189,
260
+ "scr_dir1_threshold_100": 0.4434388371431307,
261
+ "scr_metric_threshold_100": 0.752212426723873,
262
+ "scr_dir2_threshold_100": 0.752212426723873,
263
+ "scr_dir1_threshold_500": 0.36199084893460076,
264
+ "scr_metric_threshold_500": 0.8141593200429048,
265
+ "scr_dir2_threshold_500": 0.8141593200429048
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.03433482762748996,
270
+ "scr_metric_threshold_2": 0.03433482762748996,
271
+ "scr_dir2_threshold_2": 0.05714296256601923,
272
+ "scr_dir1_threshold_5": 0.042918406627421406,
273
+ "scr_metric_threshold_5": 0.042918406627421406,
274
+ "scr_dir2_threshold_5": 0.11428564130044823,
275
+ "scr_dir1_threshold_10": 0.13304726519611204,
276
+ "scr_metric_threshold_10": 0.13304726519611204,
277
+ "scr_dir2_threshold_10": 0.1571427922670651,
278
+ "scr_dir1_threshold_20": 0.15021467900985702,
279
+ "scr_metric_threshold_20": 0.15021467900985702,
280
+ "scr_dir2_threshold_20": 0.1619046348563358,
281
+ "scr_dir1_threshold_50": 0.2703863198921898,
282
+ "scr_metric_threshold_50": 0.2703863198921898,
283
+ "scr_dir2_threshold_50": 0.24285709420029883,
284
+ "scr_dir1_threshold_100": 0.21888407845095484,
285
+ "scr_metric_threshold_100": 0.21888407845095484,
286
+ "scr_dir2_threshold_100": 0.2761905599883743,
287
+ "scr_dir1_threshold_500": 0.1673818370097199,
288
+ "scr_metric_threshold_500": 0.1673818370097199,
289
+ "scr_dir2_threshold_500": 0.019047654188673074
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.12.hook_resid_post__trainer_4",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.12.hook_resid_post__trainer_5_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732139159437,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.11454379661634051,
76
+ "scr_metric_threshold_2": 0.1489821401365793,
77
+ "scr_dir2_threshold_2": 0.15486810073730156,
78
+ "scr_dir1_threshold_5": -0.465099992875216,
79
+ "scr_metric_threshold_5": 0.18278838783299153,
80
+ "scr_dir2_threshold_5": 0.19075386156666824,
81
+ "scr_dir1_threshold_10": -0.13074244987691044,
82
+ "scr_metric_threshold_10": 0.246814651572078,
83
+ "scr_dir2_threshold_10": 0.2481660717148974,
84
+ "scr_dir1_threshold_20": -0.8334494555465638,
85
+ "scr_metric_threshold_20": 0.23807729958164023,
86
+ "scr_dir2_threshold_20": 0.20194402654387103,
87
+ "scr_dir1_threshold_50": -0.7915135001680044,
88
+ "scr_metric_threshold_50": 0.2601991046073314,
89
+ "scr_dir2_threshold_50": 0.16614385022737582,
90
+ "scr_dir1_threshold_100": -1.5777099202986475,
91
+ "scr_metric_threshold_100": 0.3084274131092598,
92
+ "scr_dir2_threshold_100": 0.15734425289434567,
93
+ "scr_dir1_threshold_500": -2.0165053348042465,
94
+ "scr_metric_threshold_500": 0.09799072896505467,
95
+ "scr_dir2_threshold_500": 0.09170621584385272
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.4531256111803394,
102
+ "scr_metric_threshold_2": 0.024630517283144072,
103
+ "scr_dir2_threshold_2": 0.024630517283144072,
104
+ "scr_dir1_threshold_5": 0.3749997671693945,
105
+ "scr_metric_threshold_5": 0.12068956404929862,
106
+ "scr_dir2_threshold_5": 0.12068956404929862,
107
+ "scr_dir1_threshold_10": -0.21874994179234863,
108
+ "scr_metric_threshold_10": 0.06157629320786018,
109
+ "scr_dir2_threshold_10": 0.06157629320786018,
110
+ "scr_dir1_threshold_20": 0.2343756693879908,
111
+ "scr_metric_threshold_20": 0.10837430540772658,
112
+ "scr_dir2_threshold_20": 0.10837430540772658,
113
+ "scr_dir1_threshold_50": -2.249999534338789,
114
+ "scr_metric_threshold_50": 0.24630531964090405,
115
+ "scr_dir2_threshold_50": 0.24630531964090405,
116
+ "scr_dir1_threshold_100": -6.406249359715835,
117
+ "scr_metric_threshold_100": 0.2832512423750835,
118
+ "scr_dir2_threshold_100": 0.2832512423750835,
119
+ "scr_dir1_threshold_500": -6.171873690327844,
120
+ "scr_metric_threshold_500": 0.24630531964090405,
121
+ "scr_dir2_threshold_500": 0.24630531964090405
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": -0.17821786852633137,
126
+ "scr_metric_threshold_2": 0.16239327415096072,
127
+ "scr_dir2_threshold_2": 0.16239327415096072,
128
+ "scr_dir1_threshold_5": -1.3663365986052514,
129
+ "scr_metric_threshold_5": 0.04558413653844423,
130
+ "scr_dir2_threshold_5": 0.04558413653844423,
131
+ "scr_dir1_threshold_10": 0.09900979581575439,
132
+ "scr_metric_threshold_10": 0.0826212050224808,
133
+ "scr_dir2_threshold_10": 0.0826212050224808,
134
+ "scr_dir1_threshold_20": -2.089108934263166,
135
+ "scr_metric_threshold_20": 0.15669517217675646,
136
+ "scr_dir2_threshold_20": 0.15669517217675646,
137
+ "scr_dir1_threshold_50": 0.5346536055789941,
138
+ "scr_metric_threshold_50": 0.03703706848403657,
139
+ "scr_dir2_threshold_50": 0.03703706848403657,
140
+ "scr_dir1_threshold_100": -0.7227723356579142,
141
+ "scr_metric_threshold_100": -0.028490000429628907,
142
+ "scr_dir2_threshold_100": -0.028490000429628907,
143
+ "scr_dir1_threshold_500": -3.3366334238025517,
144
+ "scr_metric_threshold_500": 0.08831913718288759,
145
+ "scr_dir2_threshold_500": 0.08831913718288759
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3492064543291405,
150
+ "scr_metric_threshold_2": 0.09113926495559606,
151
+ "scr_dir2_threshold_2": 0.09113926495559606,
152
+ "scr_dir1_threshold_5": -1.2380949227268643,
153
+ "scr_metric_threshold_5": 0.09367087767151386,
154
+ "scr_dir2_threshold_5": 0.09367087767151386,
155
+ "scr_dir1_threshold_10": 0.5555557658011382,
156
+ "scr_metric_threshold_10": 0.1898735189567649,
157
+ "scr_dir2_threshold_10": 0.1898735189567649,
158
+ "scr_dir1_threshold_20": -3.682538210820604,
159
+ "scr_metric_threshold_20": 0.23037977510490792,
160
+ "scr_dir2_threshold_20": 0.23037977510490792,
161
+ "scr_dir1_threshold_50": -6.095235887659478,
162
+ "scr_metric_threshold_50": 0.25822796767346196,
163
+ "scr_dir2_threshold_50": 0.25822796767346196,
164
+ "scr_dir1_threshold_100": -6.190473667529199,
165
+ "scr_metric_threshold_100": 0.14683549919488467,
166
+ "scr_dir2_threshold_100": 0.14683549919488467,
167
+ "scr_dir1_threshold_500": -5.3333317564914635,
168
+ "scr_metric_threshold_500": -0.11645554301259348,
169
+ "scr_dir2_threshold_500": -0.11645554301259348
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.05511788111546126,
174
+ "scr_metric_threshold_2": 0.07988168654340497,
175
+ "scr_dir2_threshold_2": 0.07988168654340497,
176
+ "scr_dir1_threshold_5": -2.133858918123852,
177
+ "scr_metric_threshold_5": 0.15088757135584166,
178
+ "scr_dir2_threshold_5": 0.15088757135584166,
179
+ "scr_dir1_threshold_10": -2.259843450950061,
180
+ "scr_metric_threshold_10": 0.21301783078241324,
181
+ "scr_dir2_threshold_10": 0.21301783078241324,
182
+ "scr_dir1_threshold_20": -1.3543313812417106,
183
+ "scr_metric_threshold_20": 0.13609468603429664,
184
+ "scr_dir2_threshold_20": 0.13609468603429664,
185
+ "scr_dir1_threshold_50": 0.7716536209125051,
186
+ "scr_metric_threshold_50": 0.3491125168167099,
187
+ "scr_dir2_threshold_50": 0.3491125168167099,
188
+ "scr_dir1_threshold_100": -0.07874056768038387,
189
+ "scr_metric_threshold_100": 0.4822486610557182,
190
+ "scr_dir2_threshold_100": 0.4822486610557182,
191
+ "scr_dir1_threshold_500": -1.314961332065522,
192
+ "scr_metric_threshold_500": -0.33136091335477336,
193
+ "scr_dir2_threshold_500": -0.33136091335477336
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.0,
198
+ "scr_metric_threshold_2": 0.40234385913934634,
199
+ "scr_dir2_threshold_2": 0.40234385913934634,
200
+ "scr_dir1_threshold_5": 0.17486338975796936,
201
+ "scr_metric_threshold_5": 0.5390624563442614,
202
+ "scr_dir2_threshold_5": 0.5390624563442614,
203
+ "scr_dir1_threshold_10": 0.19672135419126444,
204
+ "scr_metric_threshold_10": 0.63281239813661,
205
+ "scr_dir2_threshold_10": 0.63281239813661,
206
+ "scr_dir1_threshold_20": -0.14754101564344832,
207
+ "scr_metric_threshold_20": 0.7734374272404357,
208
+ "scr_dir2_threshold_20": 0.7734374272404357,
209
+ "scr_dir1_threshold_50": 0.08743153202478901,
210
+ "scr_metric_threshold_50": 0.8359375436557386,
211
+ "scr_dir2_threshold_50": 0.8359375436557386,
212
+ "scr_dir1_threshold_100": 0.19672135419126444,
213
+ "scr_metric_threshold_100": 0.871093800931695,
214
+ "scr_dir2_threshold_100": 0.871093800931695,
215
+ "scr_dir1_threshold_500": 0.08743153202478901,
216
+ "scr_metric_threshold_500": 0.503906199068305,
217
+ "scr_dir2_threshold_500": 0.503906199068305
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.07179486709233554,
222
+ "scr_metric_threshold_2": 0.1008065853511987,
223
+ "scr_dir2_threshold_2": 0.1008065853511987,
224
+ "scr_dir1_threshold_5": 0.1179487320563267,
225
+ "scr_metric_threshold_5": 0.08064531634921589,
226
+ "scr_dir2_threshold_5": 0.08064531634921589,
227
+ "scr_dir1_threshold_10": 0.10256400851337757,
228
+ "scr_metric_threshold_10": 0.16935480382142643,
229
+ "scr_dir2_threshold_10": 0.16935480382142643,
230
+ "scr_dir1_threshold_20": 0.3179486097903842,
231
+ "scr_metric_threshold_20": 0.18145166135912996,
232
+ "scr_dir2_threshold_20": 0.18145166135912996,
233
+ "scr_dir1_threshold_50": 0.09230742426312609,
234
+ "scr_metric_threshold_50": 0.2943548639067476,
235
+ "scr_dir2_threshold_50": 0.2943548639067476,
236
+ "scr_dir1_threshold_100": 0.03589728071373967,
237
+ "scr_metric_threshold_100": 0.32661299044643394,
238
+ "scr_dir2_threshold_100": 0.32661299044643394,
239
+ "scr_dir1_threshold_500": 0.36410247475437535,
240
+ "scr_metric_threshold_500": -0.600806345009914,
241
+ "scr_dir2_threshold_500": -0.600806345009914
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.12669681061232305,
246
+ "scr_metric_threshold_2": 0.2920353165415277,
247
+ "scr_dir2_threshold_2": 0.2920353165415277,
248
+ "scr_dir1_threshold_5": 0.2895927870293829,
249
+ "scr_metric_threshold_5": 0.37168135991419043,
250
+ "scr_dir2_threshold_5": 0.37168135991419043,
251
+ "scr_dir1_threshold_10": 0.38009043183696917,
252
+ "scr_metric_threshold_10": 0.5265487250804465,
253
+ "scr_dir2_threshold_10": 0.5265487250804465,
254
+ "scr_dir1_threshold_20": 0.04072399410426497,
255
+ "scr_metric_threshold_20": 0.3053097450160893,
256
+ "scr_dir2_threshold_20": 0.3053097450160893,
257
+ "scr_dir1_threshold_50": 0.4841628312473957,
258
+ "scr_metric_threshold_50": 0.017699150053630992,
259
+ "scr_dir2_threshold_50": 0.017699150053630992,
260
+ "scr_dir1_threshold_100": 0.14479639351469145,
261
+ "scr_metric_threshold_100": -0.013274428474561608,
262
+ "scr_dir2_threshold_100": -0.013274428474561608,
263
+ "scr_dir1_threshold_500": -0.6199095681630308,
264
+ "scr_metric_threshold_500": 0.8008848915683432,
265
+ "scr_dir2_threshold_500": 0.8008848915683432
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.03862661712745569,
270
+ "scr_metric_threshold_2": 0.03862661712745569,
271
+ "scr_dir2_threshold_2": 0.08571430193323373,
272
+ "scr_dir1_threshold_5": 0.060085820441166386,
273
+ "scr_metric_threshold_5": 0.060085820441166386,
274
+ "scr_dir2_threshold_5": 0.12380961031057988,
275
+ "scr_dir1_threshold_10": 0.09871243756862208,
276
+ "scr_metric_threshold_10": 0.09871243756862208,
277
+ "scr_dir2_threshold_10": 0.10952379871117751,
278
+ "scr_dir1_threshold_20": 0.012875624313779262,
279
+ "scr_metric_threshold_20": 0.012875624313779262,
280
+ "scr_dir2_threshold_20": -0.2761905599883743,
281
+ "scr_dir1_threshold_50": 0.042918406627421406,
282
+ "scr_metric_threshold_50": 0.042918406627421406,
283
+ "scr_dir2_threshold_50": -0.7095236284122234,
284
+ "scr_dir1_threshold_100": 0.399141539774454,
285
+ "scr_metric_threshold_100": 0.399141539774454,
286
+ "scr_dir2_threshold_100": -0.8095237419448594,
287
+ "scr_dir1_threshold_500": 0.19313308563727843,
288
+ "scr_metric_threshold_500": 0.19313308563727843,
289
+ "scr_dir2_threshold_500": 0.14285698066766273
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.12.hook_resid_post__trainer_5",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_0_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732141876741,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.14458449051880568,
76
+ "scr_metric_threshold_2": 0.20055652374876445,
77
+ "scr_dir2_threshold_2": 0.20055652374876445,
78
+ "scr_dir1_threshold_5": 0.11786576759301849,
79
+ "scr_metric_threshold_5": 0.2917249288374464,
80
+ "scr_dir2_threshold_5": 0.2917249288374464,
81
+ "scr_dir1_threshold_10": -0.03768217345852144,
82
+ "scr_metric_threshold_10": 0.3549268546902908,
83
+ "scr_dir2_threshold_10": 0.3549268546902908,
84
+ "scr_dir1_threshold_20": -0.13720514853629118,
85
+ "scr_metric_threshold_20": 0.3829567080983869,
86
+ "scr_dir2_threshold_20": 0.3829567080983869,
87
+ "scr_dir1_threshold_50": -0.20741696621276426,
88
+ "scr_metric_threshold_50": 0.35737036871670036,
89
+ "scr_dir2_threshold_50": 0.35737036871670036,
90
+ "scr_dir1_threshold_100": -0.7828226825137328,
91
+ "scr_metric_threshold_100": 0.3663790179220527,
92
+ "scr_dir2_threshold_100": 0.3663790179220527,
93
+ "scr_dir1_threshold_500": -1.0826240675365764,
94
+ "scr_metric_threshold_500": 0.26227172194728066,
95
+ "scr_dir2_threshold_500": 0.26227172194728066
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.27941247367097016,
102
+ "scr_metric_threshold_2": 0.08101266319410545,
103
+ "scr_dir2_threshold_2": 0.08101266319410545,
104
+ "scr_dir1_threshold_5": 0.2500002191346635,
105
+ "scr_metric_threshold_5": 0.1518987246267203,
106
+ "scr_dir2_threshold_5": 0.1518987246267203,
107
+ "scr_dir1_threshold_10": 0.2352945301358372,
108
+ "scr_metric_threshold_10": 0.12405068295598567,
109
+ "scr_dir2_threshold_10": 0.12405068295598567,
110
+ "scr_dir1_threshold_20": 0.14705951960422528,
111
+ "scr_metric_threshold_20": 0.1518987246267203,
112
+ "scr_dir2_threshold_20": 0.1518987246267203,
113
+ "scr_dir1_threshold_50": -0.30882297512996876,
114
+ "scr_metric_threshold_50": 0.14177227376304907,
115
+ "scr_dir2_threshold_50": 0.14177227376304907,
116
+ "scr_dir1_threshold_100": -1.7058818373302036,
117
+ "scr_metric_threshold_100": 0.1898735189567649,
118
+ "scr_dir2_threshold_100": 0.1898735189567649,
119
+ "scr_dir1_threshold_500": -1.823528225859468,
120
+ "scr_metric_threshold_500": 0.04810139609153522,
121
+ "scr_dir2_threshold_500": 0.04810139609153522
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.11711721870769615,
126
+ "scr_metric_threshold_2": 0.2323529071461404,
127
+ "scr_dir2_threshold_2": 0.2323529071461404,
128
+ "scr_dir1_threshold_5": 0.11711721870769615,
129
+ "scr_metric_threshold_5": 0.3441175655922755,
130
+ "scr_dir2_threshold_5": 0.3441175655922755,
131
+ "scr_dir1_threshold_10": 0.2162161146256372,
132
+ "scr_metric_threshold_10": 0.4323529422076927,
133
+ "scr_dir2_threshold_10": 0.4323529422076927,
134
+ "scr_dir1_threshold_20": 0.26126111613186265,
135
+ "scr_metric_threshold_20": 0.473529404546151,
136
+ "scr_dir2_threshold_20": 0.473529404546151,
137
+ "scr_dir1_threshold_50": 0.25225222322637253,
138
+ "scr_metric_threshold_50": 0.4352940805230623,
139
+ "scr_dir2_threshold_50": 0.4352940805230623,
140
+ "scr_dir1_threshold_100": -0.7297299909626472,
141
+ "scr_metric_threshold_100": 0.42941180389232314,
142
+ "scr_dir2_threshold_100": 0.42941180389232314,
143
+ "scr_dir1_threshold_500": -0.7657655625846077,
144
+ "scr_metric_threshold_500": 0.07941178636154701,
145
+ "scr_dir2_threshold_500": 0.07941178636154701
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.37036987979734426,
150
+ "scr_metric_threshold_2": 0.03921562039086623,
151
+ "scr_dir2_threshold_2": 0.03921562039086623,
152
+ "scr_dir1_threshold_5": 0.16666666666666666,
153
+ "scr_metric_threshold_5": 0.09558817513948656,
154
+ "scr_dir2_threshold_5": 0.09558817513948656,
155
+ "scr_dir1_threshold_10": 0.05555481969601642,
156
+ "scr_metric_threshold_10": 0.22058817513948656,
157
+ "scr_dir2_threshold_10": 0.22058817513948656,
158
+ "scr_dir1_threshold_20": -0.2777774098480082,
159
+ "scr_metric_threshold_20": 0.28921562039086623,
160
+ "scr_dir2_threshold_20": 0.28921562039086623,
161
+ "scr_dir1_threshold_50": -0.4814817267679945,
162
+ "scr_metric_threshold_50": 0.09313722625689837,
163
+ "scr_dir2_threshold_50": 0.09313722625689837,
164
+ "scr_dir1_threshold_100": -2.481480622978686,
165
+ "scr_metric_threshold_100": 0.036764671508278036,
166
+ "scr_dir2_threshold_100": 0.036764671508278036,
167
+ "scr_dir1_threshold_500": -4.648147289645353,
168
+ "scr_metric_threshold_500": 0.0882351824019036,
169
+ "scr_dir2_threshold_500": 0.0882351824019036
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.17968748544808716,
174
+ "scr_metric_threshold_2": 0.2835821426639679,
175
+ "scr_dir2_threshold_2": 0.2835821426639679,
176
+ "scr_dir1_threshold_5": -0.06250011641530274,
177
+ "scr_metric_threshold_5": 0.3552238928127126,
178
+ "scr_dir2_threshold_5": 0.3552238928127126,
179
+ "scr_dir1_threshold_10": -0.08593731082513303,
180
+ "scr_metric_threshold_10": 0.44179100016226697,
181
+ "scr_dir2_threshold_10": 0.44179100016226697,
182
+ "scr_dir1_threshold_20": -0.22656233992895872,
183
+ "scr_metric_threshold_20": 0.3701492500135222,
184
+ "scr_dir2_threshold_20": 0.3701492500135222,
185
+ "scr_dir1_threshold_50": -0.33593731082513306,
186
+ "scr_metric_threshold_50": 0.519402999945911,
187
+ "scr_dir2_threshold_50": 0.519402999945911,
188
+ "scr_dir1_threshold_100": -0.2421876018633899,
189
+ "scr_metric_threshold_100": 0.5701491432589467,
190
+ "scr_dir2_threshold_100": 0.5701491432589467,
191
+ "scr_dir1_threshold_500": -0.07812491268852294,
192
+ "scr_metric_threshold_500": 0.3761193217241291,
193
+ "scr_dir2_threshold_500": 0.3761193217241291
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.0178572632321391,
198
+ "scr_metric_threshold_2": 0.21402222219289643,
199
+ "scr_dir2_threshold_2": 0.21402222219289643,
200
+ "scr_dir1_threshold_5": 0.08928560658177027,
201
+ "scr_metric_threshold_5": 0.4132840484350373,
202
+ "scr_dir2_threshold_5": 0.4132840484350373,
203
+ "scr_dir1_threshold_10": 0.10119056699968389,
204
+ "scr_metric_threshold_10": 0.5424354235426446,
205
+ "scr_dir2_threshold_10": 0.5424354235426446,
206
+ "scr_dir1_threshold_20": -0.053571434906954686,
207
+ "scr_metric_threshold_20": 0.5867159515649627,
208
+ "scr_dir2_threshold_20": 0.5867159515649627,
209
+ "scr_dir1_threshold_50": 0.011904960417913604,
210
+ "scr_metric_threshold_50": 0.6273062706279338,
211
+ "scr_dir2_threshold_50": 0.6273062706279338,
212
+ "scr_dir1_threshold_100": 0.023809566046364597,
213
+ "scr_metric_threshold_100": 0.6088561056044636,
214
+ "scr_dir2_threshold_100": 0.6088561056044636,
215
+ "scr_dir1_threshold_500": 0.16071430472086407,
216
+ "scr_metric_threshold_500": 0.531365236551236,
217
+ "scr_dir2_threshold_500": 0.531365236551236
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.09356743899412459,
222
+ "scr_metric_threshold_2": 0.04135351824215616,
223
+ "scr_dir2_threshold_2": 0.04135351824215616,
224
+ "scr_dir1_threshold_5": 0.12865505433433813,
225
+ "scr_metric_threshold_5": 0.12781968028610227,
226
+ "scr_dir2_threshold_5": 0.12781968028610227,
227
+ "scr_dir1_threshold_10": 0.06432770144975224,
228
+ "scr_metric_threshold_10": 0.22556389124170353,
229
+ "scr_dir2_threshold_10": 0.22556389124170353,
230
+ "scr_dir1_threshold_20": 0.11695929874265573,
231
+ "scr_metric_threshold_20": 0.29323308102197304,
232
+ "scr_dir2_threshold_20": 0.29323308102197304,
233
+ "scr_dir1_threshold_50": 0.1871345294230828,
234
+ "scr_metric_threshold_50": 0.22932346495650655,
235
+ "scr_dir2_threshold_50": 0.22932346495650655,
236
+ "scr_dir1_threshold_100": 0.25146223087283504,
237
+ "scr_metric_threshold_100": 0.22932346495650655,
238
+ "scr_dir2_threshold_100": 0.22932346495650655,
239
+ "scr_dir1_threshold_500": -0.0175436333875236,
240
+ "scr_metric_threshold_500": 0.1804512474399136,
241
+ "scr_dir2_threshold_500": 0.1804512474399136
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.026548849947199797,
246
+ "scr_metric_threshold_2": 0.5562309138316113,
247
+ "scr_dir2_threshold_2": 0.5562309138316113,
248
+ "scr_dir1_threshold_5": 0.12389375396265269,
249
+ "scr_metric_threshold_5": 0.6200607858682278,
250
+ "scr_dir2_threshold_5": 0.6200607858682278,
251
+ "scr_dir1_threshold_10": -1.070796054068253,
252
+ "scr_metric_threshold_10": 0.5623100441617884,
253
+ "scr_dir2_threshold_10": 0.5623100441617884,
254
+ "scr_dir1_threshold_20": -1.026548322472632,
255
+ "scr_metric_threshold_20": 0.5440728343403922,
256
+ "scr_dir2_threshold_20": 0.5440728343403922,
257
+ "scr_dir1_threshold_50": -0.9026545685099793,
258
+ "scr_metric_threshold_50": 0.5592703884121323,
259
+ "scr_dir2_threshold_50": 0.5592703884121323,
260
+ "scr_dir1_threshold_100": -1.2920347120463587,
261
+ "scr_metric_threshold_100": 0.5440728343403922,
262
+ "scr_dir2_threshold_100": 0.5440728343403922,
263
+ "scr_dir1_threshold_500": -1.353981852764969,
264
+ "scr_metric_threshold_500": 0.5410333597598712,
265
+ "scr_dir2_threshold_500": 0.5410333597598712
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.07211531435288414,
270
+ "scr_metric_threshold_2": 0.1566822023283716,
271
+ "scr_dir2_threshold_2": 0.1566822023283716,
272
+ "scr_dir1_threshold_5": 0.1298077377716632,
273
+ "scr_metric_threshold_5": 0.2258065579390088,
274
+ "scr_dir2_threshold_5": 0.2258065579390088,
275
+ "scr_dir1_threshold_10": 0.18269224431828765,
276
+ "scr_metric_threshold_10": 0.29032267811075807,
277
+ "scr_dir2_threshold_10": 0.29032267811075807,
278
+ "scr_dir1_threshold_20": -0.03846161561251938,
279
+ "scr_metric_threshold_20": 0.3548387982825073,
280
+ "scr_dir2_threshold_20": 0.3548387982825073,
281
+ "scr_dir1_threshold_50": -0.08173086153640712,
282
+ "scr_metric_threshold_50": 0.2534562452481091,
283
+ "scr_dir2_threshold_50": 0.2534562452481091,
284
+ "scr_dir1_threshold_100": -0.08653849184777547,
285
+ "scr_metric_threshold_100": 0.32258060085874635,
286
+ "scr_dir2_threshold_100": 0.32258060085874635,
287
+ "scr_dir1_threshold_500": -0.13461536808303157,
288
+ "scr_metric_threshold_500": 0.2534562452481091,
289
+ "scr_dir2_threshold_500": 0.2534562452481091
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.19.hook_resid_post__trainer_0",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_1_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732142872647,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.1500318486841559,
76
+ "scr_metric_threshold_2": 0.23437684895957797,
77
+ "scr_dir2_threshold_2": 0.23437684895957797,
78
+ "scr_dir1_threshold_5": 0.23027745053329945,
79
+ "scr_metric_threshold_5": 0.3273933704590746,
80
+ "scr_dir2_threshold_5": 0.3273933704590746,
81
+ "scr_dir1_threshold_10": 0.17329589193546283,
82
+ "scr_metric_threshold_10": 0.393808921383005,
83
+ "scr_dir2_threshold_10": 0.393808921383005,
84
+ "scr_dir1_threshold_20": -0.03420168968066933,
85
+ "scr_metric_threshold_20": 0.4495775900839376,
86
+ "scr_dir2_threshold_20": 0.4495775900839376,
87
+ "scr_dir1_threshold_50": -0.1876258659665765,
88
+ "scr_metric_threshold_50": 0.45159496584742553,
89
+ "scr_dir2_threshold_50": 0.45159496584742553,
90
+ "scr_dir1_threshold_100": -0.19983593362915053,
91
+ "scr_metric_threshold_100": 0.4316220346225999,
92
+ "scr_dir2_threshold_100": 0.4316220346225999,
93
+ "scr_dir1_threshold_500": -1.390358879503125,
94
+ "scr_metric_threshold_500": 0.25842460662299116,
95
+ "scr_dir2_threshold_500": 0.25842460662299116
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.3088238516686228,
102
+ "scr_metric_threshold_2": 0.08101266319410545,
103
+ "scr_dir2_threshold_2": 0.08101266319410545,
104
+ "scr_dir1_threshold_5": 0.3088238516686228,
105
+ "scr_metric_threshold_5": 0.1443038864789669,
106
+ "scr_dir2_threshold_5": 0.1443038864789669,
107
+ "scr_dir1_threshold_10": 0.2352945301358372,
108
+ "scr_metric_threshold_10": 0.11392408119449507,
109
+ "scr_dir2_threshold_10": 0.11392408119449507,
110
+ "scr_dir1_threshold_20": 0.029412254536306668,
111
+ "scr_metric_threshold_20": 0.1265822956719035,
112
+ "scr_dir2_threshold_20": 0.1265822956719035,
113
+ "scr_dir1_threshold_50": -0.35294091866510174,
114
+ "scr_metric_threshold_50": 0.22784816238899014,
115
+ "scr_dir2_threshold_50": 0.22784816238899014,
116
+ "scr_dir1_threshold_100": -0.3970579856615807,
117
+ "scr_metric_threshold_100": 0.3164558146286684,
118
+ "scr_dir2_threshold_100": 0.3164558146286684,
119
+ "scr_dir1_threshold_500": -2.294116409592488,
120
+ "scr_metric_threshold_500": 0.2101265715819267,
121
+ "scr_dir2_threshold_500": 0.2101265715819267
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.05405389441171559,
126
+ "scr_metric_threshold_2": 0.14117639221535366,
127
+ "scr_dir2_threshold_2": 0.14117639221535366,
128
+ "scr_dir1_threshold_5": 0.3153155475223531,
129
+ "scr_metric_threshold_5": 0.22352931689227024,
130
+ "scr_dir2_threshold_5": 0.22352931689227024,
131
+ "scr_dir1_threshold_10": 0.2792794389216177,
132
+ "scr_metric_threshold_10": 0.394117618184604,
133
+ "scr_dir2_threshold_10": 0.394117618184604,
134
+ "scr_dir1_threshold_20": 0.3063061176380881,
135
+ "scr_metric_threshold_20": 0.4764705428615205,
136
+ "scr_dir2_threshold_20": 0.4764705428615205,
137
+ "scr_dir1_threshold_50": 0.2792794389216177,
138
+ "scr_metric_threshold_50": 0.4411763571538014,
139
+ "scr_dir2_threshold_50": 0.4411763571538014,
140
+ "scr_dir1_threshold_100": 0.27027000903735277,
141
+ "scr_metric_threshold_100": 0.4411763571538014,
142
+ "scr_dir2_threshold_100": 0.4411763571538014,
143
+ "scr_dir1_threshold_500": 0.05405389441171559,
144
+ "scr_metric_threshold_500": -0.15588243440772448,
145
+ "scr_dir2_threshold_500": -0.15588243440772448
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.3518516065653388,
150
+ "scr_metric_threshold_2": 0.05637255474862033,
151
+ "scr_dir2_threshold_2": 0.05637255474862033,
152
+ "scr_dir1_threshold_5": 0.5,
153
+ "scr_metric_threshold_5": 0.13970583938534753,
154
+ "scr_dir2_threshold_5": 0.13970583938534753,
155
+ "scr_dir1_threshold_10": 0.20370321313067763,
156
+ "scr_metric_threshold_10": 0.26470583938534753,
157
+ "scr_dir2_threshold_10": 0.26470583938534753,
158
+ "scr_dir1_threshold_20": -0.07407419671733059,
159
+ "scr_metric_threshold_20": 0.3848039416201712,
160
+ "scr_dir2_threshold_20": 0.3848039416201712,
161
+ "scr_dir1_threshold_50": -0.4259258032826694,
162
+ "scr_metric_threshold_50": 0.19117635027897312,
163
+ "scr_dir2_threshold_50": 0.19117635027897312,
164
+ "scr_dir1_threshold_100": -0.7222225901519918,
165
+ "scr_metric_threshold_100": 0.07843124078173246,
166
+ "scr_dir2_threshold_100": 0.07843124078173246,
167
+ "scr_dir1_threshold_500": -5.685183836109363,
168
+ "scr_metric_threshold_500": 0.046568613128449184,
169
+ "scr_dir2_threshold_500": 0.046568613128449184
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.14062502910382568,
174
+ "scr_metric_threshold_2": 0.5074626786004048,
175
+ "scr_dir2_threshold_2": 0.5074626786004048,
176
+ "scr_dir1_threshold_5": -0.007812398136610098,
177
+ "scr_metric_threshold_5": 0.5641790715483399,
178
+ "scr_dir2_threshold_5": 0.5641790715483399,
179
+ "scr_dir1_threshold_10": -0.09374970896174313,
180
+ "scr_metric_threshold_10": 0.6238805003515784,
181
+ "scr_dir2_threshold_10": 0.6238805003515784,
182
+ "scr_dir1_threshold_20": -0.32031251455191284,
183
+ "scr_metric_threshold_20": 0.5791044287491495,
184
+ "scr_dir2_threshold_20": 0.5791044287491495,
185
+ "scr_dir1_threshold_50": -0.4453122817213074,
186
+ "scr_metric_threshold_50": 0.6507461788978942,
187
+ "scr_dir2_threshold_50": 0.6507461788978942,
188
+ "scr_dir1_threshold_100": -0.21093754365573852,
189
+ "scr_metric_threshold_100": 0.6149253927856682,
190
+ "scr_dir2_threshold_100": 0.6149253927856682,
191
+ "scr_dir1_threshold_500": -1.2890624563442614,
192
+ "scr_metric_threshold_500": 0.48955228554429175,
193
+ "scr_dir2_threshold_500": 0.48955228554429175
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.041666829278503695,
198
+ "scr_metric_threshold_2": 0.29151287130280784,
199
+ "scr_dir2_threshold_2": 0.29151287130280784,
200
+ "scr_dir1_threshold_5": 0.08333330376754479,
201
+ "scr_metric_threshold_5": 0.4833949394562033,
202
+ "scr_dir2_threshold_5": 0.4833949394562033,
203
+ "scr_dir1_threshold_10": 0.08928560658177027,
204
+ "scr_metric_threshold_10": 0.623616281611903,
205
+ "scr_dir2_threshold_10": 0.623616281611903,
206
+ "scr_dir1_threshold_20": 0.18452387076722868,
207
+ "scr_metric_threshold_20": 0.6531364136834655,
208
+ "scr_dir2_threshold_20": 0.6531364136834655,
209
+ "scr_dir1_threshold_50": -0.08333330376754479,
210
+ "scr_metric_threshold_50": 0.6789667766823134,
211
+ "scr_dir2_threshold_50": 0.6789667766823134,
212
+ "scr_dir1_threshold_100": -0.059523737721180185,
213
+ "scr_metric_threshold_100": 0.6937269526897528,
214
+ "scr_dir2_threshold_100": 0.6937269526897528,
215
+ "scr_dir1_threshold_500": -0.5952377320012645,
216
+ "scr_metric_threshold_500": 0.5756457645735541,
217
+ "scr_dir2_threshold_500": 0.5756457645735541
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.09356743899412459,
222
+ "scr_metric_threshold_2": 0.06015049050583274,
223
+ "scr_dir2_threshold_2": 0.06015049050583274,
224
+ "scr_dir1_threshold_5": 0.26900586426035866,
225
+ "scr_metric_threshold_5": 0.1766918978026952,
226
+ "scr_dir2_threshold_5": 0.1766918978026952,
227
+ "scr_dir1_threshold_10": 0.15204691408286927,
228
+ "scr_metric_threshold_10": 0.22180454160448515,
229
+ "scr_dir2_threshold_10": 0.22180454160448515,
230
+ "scr_dir1_threshold_20": 0.19298275578409038,
231
+ "scr_metric_threshold_20": 0.3533835715278058,
232
+ "scr_dir2_threshold_20": 0.3533835715278058,
233
+ "scr_dir1_threshold_50": 0.12865505433433813,
234
+ "scr_metric_threshold_50": 0.40225578904439874,
235
+ "scr_dir2_threshold_50": 0.40225578904439874,
236
+ "scr_dir1_threshold_100": 0.1754387738314004,
237
+ "scr_metric_threshold_100": 0.2631580598466413,
238
+ "scr_dir2_threshold_100": 0.2631580598466413,
239
+ "scr_dir1_threshold_500": 0.046783719497062295,
240
+ "scr_metric_threshold_500": 0.2443608635053801,
241
+ "scr_dir2_threshold_500": 0.2443608635053801
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.13274319478686336,
246
+ "scr_metric_threshold_2": 0.5714284679033513,
247
+ "scr_dir2_threshold_2": 0.5714284679033513,
248
+ "scr_dir1_threshold_5": 0.23893806710109472,
249
+ "scr_metric_threshold_5": 0.6200607858682278,
250
+ "scr_dir2_threshold_5": 0.6200607858682278,
251
+ "scr_dir1_threshold_10": 0.3185840894681263,
252
+ "scr_metric_threshold_10": 0.6595744989224059,
253
+ "scr_dir2_threshold_10": 0.6595744989224059,
254
+ "scr_dir1_threshold_20": -0.8761057185627795,
255
+ "scr_metric_threshold_20": 0.7051671611376262,
256
+ "scr_dir2_threshold_20": 0.7051671611376262,
257
+ "scr_dir1_threshold_50": -0.7168136738287163,
258
+ "scr_metric_threshold_50": 0.6565348431727498,
259
+ "scr_dir2_threshold_50": 0.6565348431727498,
260
+ "scr_dir1_threshold_100": -0.6017698881648421,
261
+ "scr_metric_threshold_100": 0.662613973502927,
262
+ "scr_dir2_threshold_100": 0.662613973502927,
263
+ "scr_dir1_threshold_500": -1.283185271222148,
264
+ "scr_metric_threshold_500": 0.46808506398169175,
265
+ "scr_dir2_threshold_500": 0.46808506398169175
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.0769229446642525,
270
+ "scr_metric_threshold_2": 0.16589867320614746,
271
+ "scr_dir2_threshold_2": 0.16589867320614746,
272
+ "scr_dir1_threshold_5": 0.13461536808303157,
273
+ "scr_metric_threshold_5": 0.2672812262405457,
274
+ "scr_dir2_threshold_5": 0.2672812262405457,
275
+ "scr_dir1_threshold_10": 0.20192305212454734,
276
+ "scr_metric_threshold_10": 0.2488480098092212,
277
+ "scr_dir2_threshold_10": 0.2488480098092212,
278
+ "scr_dir1_threshold_20": 0.28365391366095444,
279
+ "scr_metric_threshold_20": 0.3179723654198584,
280
+ "scr_dir2_threshold_20": 0.3179723654198584,
281
+ "scr_dir1_threshold_50": 0.11538456027677187,
282
+ "scr_metric_threshold_50": 0.3640552691602832,
283
+ "scr_dir2_threshold_50": 0.3640552691602832,
284
+ "scr_dir1_threshold_100": -0.052884506546624445,
285
+ "scr_metric_threshold_100": 0.38248848559160764,
286
+ "scr_dir2_threshold_100": 0.38248848559160764,
287
+ "scr_dir1_threshold_500": -0.0769229446642525,
288
+ "scr_metric_threshold_500": 0.18894012507635988,
289
+ "scr_dir2_threshold_500": 0.18894012507635988
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.19.hook_resid_post__trainer_1",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_2_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732143879638,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.210608663603325,
76
+ "scr_metric_threshold_2": 0.21123766115466516,
77
+ "scr_dir2_threshold_2": 0.21123766115466516,
78
+ "scr_dir1_threshold_5": 0.3054830249748369,
79
+ "scr_metric_threshold_5": 0.30151253991874893,
80
+ "scr_dir2_threshold_5": 0.30151253991874893,
81
+ "scr_dir1_threshold_10": 0.24060670715705054,
82
+ "scr_metric_threshold_10": 0.37910257305367073,
83
+ "scr_dir2_threshold_10": 0.37910257305367073,
84
+ "scr_dir1_threshold_20": 0.19978685269497795,
85
+ "scr_metric_threshold_20": 0.4742922841802943,
86
+ "scr_dir2_threshold_20": 0.4742922841802943,
87
+ "scr_dir1_threshold_50": -0.030872031139375148,
88
+ "scr_metric_threshold_50": 0.47555207457400644,
89
+ "scr_dir2_threshold_50": 0.47555207457400644,
90
+ "scr_dir1_threshold_100": -0.25477187759205167,
91
+ "scr_metric_threshold_100": 0.4029347837484713,
92
+ "scr_dir2_threshold_100": 0.4029347837484713,
93
+ "scr_dir1_threshold_500": -1.4758027643497236,
94
+ "scr_metric_threshold_500": 0.32955370038930976,
95
+ "scr_dir2_threshold_500": 0.32955370038930976
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.27941247367097016,
102
+ "scr_metric_threshold_2": 0.04810139609153522,
103
+ "scr_dir2_threshold_2": 0.04810139609153522,
104
+ "scr_dir1_threshold_5": 0.3676474842025821,
105
+ "scr_metric_threshold_5": 0.09620264128525105,
106
+ "scr_dir2_threshold_5": 0.09620264128525105,
107
+ "scr_dir1_threshold_10": 0.4117654277377151,
108
+ "scr_metric_threshold_10": 0.13164567200155847,
109
+ "scr_dir2_threshold_10": 0.13164567200155847,
110
+ "scr_dir1_threshold_20": 0.3235295406674491,
111
+ "scr_metric_threshold_20": 0.2101265715819267,
112
+ "scr_dir2_threshold_20": 0.2101265715819267,
113
+ "scr_dir1_threshold_50": -0.11764638852926455,
114
+ "scr_metric_threshold_50": 0.2860760093441966,
115
+ "scr_dir2_threshold_50": 0.2860760093441966,
116
+ "scr_dir1_threshold_100": -0.3235286641287951,
117
+ "scr_metric_threshold_100": 0.29113923477603215,
118
+ "scr_dir2_threshold_100": 0.29113923477603215,
119
+ "scr_dir1_threshold_500": -2.308822975129969,
120
+ "scr_metric_threshold_500": 0.21265833519566393,
121
+ "scr_dir2_threshold_500": 0.21265833519566393
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.07207221720147068,
126
+ "scr_metric_threshold_2": 0.15882357272309403,
127
+ "scr_dir2_threshold_2": 0.15882357272309403,
128
+ "scr_dir1_threshold_5": 0.5135133393582352,
129
+ "scr_metric_threshold_5": 0.2264706305154013,
130
+ "scr_dir2_threshold_5": 0.2264706305154013,
131
+ "scr_dir1_threshold_10": 0.45045055204102946,
132
+ "scr_metric_threshold_10": 0.35000001753077614,
133
+ "scr_dir2_threshold_10": 0.35000001753077614,
134
+ "scr_dir1_threshold_20": 0.4684683378520097,
135
+ "scr_metric_threshold_20": 0.4794116811768901,
136
+ "scr_dir2_threshold_20": 0.4794116811768901,
137
+ "scr_dir1_threshold_50": 0.4594594449465196,
138
+ "scr_metric_threshold_50": 0.5441176006538279,
139
+ "scr_dir2_threshold_50": 0.5441176006538279,
140
+ "scr_dir1_threshold_100": 0.4324322292512744,
141
+ "scr_metric_threshold_100": 0.25882350259998943,
142
+ "scr_dir2_threshold_100": 0.25882350259998943,
143
+ "scr_dir1_threshold_500": -0.3063061176380881,
144
+ "scr_metric_threshold_500": -0.2470587740307497,
145
+ "scr_dir2_threshold_500": -0.2470587740307497
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.4444440765146749,
150
+ "scr_metric_threshold_2": 0.09068627737431016,
151
+ "scr_dir2_threshold_2": 0.09068627737431016,
152
+ "scr_dir1_threshold_5": 0.4629623497466804,
153
+ "scr_metric_threshold_5": 0.19117635027897312,
154
+ "scr_dir2_threshold_5": 0.19117635027897312,
155
+ "scr_dir1_threshold_10": 0.3518516065653388,
156
+ "scr_metric_threshold_10": 0.34558817513948653,
157
+ "scr_dir2_threshold_10": 0.34558817513948653,
158
+ "scr_dir1_threshold_20": -0.018518273232005476,
159
+ "scr_metric_threshold_20": 0.43872540139638494,
160
+ "scr_dir2_threshold_20": 0.43872540139638494,
161
+ "scr_dir1_threshold_50": -0.07407419671733059,
162
+ "scr_metric_threshold_50": 0.25,
163
+ "scr_dir2_threshold_50": 0.25,
164
+ "scr_dir1_threshold_100": -1.8148139563120191,
165
+ "scr_metric_threshold_100": 0.07843124078173246,
166
+ "scr_dir2_threshold_100": 0.07843124078173246,
167
+ "scr_dir1_threshold_500": -6.629627912624039,
168
+ "scr_metric_threshold_500": 0.046568613128449184,
169
+ "scr_dir2_threshold_500": 0.046568613128449184
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.4531251455191284,
174
+ "scr_metric_threshold_2": 0.19402982153481765,
175
+ "scr_dir2_threshold_2": 0.19402982153481765,
176
+ "scr_dir1_threshold_5": 0.42968748544808716,
177
+ "scr_metric_threshold_5": 0.3432835714672064,
178
+ "scr_dir2_threshold_5": 0.3432835714672064,
179
+ "scr_dir1_threshold_10": 0.07031251455191284,
180
+ "scr_metric_threshold_10": 0.4388059643069635,
181
+ "scr_dir2_threshold_10": 0.4388059643069635,
182
+ "scr_dir1_threshold_20": -0.03906245634426147,
183
+ "scr_metric_threshold_20": 0.5104477144557082,
184
+ "scr_dir2_threshold_20": 0.5104477144557082,
185
+ "scr_dir1_threshold_50": -0.046874854480871565,
186
+ "scr_metric_threshold_50": 0.620895464496275,
187
+ "scr_dir2_threshold_50": 0.620895464496275,
188
+ "scr_dir1_threshold_100": 0.03125005820765137,
189
+ "scr_metric_threshold_100": 0.46567164285327933,
190
+ "scr_dir2_threshold_100": 0.46567164285327933,
191
+ "scr_dir1_threshold_500": 0.015625261934431176,
192
+ "scr_metric_threshold_500": 0.5611940356930365,
193
+ "scr_dir2_threshold_500": 0.5611940356930365
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.059523737721180185,
198
+ "scr_metric_threshold_2": 0.468634763448764,
199
+ "scr_dir2_threshold_2": 0.468634763448764,
200
+ "scr_dir1_threshold_5": 0.08928560658177027,
201
+ "scr_metric_threshold_5": 0.5645755775821455,
202
+ "scr_dir2_threshold_5": 0.5645755775821455,
203
+ "scr_dir1_threshold_10": 0.08928560658177027,
204
+ "scr_metric_threshold_10": 0.6273062706279338,
205
+ "scr_dir2_threshold_10": 0.6273062706279338,
206
+ "scr_dir1_threshold_20": 0.20238113399936777,
207
+ "scr_metric_threshold_20": 0.690036963673722,
208
+ "scr_dir2_threshold_20": 0.690036963673722,
209
+ "scr_dir1_threshold_50": 0.20238113399936777,
210
+ "scr_metric_threshold_50": 0.7121771177132229,
211
+ "scr_dir2_threshold_50": 0.7121771177132229,
212
+ "scr_dir1_threshold_100": 0.17261891034931506,
213
+ "scr_metric_threshold_100": 0.7306272827366931,
214
+ "scr_dir2_threshold_100": 0.7306272827366931,
215
+ "scr_dir1_threshold_500": -0.6607141273261328,
216
+ "scr_metric_threshold_500": 0.6420664466353732,
217
+ "scr_dir2_threshold_500": 0.6420664466353732
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.13450293213017933,
222
+ "scr_metric_threshold_2": 0.048872217516592945,
223
+ "scr_dir2_threshold_2": 0.048872217516592945,
224
+ "scr_dir1_threshold_5": 0.25146223087283504,
225
+ "scr_metric_threshold_5": 0.14285730291256044,
226
+ "scr_dir2_threshold_5": 0.14285730291256044,
227
+ "scr_dir1_threshold_10": 0.22222249332846272,
228
+ "scr_metric_threshold_10": 0.21428584233004835,
229
+ "scr_dir2_threshold_10": 0.21428584233004835,
230
+ "scr_dir1_threshold_20": 0.14035115849118687,
231
+ "scr_metric_threshold_20": 0.3796992430659191,
232
+ "scr_dir2_threshold_20": 0.3796992430659191,
233
+ "scr_dir1_threshold_50": 0.10526319458580699,
234
+ "scr_metric_threshold_50": 0.43233081021973047,
235
+ "scr_dir2_threshold_50": 0.43233081021973047,
236
+ "scr_dir1_threshold_100": 0.15204691408286927,
237
+ "scr_metric_threshold_100": 0.37593989342870077,
238
+ "scr_dir2_threshold_100": 0.37593989342870077,
239
+ "scr_dir1_threshold_500": -0.5614032397040821,
240
+ "scr_metric_threshold_500": 0.48120302773632345,
241
+ "scr_dir2_threshold_500": 0.48120302773632345
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.15044260390985248,
246
+ "scr_metric_threshold_2": 0.5471124900900483,
247
+ "scr_dir2_threshold_2": 0.5471124900900483,
248
+ "scr_dir1_threshold_5": 0.1946903355054736,
249
+ "scr_metric_threshold_5": 0.6079027063770086,
250
+ "scr_dir2_threshold_5": 0.6079027063770086,
251
+ "scr_dir1_threshold_10": 0.30973464864391564,
252
+ "scr_metric_threshold_10": 0.67173239724449,
253
+ "scr_dir2_threshold_10": 0.67173239724449,
254
+ "scr_dir1_threshold_20": 0.47787613420218944,
255
+ "scr_metric_threshold_20": 0.7264436643704083,
256
+ "scr_dir2_threshold_20": 0.7264436643704083,
257
+ "scr_dir1_threshold_50": -0.7610614054243374,
258
+ "scr_metric_threshold_50": 0.6869301324853652,
259
+ "scr_dir2_threshold_50": 0.6869301324853652,
260
+ "scr_dir1_threshold_100": -0.7699113737231159,
261
+ "scr_metric_threshold_100": 0.7325227947005853,
262
+ "scr_dir2_threshold_100": 0.7325227947005853,
263
+ "scr_dir1_threshold_500": -1.4513267567804218,
264
+ "scr_metric_threshold_500": 0.5987841014663106,
265
+ "scr_dir2_threshold_500": 0.5987841014663106
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.09134612215914382,
270
+ "scr_metric_threshold_2": 0.13364075045815918,
271
+ "scr_dir2_threshold_2": 0.13364075045815918,
272
+ "scr_dir1_threshold_5": 0.13461536808303157,
273
+ "scr_metric_threshold_5": 0.23963153893144531,
274
+ "scr_dir2_threshold_5": 0.23963153893144531,
275
+ "scr_dir1_threshold_10": 0.01923080780625969,
276
+ "scr_metric_threshold_10": 0.2534562452481091,
277
+ "scr_dir2_threshold_10": 0.2534562452481091,
278
+ "scr_dir1_threshold_20": 0.043269245923887735,
279
+ "scr_metric_threshold_20": 0.35944703372139525,
280
+ "scr_dir2_threshold_20": 0.35944703372139525,
281
+ "scr_dir1_threshold_50": -0.014423177494891337,
282
+ "scr_metric_threshold_50": 0.2718894616794336,
283
+ "scr_dir2_threshold_50": 0.2718894616794336,
284
+ "scr_dir1_threshold_100": 0.08173086153640712,
285
+ "scr_metric_threshold_100": 0.29032267811075807,
286
+ "scr_dir2_threshold_100": 0.29032267811075807,
287
+ "scr_dir1_threshold_500": 0.09615375247051218,
288
+ "scr_metric_threshold_500": 0.3410138172900708,
289
+ "scr_dir2_threshold_500": 0.3410138172900708
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.19.hook_resid_post__trainer_2",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_3_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732144856038,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.18087536879007637,
76
+ "scr_metric_threshold_2": 0.22773996380612815,
77
+ "scr_dir2_threshold_2": 0.22773996380612815,
78
+ "scr_dir1_threshold_5": 0.31845544638135076,
79
+ "scr_metric_threshold_5": 0.2935527575992469,
80
+ "scr_dir2_threshold_5": 0.2935527575992469,
81
+ "scr_dir1_threshold_10": 0.3333252846239444,
82
+ "scr_metric_threshold_10": 0.390917932553593,
83
+ "scr_dir2_threshold_10": 0.390917932553593,
84
+ "scr_dir1_threshold_20": 0.34235983512501744,
85
+ "scr_metric_threshold_20": 0.460138080221529,
86
+ "scr_dir2_threshold_20": 0.460138080221529,
87
+ "scr_dir1_threshold_50": 0.1403827660320841,
88
+ "scr_metric_threshold_50": 0.5362589453856931,
89
+ "scr_dir2_threshold_50": 0.5362589453856931,
90
+ "scr_dir1_threshold_100": 0.05579661873807497,
91
+ "scr_metric_threshold_100": 0.3595386762630908,
92
+ "scr_dir2_threshold_100": 0.3595386762630908,
93
+ "scr_dir1_threshold_500": -0.7726715266241494,
94
+ "scr_metric_threshold_500": 0.32925153180743905,
95
+ "scr_dir2_threshold_500": 0.32925153180743905
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2500002191346635,
102
+ "scr_metric_threshold_2": 0.07088606143261485,
103
+ "scr_dir2_threshold_2": 0.07088606143261485,
104
+ "scr_dir1_threshold_5": 0.3676474842025821,
105
+ "scr_metric_threshold_5": 0.14177227376304907,
106
+ "scr_dir2_threshold_5": 0.14177227376304907,
107
+ "scr_dir1_threshold_10": 0.38235317320140844,
108
+ "scr_metric_threshold_10": 0.13417728471747628,
109
+ "scr_dir2_threshold_10": 0.13417728471747628,
110
+ "scr_dir1_threshold_20": 0.39705886220023473,
111
+ "scr_metric_threshold_20": 0.2101265715819267,
112
+ "scr_dir2_threshold_20": 0.2101265715819267,
113
+ "scr_dir1_threshold_50": -0.08823501053161192,
114
+ "scr_metric_threshold_50": 0.28354439662827874,
115
+ "scr_dir2_threshold_50": 0.28354439662827874,
116
+ "scr_dir1_threshold_100": -0.13235207752809086,
117
+ "scr_metric_threshold_100": 0.3417722435834852,
118
+ "scr_dir2_threshold_100": 0.3417722435834852,
119
+ "scr_dir1_threshold_500": -1.955881179926213,
120
+ "scr_metric_threshold_500": 0.41265830501610007,
121
+ "scr_dir2_threshold_500": 0.41265830501610007
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.045045001506225466,
126
+ "scr_metric_threshold_2": 0.2470587740307497,
127
+ "scr_dir2_threshold_2": 0.2470587740307497,
128
+ "scr_dir1_threshold_5": 0.47747723075749987,
129
+ "scr_metric_threshold_5": 0.33529415064616686,
130
+ "scr_dir2_threshold_5": 0.33529415064616686,
131
+ "scr_dir1_threshold_10": 0.5585583408644607,
132
+ "scr_metric_threshold_10": 0.552941190907698,
133
+ "scr_dir2_threshold_10": 0.552941190907698,
134
+ "scr_dir1_threshold_20": 0.5585583408644607,
135
+ "scr_metric_threshold_20": 0.632352977269245,
136
+ "scr_dir2_threshold_20": 0.632352977269245,
137
+ "scr_dir1_threshold_50": 0.4594594449465196,
138
+ "scr_metric_threshold_50": 0.7205881785769007,
139
+ "scr_dir2_threshold_50": 0.7205881785769007,
140
+ "scr_dir1_threshold_100": 0.4864866606417648,
141
+ "scr_metric_threshold_100": 0.11470579676150466,
142
+ "scr_dir2_threshold_100": 0.11470579676150466,
143
+ "scr_dir1_threshold_500": -0.1621622202139216,
144
+ "scr_metric_threshold_500": -0.15588243440772448,
145
+ "scr_dir2_threshold_500": -0.15588243440772448
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.5,
150
+ "scr_metric_threshold_2": 0.11764700726241704,
151
+ "scr_dir2_threshold_2": 0.11764700726241704,
152
+ "scr_dir1_threshold_5": 0.5185182732320055,
153
+ "scr_metric_threshold_5": 0.18137255474862032,
154
+ "scr_dir2_threshold_5": 0.18137255474862032,
155
+ "scr_dir1_threshold_10": 0.5,
156
+ "scr_metric_threshold_10": 0.2916665692734544,
157
+ "scr_dir2_threshold_10": 0.2916665692734544,
158
+ "scr_dir1_threshold_20": 0.40740753005066394,
159
+ "scr_metric_threshold_20": 0.37009795614500524,
160
+ "scr_dir2_threshold_20": 0.37009795614500524,
161
+ "scr_dir1_threshold_50": 0.24074086338399725,
162
+ "scr_metric_threshold_50": 0.4460783941339679,
163
+ "scr_dir2_threshold_50": 0.4460783941339679,
164
+ "scr_dir1_threshold_100": -0.1296301202026557,
165
+ "scr_metric_threshold_100": 0.019607737150523937,
166
+ "scr_dir2_threshold_100": 0.019607737150523937,
167
+ "scr_dir1_threshold_500": -2.6481472896453524,
168
+ "scr_metric_threshold_500": -0.07352948910637443,
169
+ "scr_dir2_threshold_500": -0.07352948910637443
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.15624982537704588,
174
+ "scr_metric_threshold_2": 0.2567164641176521,
175
+ "scr_dir2_threshold_2": 0.2567164641176521,
176
+ "scr_dir1_threshold_5": 0.3671873690327844,
177
+ "scr_metric_threshold_5": 0.33432828597700365,
178
+ "scr_dir2_threshold_5": 0.33432828597700365,
179
+ "scr_dir1_threshold_10": 0.2421876018633899,
180
+ "scr_metric_threshold_10": 0.4238806071061539,
181
+ "scr_dir2_threshold_10": 0.4238806071061539,
182
+ "scr_dir1_threshold_20": 0.3671873690327844,
183
+ "scr_metric_threshold_20": 0.49850739311020204,
184
+ "scr_dir2_threshold_20": 0.49850739311020204,
185
+ "scr_dir1_threshold_50": 0.0,
186
+ "scr_metric_threshold_50": 0.5940297859499591,
187
+ "scr_dir2_threshold_50": 0.5940297859499591,
188
+ "scr_dir1_threshold_100": 0.21093754365573852,
189
+ "scr_metric_threshold_100": 0.5074626786004048,
190
+ "scr_dir2_threshold_100": 0.5074626786004048,
191
+ "scr_dir1_threshold_500": 0.4609375436557385,
192
+ "scr_metric_threshold_500": 0.480597000054089,
193
+ "scr_dir2_threshold_500": 0.480597000054089
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.059523737721180185,
198
+ "scr_metric_threshold_2": 0.4169742573943843,
199
+ "scr_dir2_threshold_2": 0.4169742573943843,
200
+ "scr_dir1_threshold_5": 0.07142869813909379,
201
+ "scr_metric_threshold_5": 0.5387454345266138,
202
+ "scr_dir2_threshold_5": 0.5387454345266138,
203
+ "scr_dir1_threshold_10": 0.053571434906954686,
204
+ "scr_metric_threshold_10": 0.6088561056044636,
205
+ "scr_dir2_threshold_10": 0.6088561056044636,
206
+ "scr_dir1_threshold_20": -0.029761868860590093,
207
+ "scr_metric_threshold_20": 0.6789667766823134,
208
+ "scr_dir2_threshold_20": 0.6789667766823134,
209
+ "scr_dir1_threshold_50": 0.18452387076722868,
210
+ "scr_metric_threshold_50": 0.7084871286971922,
211
+ "scr_dir2_threshold_50": 0.7084871286971922,
212
+ "scr_dir1_threshold_100": 0.047619132092729194,
213
+ "scr_metric_threshold_100": 0.690036963673722,
214
+ "scr_dir2_threshold_100": 0.690036963673722,
215
+ "scr_dir1_threshold_500": -0.33928551788440464,
216
+ "scr_metric_threshold_500": 0.7970479647985121,
217
+ "scr_dir2_threshold_500": 0.7970479647985121
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.1695908960355592,
222
+ "scr_metric_threshold_2": 0.07894746276950931,
223
+ "scr_dir2_threshold_2": 0.07894746276950931,
224
+ "scr_dir1_threshold_5": 0.2923977240088898,
225
+ "scr_metric_threshold_5": 0.13909772919775742,
226
+ "scr_dir2_threshold_5": 0.13909772919775742,
227
+ "scr_dir1_threshold_10": 0.3391814435059521,
228
+ "scr_metric_threshold_10": 0.2556391364946199,
229
+ "scr_dir2_threshold_10": 0.2556391364946199,
230
+ "scr_dir1_threshold_20": 0.3274856879142697,
231
+ "scr_metric_threshold_20": 0.3345865992641292,
232
+ "scr_dir2_threshold_20": 0.3345865992641292,
233
+ "scr_dir1_threshold_50": 0.32163746155326217,
234
+ "scr_metric_threshold_50": 0.4436090832089703,
235
+ "scr_dir2_threshold_50": 0.4436090832089703,
236
+ "scr_dir1_threshold_100": 0.3333335657101109,
237
+ "scr_metric_threshold_100": 0.24812043722018312,
238
+ "scr_dir2_threshold_100": 0.24812043722018312,
239
+ "scr_dir1_threshold_500": -0.08771921263311704,
240
+ "scr_metric_threshold_500": 0.5676691897802695,
241
+ "scr_dir2_threshold_500": 0.5676691897802695
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.14159316308564182,
246
+ "scr_metric_threshold_2": 0.4954406975446509,
247
+ "scr_dir2_threshold_2": 0.4954406975446509,
248
+ "scr_dir1_threshold_5": 0.2654869170482945,
249
+ "scr_metric_threshold_5": 0.5349544105988292,
250
+ "scr_dir2_threshold_5": 0.5349544105988292,
251
+ "scr_dir1_threshold_10": 0.45132728425498964,
252
+ "scr_metric_threshold_10": 0.5744681236530075,
253
+ "scr_dir2_threshold_10": 0.5744681236530075,
254
+ "scr_dir1_threshold_20": 0.45132728425498964,
255
+ "scr_metric_threshold_20": 0.6200607858682278,
256
+ "scr_dir2_threshold_20": 0.6200607858682278,
257
+ "scr_dir1_threshold_50": -0.1681414855582738,
258
+ "scr_metric_threshold_50": 0.7112461102986682,
259
+ "scr_dir2_threshold_50": 0.7112461102986682,
260
+ "scr_dir1_threshold_100": -0.5575216290946532,
261
+ "scr_metric_threshold_100": 0.6504558940117079,
262
+ "scr_dir2_threshold_100": 0.6504558940117079,
263
+ "scr_dir1_threshold_500": -1.6991142647057271,
264
+ "scr_metric_threshold_500": 0.40729484769473145,
265
+ "scr_dir2_threshold_500": 0.40729484769473145
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.12500010746029486,
270
+ "scr_metric_threshold_2": 0.13824898589704712,
271
+ "scr_dir2_threshold_2": 0.13824898589704712,
272
+ "scr_dir1_threshold_5": 0.187499874629656,
273
+ "scr_metric_threshold_5": 0.14285722133593506,
274
+ "scr_dir2_threshold_5": 0.14285722133593506,
275
+ "scr_dir1_threshold_10": 0.13942299839439992,
276
+ "scr_metric_threshold_10": 0.28571444267187013,
277
+ "scr_dir2_threshold_10": 0.28571444267187013,
278
+ "scr_dir1_threshold_20": 0.2596154755433264,
279
+ "scr_metric_threshold_20": 0.33640558185118286,
280
+ "scr_dir2_threshold_20": 0.33640558185118286,
281
+ "scr_dir1_threshold_50": 0.17307698369555094,
282
+ "scr_metric_threshold_50": 0.38248848559160764,
283
+ "scr_dir2_threshold_50": 0.38248848559160764,
284
+ "scr_dir1_threshold_100": 0.187499874629656,
285
+ "scr_metric_threshold_100": 0.3041476591031946,
286
+ "scr_dir2_threshold_100": 0.3041476591031946,
287
+ "scr_dir1_threshold_500": 0.24999992835980342,
288
+ "scr_metric_threshold_500": 0.19815687062990844,
289
+ "scr_dir2_threshold_500": 0.19815687062990844
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.19.hook_resid_post__trainer_3",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_4_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732145853455,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.1487348436061094,
76
+ "scr_metric_threshold_2": 0.1904979758150757,
77
+ "scr_dir2_threshold_2": 0.1904979758150757,
78
+ "scr_dir1_threshold_5": 0.2503695547847299,
79
+ "scr_metric_threshold_5": 0.28531656655567184,
80
+ "scr_dir2_threshold_5": 0.28531656655567184,
81
+ "scr_dir1_threshold_10": 0.2776098957093683,
82
+ "scr_metric_threshold_10": 0.3756547962696375,
83
+ "scr_dir2_threshold_10": 0.3756547962696375,
84
+ "scr_dir1_threshold_20": 0.03629303994968739,
85
+ "scr_metric_threshold_20": 0.45175090834087617,
86
+ "scr_dir2_threshold_20": 0.45175090834087617,
87
+ "scr_dir1_threshold_50": -0.037417212791884265,
88
+ "scr_metric_threshold_50": 0.539490678094635,
89
+ "scr_dir2_threshold_50": 0.539490678094635,
90
+ "scr_dir1_threshold_100": -0.10469282763530133,
91
+ "scr_metric_threshold_100": 0.4377089616333399,
92
+ "scr_dir2_threshold_100": 0.4377089616333399,
93
+ "scr_dir1_threshold_500": -0.8526927639338856,
94
+ "scr_metric_threshold_500": 0.3341871581729607,
95
+ "scr_dir2_threshold_500": 0.3341871581729607
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2647059081334898,
102
+ "scr_metric_threshold_2": 0.06835444871669703,
103
+ "scr_dir2_threshold_2": 0.06835444871669703,
104
+ "scr_dir1_threshold_5": 0.1764708976018779,
105
+ "scr_metric_threshold_5": 0.13164567200155847,
106
+ "scr_dir2_threshold_5": 0.13164567200155847,
107
+ "scr_dir1_threshold_10": 0.2647059081334898,
108
+ "scr_metric_threshold_10": 0.23037977510490792,
109
+ "scr_dir2_threshold_10": 0.23037977510490792,
110
+ "scr_dir1_threshold_20": -0.3970579856615807,
111
+ "scr_metric_threshold_20": 0.29367099838976934,
112
+ "scr_dir2_threshold_20": 0.29367099838976934,
113
+ "scr_dir1_threshold_50": -0.823529102398122,
114
+ "scr_metric_threshold_50": 0.38227849973162825,
115
+ "scr_dir2_threshold_50": 0.38227849973162825,
116
+ "scr_dir1_threshold_100": -1.5147052507294994,
117
+ "scr_metric_threshold_100": 0.40759492868644503,
118
+ "scr_dir2_threshold_100": 0.40759492868644503,
119
+ "scr_dir1_threshold_500": -1.7941168478618155,
120
+ "scr_metric_threshold_500": 0.5493670515516748,
121
+ "scr_dir2_threshold_500": 0.5493670515516748
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.027027215695245212,
126
+ "scr_metric_threshold_2": 0.12941166364611392,
127
+ "scr_dir2_threshold_2": 0.12941166364611392,
128
+ "scr_dir1_threshold_5": 0.45045055204102946,
129
+ "scr_metric_threshold_5": 0.26176464091535895,
130
+ "scr_dir2_threshold_5": 0.26176464091535895,
131
+ "scr_dir1_threshold_10": 0.612612772254951,
132
+ "scr_metric_threshold_10": 0.37647061298462514,
133
+ "scr_dir2_threshold_10": 0.37647061298462514,
134
+ "scr_dir1_threshold_20": 0.5855855565597059,
135
+ "scr_metric_threshold_20": 0.4941175480614994,
136
+ "scr_dir2_threshold_20": 0.4941175480614994,
137
+ "scr_dir1_threshold_50": 0.45045055204102946,
138
+ "scr_metric_threshold_50": 0.6676469876692027,
139
+ "scr_dir2_threshold_50": 0.6676469876692027,
140
+ "scr_dir1_threshold_100": 0.19819832881465693,
141
+ "scr_metric_threshold_100": 0.31470583182305695,
142
+ "scr_dir2_threshold_100": 0.31470583182305695,
143
+ "scr_dir1_threshold_500": 0.12612611161318626,
144
+ "scr_metric_threshold_500": 0.1794117162384424,
145
+ "scr_dir2_threshold_500": 0.1794117162384424
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.2777774098480082,
150
+ "scr_metric_threshold_2": 0.09068627737431016,
151
+ "scr_dir2_threshold_2": 0.09068627737431016,
152
+ "scr_dir1_threshold_5": 0.4259258032826694,
153
+ "scr_metric_threshold_5": 0.19117635027897312,
154
+ "scr_dir2_threshold_5": 0.19117635027897312,
155
+ "scr_dir1_threshold_10": 0.31481395631201914,
156
+ "scr_metric_threshold_10": 0.36764700726241706,
157
+ "scr_dir2_threshold_10": 0.36764700726241706,
158
+ "scr_dir1_threshold_20": -0.018518273232005476,
159
+ "scr_metric_threshold_20": 0.4264705108936256,
160
+ "scr_dir2_threshold_20": 0.4264705108936256,
161
+ "scr_dir1_threshold_50": -0.29629678686932237,
162
+ "scr_metric_threshold_50": 0.4754902189944813,
163
+ "scr_dir2_threshold_50": 0.4754902189944813,
164
+ "scr_dir1_threshold_100": -0.31481506010132787,
165
+ "scr_metric_threshold_100": 0.16176467150827803,
166
+ "scr_dir2_threshold_100": 0.16176467150827803,
167
+ "scr_dir1_threshold_500": -5.296294579290705,
168
+ "scr_metric_threshold_500": -0.12254905111741181,
169
+ "scr_dir2_threshold_500": -0.12254905111741181
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.11718736903278441,
174
+ "scr_metric_threshold_2": 0.09850742869506053,
175
+ "scr_dir2_threshold_2": 0.09850742869506053,
176
+ "scr_dir1_threshold_5": 0.19531274738251833,
177
+ "scr_metric_threshold_5": 0.28955221437457473,
178
+ "scr_dir2_threshold_5": 0.28955221437457473,
179
+ "scr_dir1_threshold_10": 0.16406268917486697,
180
+ "scr_metric_threshold_10": 0.3880596430696353,
181
+ "scr_dir2_threshold_10": 0.3880596430696353,
182
+ "scr_dir1_threshold_20": 0.15624982537704588,
183
+ "scr_metric_threshold_20": 0.4477612497971663,
184
+ "scr_dir2_threshold_20": 0.4477612497971663,
185
+ "scr_dir1_threshold_50": 0.5546877182786927,
186
+ "scr_metric_threshold_50": 0.641791071331984,
187
+ "scr_dir2_threshold_50": 0.641791071331984,
188
+ "scr_dir1_threshold_100": 0.5546877182786927,
189
+ "scr_metric_threshold_100": 0.17910446433400803,
190
+ "scr_dir2_threshold_100": 0.17910446433400803,
191
+ "scr_dir1_threshold_500": 0.007812398136610098,
192
+ "scr_metric_threshold_500": 0.14626871407708533,
193
+ "scr_dir2_threshold_500": 0.14626871407708533
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.059523737721180185,
198
+ "scr_metric_threshold_2": 0.47601474148082557,
199
+ "scr_dir2_threshold_2": 0.47601474148082557,
200
+ "scr_dir1_threshold_5": 0.0178572632321391,
201
+ "scr_metric_threshold_5": 0.5239852585191744,
202
+ "scr_dir2_threshold_5": 0.5239852585191744,
203
+ "scr_dir1_threshold_10": 0.011904960417913604,
204
+ "scr_metric_threshold_10": 0.5535056105340531,
205
+ "scr_dir2_threshold_10": 0.5535056105340531,
206
+ "scr_dir1_threshold_20": 0.023809566046364597,
207
+ "scr_metric_threshold_20": 0.686346754714375,
208
+ "scr_dir2_threshold_20": 0.686346754714375,
209
+ "scr_dir1_threshold_50": 0.25595256890632245,
210
+ "scr_metric_threshold_50": 0.7712176017996641,
211
+ "scr_dir2_threshold_50": 0.7712176017996641,
212
+ "scr_dir1_threshold_100": -0.01785690844267649,
213
+ "scr_metric_threshold_100": 0.7970479647985121,
214
+ "scr_dir2_threshold_100": 0.7970479647985121,
215
+ "scr_dir1_threshold_500": -0.01785690844267649,
216
+ "scr_metric_threshold_500": -0.154981518163139,
217
+ "scr_dir2_threshold_500": -0.154981518163139
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.15204691408286927,
222
+ "scr_metric_threshold_2": 0.12781968028610227,
223
+ "scr_dir2_threshold_2": 0.12781968028610227,
224
+ "scr_dir1_threshold_5": 0.23391824892014512,
225
+ "scr_metric_threshold_5": 0.1766918978026952,
226
+ "scr_dir2_threshold_5": 0.1766918978026952,
227
+ "scr_dir1_threshold_10": 0.26900586426035866,
228
+ "scr_metric_threshold_10": 0.21428584233004835,
229
+ "scr_dir2_threshold_10": 0.21428584233004835,
230
+ "scr_dir1_threshold_20": 0.4444446380917591,
231
+ "scr_metric_threshold_20": 0.2556391364946199,
232
+ "scr_dir2_threshold_20": 0.2556391364946199,
233
+ "scr_dir1_threshold_50": 0.28654984621304863,
234
+ "scr_metric_threshold_50": 0.34210529853856597,
235
+ "scr_dir2_threshold_50": 0.34210529853856597,
236
+ "scr_dir1_threshold_100": 0.31578958375742094,
237
+ "scr_metric_threshold_100": 0.5075189233520214,
238
+ "scr_dir2_threshold_100": 0.5075189233520214,
239
+ "scr_dir1_threshold_500": 0.22222249332846272,
240
+ "scr_metric_threshold_500": 0.6804512474399136,
241
+ "scr_dir2_threshold_500": 0.6804512474399136
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.18584089468126294,
246
+ "scr_metric_threshold_2": 0.4133737968557734,
247
+ "scr_dir2_threshold_2": 0.4133737968557734,
248
+ "scr_dir1_threshold_5": 0.35398238023953676,
249
+ "scr_metric_threshold_5": 0.6109421809575296,
250
+ "scr_dir2_threshold_5": 0.6109421809575296,
251
+ "scr_dir1_threshold_10": 0.30973464864391564,
252
+ "scr_metric_threshold_10": 0.6352583399399678,
253
+ "scr_dir2_threshold_10": 0.6352583399399678,
254
+ "scr_dir1_threshold_20": -0.8407074277913691,
255
+ "scr_metric_threshold_20": 0.6413372891010097,
256
+ "scr_dir2_threshold_20": 0.6413372891010097,
257
+ "scr_dir1_threshold_50": -1.0973449040154528,
258
+ "scr_metric_threshold_50": 0.7082066357181472,
259
+ "scr_dir2_threshold_50": 0.7082066357181472,
260
+ "scr_dir1_threshold_100": -0.4867255750264001,
261
+ "scr_metric_threshold_100": 0.829787249461203,
262
+ "scr_dir2_threshold_100": 0.829787249461203,
263
+ "scr_dir1_threshold_500": -0.5309733066220212,
264
+ "scr_metric_threshold_500": 0.7872340618265038,
265
+ "scr_dir2_threshold_500": 0.7872340618265038
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.10576929965403516,
270
+ "scr_metric_threshold_2": 0.11981576946572266,
271
+ "scr_dir2_threshold_2": 0.11981576946572266,
272
+ "scr_dir1_threshold_5": 0.1490385455779229,
273
+ "scr_metric_threshold_5": 0.09677431759551026,
274
+ "scr_dir2_threshold_5": 0.09677431759551026,
275
+ "scr_dir1_threshold_10": 0.27403836647743146,
276
+ "scr_metric_threshold_10": 0.23963153893144531,
277
+ "scr_dir2_threshold_10": 0.23963153893144531,
278
+ "scr_dir1_threshold_20": 0.3365384202075789,
279
+ "scr_metric_threshold_20": 0.36866377927494387,
280
+ "scr_dir2_threshold_20": 0.36866377927494387,
281
+ "scr_dir1_threshold_50": 0.37019240550872995,
282
+ "scr_metric_threshold_50": 0.32718911097340697,
283
+ "scr_dir2_threshold_50": 0.32718911097340697,
284
+ "scr_dir1_threshold_100": 0.42788454236672274,
285
+ "scr_metric_threshold_100": 0.3041476591031946,
286
+ "scr_dir2_threshold_100": 0.3041476591031946,
287
+ "scr_dir1_threshold_500": 0.46153852766787373,
288
+ "scr_metric_threshold_500": 0.6082950435306165,
289
+ "scr_dir2_threshold_500": 0.6082950435306165
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.19.hook_resid_post__trainer_4",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.19.hook_resid_post__trainer_5_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "5e7e590f-bd66-41a2-a2fe-413bdea7a8ab",
72
+ "datetime_epoch_millis": 1732146886035,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.13795080876117913,
76
+ "scr_metric_threshold_2": 0.2183604102542713,
77
+ "scr_dir2_threshold_2": 0.2183604102542713,
78
+ "scr_dir1_threshold_5": 0.13378400129280402,
79
+ "scr_metric_threshold_5": 0.3198818187319437,
80
+ "scr_dir2_threshold_5": 0.3198818187319437,
81
+ "scr_dir1_threshold_10": 0.03379327721535122,
82
+ "scr_metric_threshold_10": 0.38861359764867975,
83
+ "scr_dir2_threshold_10": 0.38861359764867975,
84
+ "scr_dir1_threshold_20": 0.010725515832623114,
85
+ "scr_metric_threshold_20": 0.46514682675194846,
86
+ "scr_dir2_threshold_20": 0.46514682675194846,
87
+ "scr_dir1_threshold_50": -0.13467863292681165,
88
+ "scr_metric_threshold_50": 0.4879581060454324,
89
+ "scr_dir2_threshold_50": 0.4879581060454324,
90
+ "scr_dir1_threshold_100": -0.7578726322438009,
91
+ "scr_metric_threshold_100": 0.3568196936040967,
92
+ "scr_dir2_threshold_100": 0.3568196936040967,
93
+ "scr_dir1_threshold_500": -1.2343346445982344,
94
+ "scr_metric_threshold_500": 0.3434469728174057,
95
+ "scr_dir2_threshold_500": 0.3434469728174057
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.2352945301358372,
102
+ "scr_metric_threshold_2": 0.04810139609153522,
103
+ "scr_dir2_threshold_2": 0.04810139609153522,
104
+ "scr_dir1_threshold_5": -0.02941137799765263,
105
+ "scr_metric_threshold_5": 0.14683549919488467,
106
+ "scr_dir2_threshold_5": 0.14683549919488467,
107
+ "scr_dir1_threshold_10": -0.3676466076639281,
108
+ "scr_metric_threshold_10": 0.19240513167268272,
109
+ "scr_dir2_threshold_10": 0.19240513167268272,
110
+ "scr_dir1_threshold_20": -0.8382347913969485,
111
+ "scr_metric_threshold_20": 0.3063292128671778,
112
+ "scr_dir2_threshold_20": 0.3063292128671778,
113
+ "scr_dir1_threshold_50": -1.6029402612611112,
114
+ "scr_metric_threshold_50": 0.3316456418219946,
115
+ "scr_dir2_threshold_50": 0.3316456418219946,
116
+ "scr_dir1_threshold_100": -2.2499993425960096,
117
+ "scr_metric_threshold_100": 0.38481011244754604,
118
+ "scr_dir2_threshold_100": 0.38481011244754604,
119
+ "scr_dir1_threshold_500": -2.6764695827938967,
120
+ "scr_metric_threshold_500": 0.4734177646872243,
121
+ "scr_dir2_threshold_500": 0.4734177646872243
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.0,
126
+ "scr_metric_threshold_2": 0.26176464091535895,
127
+ "scr_dir2_threshold_2": 0.26176464091535895,
128
+ "scr_dir1_threshold_5": 0.25225222322637253,
129
+ "scr_metric_threshold_5": 0.31176469350768743,
130
+ "scr_dir2_threshold_5": 0.31176469350768743,
131
+ "scr_dir1_threshold_10": 0.2162161146256372,
132
+ "scr_metric_threshold_10": 0.37647061298462514,
133
+ "scr_dir2_threshold_10": 0.37647061298462514,
134
+ "scr_dir1_threshold_20": 0.4594594449465196,
135
+ "scr_metric_threshold_20": 0.4588235376615417,
136
+ "scr_dir2_threshold_20": 0.4588235376615417,
137
+ "scr_dir1_threshold_50": 0.612612772254951,
138
+ "scr_metric_threshold_50": 0.5882352013076556,
139
+ "scr_dir2_threshold_50": 0.5882352013076556,
140
+ "scr_dir1_threshold_100": 0.6486488808556864,
141
+ "scr_metric_threshold_100": 0.6382352538999841,
142
+ "scr_dir2_threshold_100": 0.6382352538999841,
143
+ "scr_dir1_threshold_500": -0.39639665762931386,
144
+ "scr_metric_threshold_500": 0.6852939928691815,
145
+ "scr_dir2_threshold_500": 0.6852939928691815
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.20370321313067763,
150
+ "scr_metric_threshold_2": 0.16421562039086623,
151
+ "scr_dir2_threshold_2": 0.16421562039086623,
152
+ "scr_dir1_threshold_5": 0.2222214863626831,
153
+ "scr_metric_threshold_5": 0.24019605837982885,
154
+ "scr_dir2_threshold_5": 0.24019605837982885,
155
+ "scr_dir1_threshold_10": -0.46296345353598906,
156
+ "scr_metric_threshold_10": 0.375,
157
+ "scr_dir2_threshold_10": 0.375,
158
+ "scr_dir1_threshold_20": -0.5,
159
+ "scr_metric_threshold_20": 0.41421562039086623,
160
+ "scr_dir2_threshold_20": 0.41421562039086623,
161
+ "scr_dir1_threshold_50": -0.6666666666666666,
162
+ "scr_metric_threshold_50": 0.47058817513948653,
163
+ "scr_dir2_threshold_50": 0.47058817513948653,
164
+ "scr_dir1_threshold_100": -4.999998896210691,
165
+ "scr_metric_threshold_100": 0.41421562039086623,
166
+ "scr_dir2_threshold_100": 0.41421562039086623,
167
+ "scr_dir1_threshold_500": -7.722220382573374,
168
+ "scr_metric_threshold_500": -0.1397059854751659,
169
+ "scr_dir2_threshold_500": -0.1397059854751659
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.10937497089617432,
174
+ "scr_metric_threshold_2": 0.24179110691684247,
175
+ "scr_dir2_threshold_2": 0.24179110691684247,
176
+ "scr_dir1_threshold_5": -0.06250011641530274,
177
+ "scr_metric_threshold_5": 0.4208955712508505,
178
+ "scr_dir2_threshold_5": 0.4208955712508505,
179
+ "scr_dir1_threshold_10": -0.30468725261748164,
180
+ "scr_metric_threshold_10": 0.4716417145638862,
181
+ "scr_dir2_threshold_10": 0.4716417145638862,
182
+ "scr_dir1_threshold_20": -0.578124912688523,
183
+ "scr_metric_threshold_20": 0.5492537143475302,
184
+ "scr_dir2_threshold_20": 0.5492537143475302,
185
+ "scr_dir1_threshold_50": -0.5312500582076514,
186
+ "scr_metric_threshold_50": 0.34626860732250986,
187
+ "scr_dir2_threshold_50": 0.34626860732250986,
188
+ "scr_dir1_threshold_100": 0.015625261934431176,
189
+ "scr_metric_threshold_100": 0.30149253572008095,
190
+ "scr_dir2_threshold_100": 0.30149253572008095,
191
+ "scr_dir1_threshold_500": -0.8124996507540918,
192
+ "scr_metric_threshold_500": -0.11641782175117359,
193
+ "scr_dir2_threshold_500": -0.11641782175117359
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.0,
198
+ "scr_metric_threshold_2": 0.4206642464104151,
199
+ "scr_dir2_threshold_2": 0.4206642464104151,
200
+ "scr_dir1_threshold_5": -0.11904747544236037,
201
+ "scr_metric_threshold_5": 0.5682657865414925,
202
+ "scr_dir2_threshold_5": 0.5682657865414925,
203
+ "scr_dir1_threshold_10": 0.2857144377669125,
204
+ "scr_metric_threshold_10": 0.5977859186130551,
205
+ "scr_dir2_threshold_10": 0.5977859186130551,
206
+ "scr_dir1_threshold_20": 0.17857156795300316,
207
+ "scr_metric_threshold_20": 0.6752767876662826,
208
+ "scr_dir2_threshold_20": 0.6752767876662826,
209
+ "scr_dir1_threshold_50": -0.4107142160234984,
210
+ "scr_metric_threshold_50": 0.4870849284722341,
211
+ "scr_dir2_threshold_50": 0.4870849284722341,
212
+ "scr_dir1_threshold_100": -0.2916663857916754,
213
+ "scr_metric_threshold_100": -0.4944649065042957,
214
+ "scr_dir2_threshold_100": -0.4944649065042957,
215
+ "scr_dir1_threshold_500": -0.04166647448904109,
216
+ "scr_metric_threshold_500": 0.22140220022495802,
217
+ "scr_dir2_threshold_500": 0.22140220022495802
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.21052638917161398,
222
+ "scr_metric_threshold_2": 0.10902270802242568,
223
+ "scr_dir2_threshold_2": 0.10902270802242568,
224
+ "scr_dir1_threshold_5": 0.23391824892014512,
225
+ "scr_metric_threshold_5": 0.13533837956053904,
226
+ "scr_dir2_threshold_5": 0.13533837956053904,
227
+ "scr_dir1_threshold_10": 0.42690065613906913,
228
+ "scr_metric_threshold_10": 0.1766918978026952,
229
+ "scr_dir2_threshold_10": 0.1766918978026952,
230
+ "scr_dir1_threshold_20": 0.461988620044449,
231
+ "scr_metric_threshold_20": 0.24060151386816173,
232
+ "scr_dir2_threshold_20": 0.24060151386816173,
233
+ "scr_dir1_threshold_50": 0.46783649784029024,
234
+ "scr_metric_threshold_50": 0.3045113540112128,
235
+ "scr_dir2_threshold_50": 0.3045113540112128,
236
+ "scr_dir1_threshold_100": 0.5438599548817249,
237
+ "scr_metric_threshold_100": 0.19924821970359016,
238
+ "scr_dir2_threshold_100": 0.19924821970359016,
239
+ "scr_dir1_threshold_500": 0.5146198687721861,
240
+ "scr_metric_threshold_500": 0.40601513868161715,
241
+ "scr_dir2_threshold_500": 0.40601513868161715
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.23893806710109472,
246
+ "scr_metric_threshold_2": 0.37689955838211614,
247
+ "scr_dir2_threshold_2": 0.37689955838211614,
248
+ "scr_dir1_threshold_5": 0.38053123018673657,
249
+ "scr_metric_threshold_5": 0.5744681236530075,
250
+ "scr_dir2_threshold_5": 0.5744681236530075,
251
+ "scr_dir1_threshold_10": 0.21238921715389492,
252
+ "scr_metric_threshold_10": 0.683890476735709,
253
+ "scr_dir2_threshold_10": 0.683890476735709,
254
+ "scr_dir1_threshold_20": 0.5752210382176424,
255
+ "scr_metric_threshold_20": 0.7264436643704083,
256
+ "scr_dir2_threshold_20": 0.7264436643704083,
257
+ "scr_dir1_threshold_50": 0.6017698881648421,
258
+ "scr_metric_threshold_50": 0.8085105650592858,
259
+ "scr_dir2_threshold_50": 0.8085105650592858,
260
+ "scr_dir1_threshold_100": -0.07079658154282091,
261
+ "scr_metric_threshold_100": 0.7750759823352846,
262
+ "scr_dir2_threshold_100": 0.7750759823352846,
263
+ "scr_dir1_threshold_500": 0.884955686861558,
264
+ "scr_metric_threshold_500": 0.7659573774245866,
265
+ "scr_dir2_threshold_500": 0.7659573774245866
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.10576929965403516,
270
+ "scr_metric_threshold_2": 0.1244240049046106,
271
+ "scr_dir2_threshold_2": 0.1244240049046106,
272
+ "scr_dir1_threshold_5": 0.19230779150181063,
273
+ "scr_metric_threshold_5": 0.16129043776725951,
274
+ "scr_dir2_threshold_5": 0.16129043776725951,
275
+ "scr_dir1_threshold_10": 0.26442310585469475,
276
+ "scr_metric_threshold_10": 0.23502302881678466,
277
+ "scr_dir2_threshold_10": 0.23502302881678466,
278
+ "scr_dir1_threshold_20": 0.3269231595848422,
279
+ "scr_metric_threshold_20": 0.35023056284361936,
280
+ "scr_dir2_threshold_20": 0.35023056284361936,
281
+ "scr_dir1_threshold_50": 0.45192298048435076,
282
+ "scr_metric_threshold_50": 0.5668203752290796,
283
+ "scr_dir2_threshold_50": 0.5668203752290796,
284
+ "scr_dir1_threshold_100": 0.34134605051894723,
285
+ "scr_metric_threshold_100": 0.6359447308397168,
286
+ "scr_dir2_threshold_100": 0.6359447308397168,
287
+ "scr_dir1_threshold_500": 0.3750000358200983,
288
+ "scr_metric_threshold_500": 0.4516131158780176,
289
+ "scr_dir2_threshold_500": 0.4516131158780176
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.19.hook_resid_post__trainer_5",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }
results_scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109/scr/sae_bench_gemma-2-2b_topk_width-2pow12_date-1109_blocks.5.hook_resid_post__trainer_0_eval_results.json ADDED
@@ -0,0 +1,297 @@
1
+ {
2
+ "eval_type_id": "scr",
3
+ "eval_config": {
4
+ "random_seed": 42,
5
+ "dataset_names": [
6
+ "LabHC/bias_in_bios_class_set1",
7
+ "canrager/amazon_reviews_mcauley_1and5"
8
+ ],
9
+ "perform_scr": true,
10
+ "early_stopping_patience": 20,
11
+ "train_set_size": 4000,
12
+ "test_set_size": 1000,
13
+ "context_length": 128,
14
+ "probe_train_batch_size": 16,
15
+ "probe_test_batch_size": 500,
16
+ "probe_epochs": 20,
17
+ "probe_lr": 0.001,
18
+ "probe_l1_penalty": 0.001,
19
+ "sae_batch_size": 125,
20
+ "llm_batch_size": 32,
21
+ "llm_dtype": "bfloat16",
22
+ "model_name": "gemma-2-2b",
23
+ "n_values": [
24
+ 2,
25
+ 5,
26
+ 10,
27
+ 20,
28
+ 50,
29
+ 100,
30
+ 500
31
+ ],
32
+ "column1_vals_lookup": {
33
+ "LabHC/bias_in_bios_class_set1": [
34
+ [
35
+ "professor",
36
+ "nurse"
37
+ ],
38
+ [
39
+ "architect",
40
+ "journalist"
41
+ ],
42
+ [
43
+ "surgeon",
44
+ "psychologist"
45
+ ],
46
+ [
47
+ "attorney",
48
+ "teacher"
49
+ ]
50
+ ],
51
+ "canrager/amazon_reviews_mcauley_1and5": [
52
+ [
53
+ "Books",
54
+ "CDs_and_Vinyl"
55
+ ],
56
+ [
57
+ "Software",
58
+ "Electronics"
59
+ ],
60
+ [
61
+ "Pet_Supplies",
62
+ "Office_Products"
63
+ ],
64
+ [
65
+ "Industrial_and_Scientific",
66
+ "Toys_and_Games"
67
+ ]
68
+ ]
69
+ }
70
+ },
71
+ "eval_id": "9778dc42-2b85-4878-8b53-f5a30c37b8e2",
72
+ "datetime_epoch_millis": 1732152694737,
73
+ "eval_result_metrics": {
74
+ "scr_metrics": {
75
+ "scr_dir1_threshold_2": 0.18580483274199258,
76
+ "scr_metric_threshold_2": 0.0469141886868403,
77
+ "scr_dir2_threshold_2": 0.0469141886868403,
78
+ "scr_dir1_threshold_5": 0.2666551622838423,
79
+ "scr_metric_threshold_5": 0.07788436405385432,
80
+ "scr_dir2_threshold_5": 0.07788436405385432,
81
+ "scr_dir1_threshold_10": 0.2587994476269541,
82
+ "scr_metric_threshold_10": 0.09760357322349086,
83
+ "scr_dir2_threshold_10": 0.09760357322349086,
84
+ "scr_dir1_threshold_20": 0.2115140205314303,
85
+ "scr_metric_threshold_20": 0.12741736090625402,
86
+ "scr_dir2_threshold_20": 0.12741736090625402,
87
+ "scr_dir1_threshold_50": 0.20468412514145365,
88
+ "scr_metric_threshold_50": 0.12772664742140202,
89
+ "scr_dir2_threshold_50": 0.12772664742140202,
90
+ "scr_dir1_threshold_100": 0.1654711080219602,
91
+ "scr_metric_threshold_100": 0.15616836665546446,
92
+ "scr_dir2_threshold_100": 0.15616836665546446,
93
+ "scr_dir1_threshold_500": -0.21731249300065633,
94
+ "scr_metric_threshold_500": 0.12217328960991185,
95
+ "scr_dir2_threshold_500": 0.12217328960991185
96
+ }
97
+ },
98
+ "eval_result_details": [
99
+ {
100
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_professor_nurse_results",
101
+ "scr_dir1_threshold_2": 0.32142872348125223,
102
+ "scr_metric_threshold_2": 0.014084465658350092,
103
+ "scr_dir2_threshold_2": 0.014084465658350092,
104
+ "scr_dir1_threshold_5": 0.32142872348125223,
105
+ "scr_metric_threshold_5": 0.035211234104378646,
106
+ "scr_dir2_threshold_5": 0.035211234104378646,
107
+ "scr_dir1_threshold_10": 0.10714361740626105,
108
+ "scr_metric_threshold_10": 0.05399061492684999,
109
+ "scr_dir2_threshold_10": 0.05399061492684999,
110
+ "scr_dir1_threshold_20": 0.10714361740626105,
111
+ "scr_metric_threshold_20": 0.07276999574932133,
112
+ "scr_dir2_threshold_20": 0.07276999574932133,
113
+ "scr_dir1_threshold_50": -0.03571382955624337,
114
+ "scr_metric_threshold_50": 0.01643199319891413,
115
+ "scr_dir2_threshold_50": 0.01643199319891413,
116
+ "scr_dir1_threshold_100": -0.10714361740626105,
117
+ "scr_metric_threshold_100": 0.035211234104378646,
118
+ "scr_dir2_threshold_100": 0.035211234104378646,
119
+ "scr_dir1_threshold_500": -1.0357138295562434,
120
+ "scr_metric_threshold_500": 0.10328645460658556,
121
+ "scr_dir2_threshold_500": 0.10328645460658556
122
+ },
123
+ {
124
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_architect_journalist_results",
125
+ "scr_dir1_threshold_2": 0.4153847846759204,
126
+ "scr_metric_threshold_2": 0.020618597877583123,
127
+ "scr_dir2_threshold_2": 0.020618597877583123,
128
+ "scr_dir1_threshold_5": 0.4153847846759204,
129
+ "scr_metric_threshold_5": 0.06701036629203817,
130
+ "scr_dir2_threshold_5": 0.06701036629203817,
131
+ "scr_dir1_threshold_10": 0.46153895530476774,
132
+ "scr_metric_threshold_10": 0.10309283576770166,
133
+ "scr_dir2_threshold_10": 0.10309283576770166,
134
+ "scr_dir1_threshold_20": 0.44615362043210616,
135
+ "scr_metric_threshold_20": 0.15979390312094827,
136
+ "scr_dir2_threshold_20": 0.15979390312094827,
137
+ "scr_dir1_threshold_50": 0.40000036679782747,
138
+ "scr_metric_threshold_50": 0.23711341473156403,
139
+ "scr_dir2_threshold_50": 0.23711341473156403,
140
+ "scr_dir1_threshold_100": 0.30769202554013275,
141
+ "scr_metric_threshold_100": 0.27061859787758313,
142
+ "scr_dir2_threshold_100": 0.27061859787758313,
143
+ "scr_dir1_threshold_500": -0.07692300638503319,
144
+ "scr_metric_threshold_500": 0.10567012209734605,
145
+ "scr_dir2_threshold_500": 0.10567012209734605
146
+ },
147
+ {
148
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_surgeon_psychologist_results",
149
+ "scr_dir1_threshold_2": 0.31818157188179863,
150
+ "scr_metric_threshold_2": 0.01526713042011719,
151
+ "scr_dir2_threshold_2": 0.01526713042011719,
152
+ "scr_dir1_threshold_5": 0.386363821088651,
153
+ "scr_metric_threshold_5": 0.04325689480129207,
154
+ "scr_dir2_threshold_5": 0.04325689480129207,
155
+ "scr_dir1_threshold_10": 0.386363821088651,
156
+ "scr_metric_threshold_10": 0.058524176887169474,
157
+ "scr_dir2_threshold_10": 0.058524176887169474,
158
+ "scr_dir1_threshold_20": 0.11363617891134899,
159
+ "scr_metric_threshold_20": 0.06870231438916767,
160
+ "scr_dir2_threshold_20": 0.06870231438916767,
161
+ "scr_dir1_threshold_50": -0.11363617891134899,
162
+ "scr_metric_threshold_50": 0.08396944480928485,
163
+ "scr_dir2_threshold_50": 0.08396944480928485,
164
+ "scr_dir1_threshold_100": -0.06818089455674493,
165
+ "scr_metric_threshold_100": 0.10687021627234075,
166
+ "scr_dir2_threshold_100": 0.10687021627234075,
167
+ "scr_dir1_threshold_500": -0.5681808945567449,
168
+ "scr_metric_threshold_500": 0.13231548419445613,
169
+ "scr_dir2_threshold_500": 0.13231548419445613
170
+ },
171
+ {
172
+ "dataset_name": "LabHC/bias_in_bios_class_set1_scr_attorney_teacher_results",
173
+ "scr_dir1_threshold_2": 0.09876559562310747,
174
+ "scr_metric_threshold_2": 0.024193554847883995,
175
+ "scr_dir2_threshold_2": 0.024193554847883995,
176
+ "scr_dir1_threshold_5": 0.2839505355217796,
177
+ "scr_metric_threshold_5": 0.09139794508201646,
178
+ "scr_dir2_threshold_5": 0.09139794508201646,
179
+ "scr_dir1_threshold_10": 0.29629605100978323,
180
+ "scr_metric_threshold_10": -0.018817120310841394,
181
+ "scr_dir2_threshold_10": -0.018817120310841394,
182
+ "scr_dir1_threshold_20": 0.24691325319822952,
183
+ "scr_metric_threshold_20": 0.01612914338360472,
184
+ "scr_dir2_threshold_20": 0.01612914338360472,
185
+ "scr_dir1_threshold_50": 0.35802436430934065,
186
+ "scr_metric_threshold_50": 0.03763440084920587,
187
+ "scr_dir2_threshold_50": 0.03763440084920587,
188
+ "scr_dir1_threshold_100": 0.40740716212089434,
189
+ "scr_metric_threshold_100": 0.053763544232810594,
190
+ "scr_dir2_threshold_100": 0.053763544232810594,
191
+ "scr_dir1_threshold_500": -0.6790121821546703,
192
+ "scr_metric_threshold_500": -0.021505257465601155,
193
+ "scr_dir2_threshold_500": -0.021505257465601155
194
+ },
195
+ {
196
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Books_CDs_and_Vinyl_results",
197
+ "scr_dir1_threshold_2": 0.028409063970008027,
198
+ "scr_metric_threshold_2": 0.10502281986527176,
199
+ "scr_dir2_threshold_2": 0.10502281986527176,
200
+ "scr_dir1_threshold_5": 0.10227276575713139,
201
+ "scr_metric_threshold_5": 0.19178076599302543,
202
+ "scr_dir2_threshold_5": 0.19178076599302543,
203
+ "scr_dir1_threshold_10": 0.14204565851279638,
204
+ "scr_metric_threshold_10": 0.3105023092032548,
205
+ "scr_dir2_threshold_10": 0.3105023092032548,
206
+ "scr_dir1_threshold_20": 0.02272731890855767,
207
+ "scr_metric_threshold_20": 0.35616435746341185,
208
+ "scr_dir2_threshold_20": 0.35616435746341185,
209
+ "scr_dir1_threshold_50": 0.07386370178712337,
210
+ "scr_metric_threshold_50": 0.38812768237861134,
211
+ "scr_dir2_threshold_50": 0.38812768237861134,
212
+ "scr_dir1_threshold_100": -0.011363490122900714,
213
+ "scr_metric_threshold_100": 0.43379000280604463,
214
+ "scr_dir2_threshold_100": 0.43379000280604463,
215
+ "scr_dir1_threshold_500": -0.04545463781711534,
216
+ "scr_metric_threshold_500": 0.23744281425318253,
217
+ "scr_dir2_threshold_500": 0.23744281425318253
218
+ },
219
+ {
220
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Software_Electronics_results",
221
+ "scr_dir1_threshold_2": 0.14728676439671556,
222
+ "scr_metric_threshold_2": 0.020161269001982816,
223
+ "scr_dir2_threshold_2": 0.020161269001982816,
224
+ "scr_dir1_threshold_5": 0.209302390053695,
225
+ "scr_metric_threshold_5": 0.04838718980952953,
226
+ "scr_dir2_threshold_5": 0.04838718980952953,
227
+ "scr_dir1_threshold_10": 0.20155055235944278,
228
+ "scr_metric_threshold_10": 0.10887099681547797,
229
+ "scr_dir2_threshold_10": 0.10887099681547797,
230
+ "scr_dir1_threshold_20": 0.21705422774794722,
231
+ "scr_metric_threshold_20": 0.14112912335516434,
232
+ "scr_dir2_threshold_20": 0.14112912335516434,
233
+ "scr_dir1_threshold_50": 0.31007766623341637,
234
+ "scr_metric_threshold_50": 0.16129039235714715,
235
+ "scr_dir2_threshold_50": 0.16129039235714715,
236
+ "scr_dir1_threshold_100": 0.3488373167561583,
237
+ "scr_metric_threshold_100": 0.17741945562699032,
238
+ "scr_dir2_threshold_100": 0.17741945562699032,
239
+ "scr_dir1_threshold_500": 0.3488373167561583,
240
+ "scr_metric_threshold_500": 0.1854838670912696,
241
+ "scr_dir2_threshold_500": 0.1854838670912696
242
+ },
243
+ {
244
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Pet_Supplies_Office_Products_results",
245
+ "scr_dir1_threshold_2": 0.13636386726998287,
246
+ "scr_metric_threshold_2": 0.17596567182353345,
247
+ "scr_dir2_threshold_2": 0.17596567182353345,
248
+ "scr_dir1_threshold_5": 0.3011364502262436,
249
+ "scr_metric_threshold_5": 0.11587985138236706,
250
+ "scr_dir2_threshold_5": 0.11587985138236706,
251
+ "scr_dir1_threshold_10": 0.3465910726495712,
252
+ "scr_metric_threshold_10": 0.12446368619618059,
253
+ "scr_dir2_threshold_10": 0.12446368619618059,
254
+ "scr_dir1_threshold_20": 0.4147726676219211,
255
+ "scr_metric_threshold_20": 0.18454950663734698,
256
+ "scr_dir2_threshold_20": 0.18454950663734698,
257
+ "scr_dir1_threshold_50": 0.5056819124685763,
258
+ "scr_metric_threshold_50": 0.10729627238243561,
259
+ "scr_dir2_threshold_50": 0.10729627238243561,
260
+ "scr_dir1_threshold_100": 0.3124999365007547,
261
+ "scr_metric_threshold_100": 0.17167388232356773,
262
+ "scr_dir2_threshold_100": 0.17167388232356773,
263
+ "scr_dir1_threshold_500": 0.20454546224233278,
264
+ "scr_metric_threshold_500": 0.21459228895098914,
265
+ "scr_dir2_threshold_500": 0.21459228895098914
266
+ },
267
+ {
268
+ "dataset_name": "canrager/amazon_reviews_mcauley_1and5_scr_Industrial_and_Scientific_Toys_and_Games_results",
269
+ "scr_dir1_threshold_2": 0.0206182906371552,
270
+ "scr_metric_threshold_2": 0.0,
271
+ "scr_dir2_threshold_2": 0.0,
272
+ "scr_dir1_threshold_5": 0.1134018274660653,
273
+ "scr_metric_threshold_5": 0.030150664966187093,
274
+ "scr_dir2_threshold_5": 0.030150664966187093,
275
+ "scr_dir1_threshold_10": 0.12886585268435963,
276
+ "scr_metric_threshold_10": 0.04020108630213385,
277
+ "scr_dir2_threshold_10": 0.04020108630213385,
278
+ "scr_dir1_threshold_20": 0.12371128002507081,
279
+ "scr_metric_threshold_20": 0.020100543151066925,
280
+ "scr_dir2_threshold_20": 0.020100543151066925,
281
+ "scr_dir1_threshold_50": 0.13917499800293723,
282
+ "scr_metric_threshold_50": -0.010050421335946752,
283
+ "scr_dir2_threshold_50": -0.010050421335946752,
284
+ "scr_dir1_threshold_100": 0.1340204253436484,
285
+ "scr_metric_threshold_100": 0.0,
286
+ "scr_dir2_threshold_100": 0.0,
287
+ "scr_dir1_threshold_500": 0.1134018274660653,
288
+ "scr_metric_threshold_500": 0.020100543151066925,
289
+ "scr_dir2_threshold_500": 0.020100543151066925
290
+ }
291
+ ],
292
+ "sae_bench_commit_hash": "da9a2dc7bd29520761ed3a196f3912a42c010ef9",
293
+ "sae_lens_id": "blocks.5.hook_resid_post__trainer_0",
294
+ "sae_lens_release_id": "sae_bench_gemma-2-2b_topk_width-2pow12_date-1109",
295
+ "sae_lens_version": "4.3.5",
296
+ "eval_result_unstructured": null
297
+ }