{"sae": {"expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false}, "batch_size": 8, "grad_acc_steps": 1, "micro_acc_steps": 1, "lr": null, "lr_warmup_steps": 1000, "auxk_alpha": 0.0, "dead_feature_threshold": 10000000, "hookpoints": ["layers.0", "layers.1", "layers.2", "layers.3", "layers.4", "layers.5", "layers.6", "layers.7", "layers.8", "layers.9", "layers.10", "layers.11"], "layers": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], "layer_stride": 1, "distribute_modules": false, "save_every": 1000, "log_to_wandb": true, "run_name": null, "wandb_log_frequency": 1, "model": "EleutherAI/pythia-160m", "dataset": "togethercomputer/RedPajama-Data-1T-Sample", "split": "train", "ctx_len": 2048, "hf_token": null, "load_in_8bit": false, "max_examples": null, "resume": false, "seed": 42, "data_preprocessing_num_proc": 32}