{
  "meta.run_name": "LoveLace-M-nlu-dph_Juea",
  "meta.validate": true,
  "meta.evaluate": false,
  "finetune.trainable_backbone": true,
  "finetune.mode": "dph",
  "model.recompute_kv": false,
  "model.trainable_embeddings": true,
  "model.dropout_att_mat": 0.0,
  "model.dropout_att_out": 0.0,
  "model.dropout_ffn_int": 0.0,
  "model.dropout_ffn_out": 0.0,
  "model.reward_heads": [
    "reward_head"
  ],
  "model.reward_pooler": "swiglu_bert",
  "model.reward_dropout": 0.1,
  "train.batch_size": 64,
  "train.batch_size_step": 8,
  "train.batches_per_epoch": 512,
  "train.length_sequence": 2048,
  "train.length_cache": 2048,
  "train.lr_cooldown_tokens": 3000000000,
  "train.lr_warmup_steps": 200,
  "train.lr_max": 3e-06,
  "train.opt_max_grad_norm": 1.0,
  "train.opt_weight_decay": 0.5,
  "train.opt_decay_init": true,
  "train.opt_decay_mask": [],
  "dph.dpo_beta": 0.6,
  "dph.dpo_epsilon": 0.25,
  "dph.dpo_average_logprobs": false,
  "dph.dpo_weight": 1.0,
  "dph.dph_contrastive": false,
  "dph.dph_epsilon": 0.1,
  "dph.dph_weight": 1.0,
  "dph.dph_decay_init": false,
  "dph.dph_weight_decay": 0.1,
  "finetune.dph_mix": [
    "glue/cola",
    "glue/mnli",
    "glue/mrpc",
    "glue/qnli",
    "glue/qqp",
    "glue/rte",
    "glue/sst2",
    "glue/stsb",
    "mmlu/all",
    "race/all",
    "hellaswag/choice",
    "hellaswag/no_choice",
    "squad/v2",
    "obqa/main",
    "winogrande/no_choice",
    "arc/challenge",
    "arc/easy",
    "super_glue/boolq",
    "piqa/no_choice",
    "ultrafeedback/binarized",
    "orca/orca_dpo_pairs"
  ],
  "finetune.checkpoint": "LoveLace-M-nlu-sft_nfgN",
  "model.architectures": [
    "LSWTForCausalLM"
  ],
  "model.bos_token_id": 2,
  "model.pad_token_id": 1,
  "model.eos_token_id": 2,
  "model._name_or_path": "./checkpoints/LoveLace-M-vocab_X9Ge",
  "model.transformers_version": "4.37.2",
  "model.model_type": "lsw_transformer",
  "model.vocab_size": 50272,
  "model.d_vocab": 768,
  "model.d_model": 1536,
  "model.d_ffn": 4096,
  "model.n_heads": 24,
  "model.n_layers": 18,
  "model.n_registers": 0,
  "model.gated_ffn": true,
  "model.gated_att": false,
  "model.qk_norm": false,
  "model.init_std": 0.02,
  "model.enable_bias": true,
  "model.rope_base_freq": 500000,
  "model.rope_reversed": true,
  "model.rope_positions": 4096,
  "model.rope_dynamic": false,
  "model.rope_ntk_scale": 1.0,
  "model.rope_yarn_a": 0.07,
  "model.rope_yarn_b": 1.0,
  "model.dropout_layers": 0.0,
  "model.use_cache": true,
  "model.parent_embeddings": "facebook/opt-125m",
  "train.lr_cooldown_ratio": 0.1,
  "train.optimizer": "LaProp",
  "train.opt_beta_1": 0.9,
  "train.opt_beta_2": 0.95,
  "train.opt_eps": 1e-08,
  "train.opt_rho": 0.1,
  "train.loss_objective": "MLE",
  "train.loss_sim_margin": 0.5,
  "params.total": 569864192,
  "params.trainable": 569864192,
  "params.non_trainable": 0
}