File size: 992 Bytes
0607862
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2e40be0
0607862
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
```
{
    "cache_dir": "/leonardo_work/EUHPC_E03_068/.cache",
    "method": "orpo",
    "dataset": "autoredteam",
    "model": "togethercomputer/RedPajama-INCITE-Base-3B-v1",
    "tokenizer": "togethercomputer/RedPajama-INCITE-Instruct-3B-v1",
    "lr": 0.01,
    "train_batch_size": 5,
    "eval_batch_size": 5,
    "num_epochs": 3,
    "seed": 42,
    "eval_only": false,
    "evaluation_size": 2000,
    "checkpoint_path": null,
    "experiment_name": "RedPajama3b_v1-autoredteam_v2-train-3",
    "experiment_group": "results",
    "reference_model": "togethercomputer/RedPajama-INCITE-Instruct-3B-v1",
    "context_length": 1024,
    "train_summarization": "",
    "dpo_beta": 0.1,
    "orpo_beta": 0.1,
    "kl_coef": 0.0,
    "reward_model": "",
    "bestofn_size": 4,
    "train_reward_model": "",
    "chat_template": "{% for message in messages %}{{message['role'] + ': ' + message['content'] + '\n\n'}}{% endfor %}{% if add_generation_prompt %}{{ 'assistant: ' }}{% endif %}"
}
```