File size: 532 Bytes
{
  "bits": 4,
  "group_size": 128,
  "damp_percent": 0.01,
  "desc_act": false,
  "static_groups": false,
  "sym": false,
  "true_sequential": false,
  "lm_head": true,
  "model_name_or_path": null,
  "model_file_base_name": "model",
  "quant_method": "gptq",
  "checkpoint_format": "gptq",
  "meta": {
    "quantizer": "gptqmodel:0.9.0",
    "auto_round": "0.1",
    "iters": 400,
    "lr": 0.0025,
    "minmax_lr": 0.0025,
    "enable_minmax_tuning": true,
    "use_quant_input": false,
    "scale_dtype": "torch.float16"
  }
}
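The config above describes a GPTQ-format checkpoint produced by gptqmodel 0.9.0 with AutoRound tuning metadata. The sketch below is a minimal, illustrative reading of what the key fields (bits=4, group_size=128, sym=false, scale_dtype=torch.float16) imply for the weight layout: one fp16 scale and one zero-point per group of 128 input weights, quantized asymmetrically to 4 bits. It uses plain round-to-nearest as a stand-in for the actual GPTQ/AutoRound procedure; the filename quantize_config.json and the toy weight shape are assumptions, not taken from the source.

# A minimal sketch, not the gptqmodel/auto_round code path: group-wise,
# asymmetric ("sym": false), 4-bit round-to-nearest quantization driven by the
# bits/group_size values in the config. Real GPTQ additionally applies
# Hessian-based error compensation, and AutoRound tunes the rounding.
import json
import torch

# Assumption: the JSON above is saved locally as quantize_config.json.
with open("quantize_config.json") as f:
    cfg = json.load(f)

bits, group_size = cfg["bits"], cfg["group_size"]    # 4, 128
qmax = 2 ** bits - 1                                  # 15: largest 4-bit code

def quantize_groupwise(w: torch.Tensor):
    """One scale/zero-point per group of `group_size` weights along the input dim."""
    rows, cols = w.shape
    w_g = w.reshape(rows, cols // group_size, group_size)
    w_min = w_g.amin(dim=-1, keepdim=True)
    w_max = w_g.amax(dim=-1, keepdim=True)
    scale = (w_max - w_min).clamp(min=1e-8) / qmax    # asymmetric range per group
    zero = torch.clamp(torch.round(-w_min / scale), 0, qmax)
    q = torch.clamp(torch.round(w_g / scale) + zero, 0, qmax)
    # "scale_dtype": "torch.float16" in the config: scales are kept in fp16
    return q.to(torch.uint8), scale.to(torch.float16), zero.to(torch.uint8)

w = torch.randn(16, 256)                  # toy weight, columns divisible by group_size
q, scale, zero = quantize_groupwise(w)
w_hat = ((q.float() - zero.float()) * scale.float()).reshape_as(w)
print("max abs reconstruction error:", (w - w_hat).abs().max().item())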