Or4cl3-1 committed
Commit 76e41ef · verified · 1 Parent(s): baaa469

Update config.json

Files changed (1)
  1. config.json +23 -52
config.json CHANGED
@@ -1,55 +1,26 @@
 {
-  "name_or_path": "Or4cl3-1/Agent_Gemma_7b",
-  "model_type": "LazyMergekit",
+  "_name_or_path": "mistralai/Mistral-7B-v0.1",
   "architectures": [
-    "LazyMergekit"
+    "MistralForCausalLM"
   ],
-  "base_model": "google/gemma-7b",
-  "merge_method": "slerp",
-  "slices": [
-    {
-      "sources": [
-        {
-          "model": "google/gemma-7b",
-          "layer_range": [
-            0,
-            32
-          ]
-        },
-        {
-          "model": "SuperAGI/SAM",
-          "layer_range": [
-            0,
-            32
-          ]
-        }
-      ],
-      "parameters": {
-        "t": {
-          "filter": "self_attn",
-          "value": [
-            0,
-            0.5,
-            0.3,
-            0.7,
-            1
-          ]
-        },
-        "t": {
-          "filter": "mlp",
-          "value": [
-            1,
-            0.5,
-            0.7,
-            0.3,
-            0
-          ]
-        },
-        "t": {
-          "value": 0.5
-        }
-      },
-      "dtype": "bfloat16"
-    }
-  ]
-}
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 32768,
+  "model_type": "mistral",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-05,
+  "rope_theta": 10000.0,
+  "sliding_window": 4096,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.36.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
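
The added values match the standard mistralai/Mistral-7B-v0.1 configuration, so transformers can consume this config.json through its Mistral config class. A minimal sketch follows, assuming transformers >= 4.36 is installed; rebuilding the config locally from the values added above is for illustration, and the commented-out Hub call to the Or4cl3-1/Agent_Gemma_7b repo from this commit assumes network access.

from transformers import MistralConfig

# Recreate the committed config.json values locally; fields left out
# (initializer_range=0.02, use_cache=True, transformers_version) keep
# their MistralConfig defaults.
config = MistralConfig(
    vocab_size=32000,
    hidden_size=4096,
    intermediate_size=14336,
    num_hidden_layers=32,
    num_attention_heads=32,
    num_key_value_heads=8,        # grouped-query attention: 32 query heads share 8 KV heads
    hidden_act="silu",
    max_position_embeddings=32768,
    rms_norm_eps=1e-05,
    rope_theta=10000.0,
    sliding_window=4096,
    attention_dropout=0.0,
    bos_token_id=1,
    eos_token_id=2,
    tie_word_embeddings=False,
    torch_dtype="float16",
)
print(config.model_type)  # -> "mistral"

# Equivalent, pulling the committed file straight from the Hub (network access assumed):
# from transformers import AutoConfig
# config = AutoConfig.from_pretrained("Or4cl3-1/Agent_Gemma_7b")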