prithivMLmods committed on
Commit 6536e74
1 Parent(s): fbd94c7

Upload 11 files

config.json ADDED
@@ -0,0 +1,68 @@
+{
+  "_num_labels": 3,
+  "activation_dropout": 0.0,
+  "activation_function": "gelu",
+  "add_final_layer_norm": false,
+  "architectures": [
+    "BartForConditionalGeneration"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classif_dropout": 0.0,
+  "classifier_dropout": 0.0,
+  "d_model": 1024,
+  "decoder_attention_heads": 16,
+  "decoder_ffn_dim": 4096,
+  "decoder_layerdrop": 0.0,
+  "decoder_layers": 12,
+  "decoder_start_token_id": 2,
+  "dropout": 0.1,
+  "early_stopping": true,
+  "encoder_attention_heads": 16,
+  "encoder_ffn_dim": 4096,
+  "encoder_layerdrop": 0.0,
+  "encoder_layers": 12,
+  "eos_token_id": 2,
+  "force_bos_token_to_be_generated": true,
+  "forced_bos_token_id": 0,
+  "forced_eos_token_id": 2,
+  "gradient_checkpointing": false,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2"
+  },
+  "init_std": 0.02,
+  "is_encoder_decoder": true,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2
+  },
+  "length_penalty": 2.0,
+  "max_length": 142,
+  "max_position_embeddings": 1024,
+  "min_length": 56,
+  "model_type": "bart",
+  "no_repeat_ngram_size": 3,
+  "normalize_before": false,
+  "num_beams": 4,
+  "num_hidden_layers": 12,
+  "output_past": true,
+  "pad_token_id": 1,
+  "prefix": " ",
+  "scale_embedding": false,
+  "task_specific_params": {
+    "summarization": {
+      "early_stopping": true,
+      "length_penalty": 2.0,
+      "max_length": 142,
+      "min_length": 56,
+      "no_repeat_ngram_size": 3,
+      "num_beams": 4
+    }
+  },
+  "transformers_version": "4.7.0.dev0",
+  "use_cache": true,
+  "vocab_size": 50264
+}
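
The config above describes a BART encoder-decoder checkpoint with summarization defaults under task_specific_params. As a minimal sketch of how such a checkpoint is typically used with the transformers library: the repo id below is a placeholder (the repository name is not shown in this commit view), and the generate() arguments simply mirror the summarization block of config.json.

# Minimal sketch, assuming a hypothetical repo id; substitute the actual repository.
from transformers import BartForConditionalGeneration, BartTokenizer

repo_id = "prithivMLmods/bart-summarizer"  # placeholder, not confirmed by this commit
tokenizer = BartTokenizer.from_pretrained(repo_id)
model = BartForConditionalGeneration.from_pretrained(repo_id)

article = "Long input text to be summarized ..."
inputs = tokenizer(article, max_length=1024, truncation=True, return_tensors="pt")

# These values mirror task_specific_params["summarization"] in config.json.
summary_ids = model.generate(
    **inputs,
    num_beams=4,
    length_penalty=2.0,
    max_length=142,
    min_length=56,
    no_repeat_ngram_size=3,
    early_stopping=True,
)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))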
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dda1068890b2e54a98369501ceebbed19edfe184a45581c1ecd228e3f224a4bf
+size 1625380786
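
The three lines above are a Git LFS pointer, not the weights themselves; the same applies to the other large weight files in this commit (model.safetensors, pytorch_model.bin, rust_model.ot, tf_model.h5). A small sketch of parsing such a pointer and checking a downloaded blob against its recorded sha256; the file paths are hypothetical.

# Sketch: parse a Git LFS pointer file and verify a downloaded blob against it.
import hashlib

def parse_lfs_pointer(path):
    fields = {}
    with open(path, "r") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields  # e.g. {"version": ..., "oid": "sha256:dda10688...", "size": "1625380786"}

def verify(pointer_path, blob_path):
    fields = parse_lfs_pointer(pointer_path)
    expected = fields["oid"].split(":", 1)[1]
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected

# verify("flax_model.msgpack.pointer", "flax_model.msgpack")  # hypothetical paths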
generation_config.json ADDED
@@ -0,0 +1,16 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "decoder_start_token_id": 2,
+  "early_stopping": true,
+  "eos_token_id": 2,
+  "forced_bos_token_id": 0,
+  "forced_eos_token_id": 2,
+  "length_penalty": 2.0,
+  "max_length": 142,
+  "min_length": 56,
+  "no_repeat_ngram_size": 3,
+  "num_beams": 4,
+  "pad_token_id": 1,
+  "transformers_version": "4.27.0.dev0"
+}
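
generation_config.json is the default generation settings file, so from_pretrained picks these values up automatically. Purely for reference, the same defaults expressed as an explicit GenerationConfig object (building it by hand is not required; this only makes the beam-search settings visible):

# Sketch: the generation_config.json defaults as an explicit GenerationConfig.
from transformers import GenerationConfig

summarization_defaults = GenerationConfig(
    bos_token_id=0,
    decoder_start_token_id=2,
    eos_token_id=2,
    forced_bos_token_id=0,
    forced_eos_token_id=2,
    pad_token_id=1,
    early_stopping=True,
    length_penalty=2.0,
    max_length=142,
    min_length=56,
    no_repeat_ngram_size=3,
    num_beams=4,
)
# summary_ids = model.generate(**inputs, generation_config=summarization_defaults)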
generation_config_for_summarization.json ADDED
@@ -0,0 +1,16 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "decoder_start_token_id": 2,
+  "early_stopping": true,
+  "eos_token_id": 2,
+  "forced_bos_token_id": 0,
+  "forced_eos_token_id": 2,
+  "length_penalty": 2.0,
+  "max_length": 142,
+  "min_length": 56,
+  "no_repeat_ngram_size": 3,
+  "num_beams": 4,
+  "pad_token_id": 1,
+  "transformers_version": "4.27.0.dev0"
+}
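
This file duplicates generation_config.json under a task-specific name, and because it is not the default file name it is not loaded automatically. One way to use it, assuming the repository has been cloned or downloaded locally:

# Sketch: load the named summarization generation config from its JSON file.
import json
from transformers import GenerationConfig

with open("generation_config_for_summarization.json") as f:
    params = json.load(f)

summarization_config = GenerationConfig(**params)
# summary_ids = model.generate(**inputs, generation_config=summarization_config)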
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40041830399afb5348525ef8354b007ecec4286fdf3524f7e6b54377e17096cb
+size 1625222120
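
model.safetensors holds the same weights in the safetensors format, which can be inspected without instantiating the model. A small sketch, assuming the safetensors package is installed and the file has already been downloaded locally:

# Sketch: peek at tensor names, shapes and dtypes in the safetensors file.
from safetensors import safe_open

with safe_open("model.safetensors", framework="pt") as f:
    for name in list(f.keys())[:5]:  # first few tensors only
        tensor = f.get_tensor(name)
        print(name, tuple(tensor.shape), tensor.dtype)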
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ac2745c02ac987d82c78a14b426de58d5e4178ae8039ba1c6881eccff3e82f1
+size 1625270765
rust_model.ot ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd0d1586babffa4e90ca71e230290b55b8ebf634319a1c4200c8506ddbae0ab0
+size 2037049936
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d845bfe2bbbc0ea00ee11b6e032aa285a008caef008cbbac975454c24a4415f
+size 1625692088
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
vocab.json ADDED
The diff for this file is too large to render. See raw diff