raptorkwok committed
Commit c31c58b · verified · 1 Parent(s): d79e30c

Upload BartForConditionalGeneration

Files changed (3)
  1. config.json +4 -3
  2. generation_config.json +2 -0
  3. pytorch_model.bin +2 -2
config.json CHANGED
@@ -5,7 +5,7 @@
   "add_bias_logits": false,
   "add_final_layer_norm": false,
   "architectures": [
-    "BartForCausalLM"
+    "BartForConditionalGeneration"
   ],
   "attention_dropout": 0.1,
   "bos_token_id": 101,
@@ -32,8 +32,7 @@
   "2": "LABEL_2"
   },
   "init_std": 0.02,
-  "is_decoder": true,
-  "is_encoder_decoder": false,
+  "is_encoder_decoder": true,
   "label2id": {
   "LABEL_0": 0,
   "LABEL_1": 1,
@@ -46,6 +45,8 @@
   "normalize_embedding": true,
   "num_beams": 4,
   "num_hidden_layers": 6,
+  "output_attentions": true,
+  "output_hidden_states": true,
   "pad_token_id": 0,
   "scale_embedding": false,
   "task_specific_params": {
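With the architectures entry switched to BartForConditionalGeneration and is_encoder_decoder set to true, the checkpoint now loads as a full seq2seq model rather than a decoder-only one. A minimal sketch of loading it after this commit, using a hypothetical repo id (the actual repository name is not shown in this diff):

# Sketch: load the checkpoint after this commit and confirm the config
# now describes a full encoder-decoder BART model.
# "raptorkwok/example-bart" is a hypothetical placeholder repo id.
from transformers import AutoConfig, BartForConditionalGeneration

repo_id = "raptorkwok/example-bart"  # hypothetical placeholder

config = AutoConfig.from_pretrained(repo_id)
print(config.architectures)       # expected: ['BartForConditionalGeneration']
print(config.is_encoder_decoder)  # expected: True

# Loading the concrete class directly; AutoModelForSeq2SeqLM would now
# resolve to the same class because of the updated "architectures" field.
model = BartForConditionalGeneration.from_pretrained(repo_id)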
generation_config.json CHANGED
@@ -6,6 +6,8 @@
   "forced_eos_token_id": 102,
   "no_repeat_ngram_size": 3,
   "num_beams": 4,
+  "output_attentions": true,
+  "output_hidden_states": true,
   "pad_token_id": 0,
   "transformers_version": "4.28.1"
 }
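The two new generation flags only surface in the output when generate() is asked to return a structured result. A hedged sketch, reusing the hypothetical repo id from above:

# Sketch: with output_attentions / output_hidden_states enabled in
# generation_config.json, generate() can return attention maps and hidden
# states alongside the sequences, provided return_dict_in_generate=True.
from transformers import AutoTokenizer, BartForConditionalGeneration

repo_id = "raptorkwok/example-bart"  # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = BartForConditionalGeneration.from_pretrained(repo_id)

inputs = tokenizer("example input text", return_tensors="pt")
out = model.generate(
    **inputs,
    return_dict_in_generate=True,  # required for the extra outputs below
)

print(tokenizer.batch_decode(out.sequences, skip_special_tokens=True))
# Because the new flags are set, the output object also carries
# out.encoder_attentions, out.decoder_attentions and out.decoder_hidden_states.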
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7191d4af7646cfba04ffcc66b1e04db369ac38836a3b38379e08c329f84984cd
-size 387558115
+oid sha256:e75788fe0877d4b99b8292fb904aeaf001a74a7200926123a38285ecf7aaacb1
+size 561062835
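The Git LFS pointer's oid is the SHA-256 of the weight file itself, so a downloaded copy can be checked against the updated pointer. A small sketch, assuming the file has been fetched to a local pytorch_model.bin:

# Sketch: verify a locally downloaded pytorch_model.bin against the new
# LFS pointer (the oid is the SHA-256 of the file's contents).
import hashlib

EXPECTED_OID = "e75788fe0877d4b99b8292fb904aeaf001a74a7200926123a38285ecf7aaacb1"
EXPECTED_SIZE = 561062835  # bytes, from the updated pointer

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file in chunks so large checkpoints don't exhaust memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "pytorch_model.bin"  # assumed local path to the downloaded file
assert sha256_of(path) == EXPECTED_OID, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer")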