Wonder-Griffin committed on
Commit
18d158c
1 Parent(s): ef04d98

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +8 -8
config.json CHANGED
@@ -9,10 +9,10 @@
9
  "pruned_heads": {},
10
  "tie_word_embeddings": true,
11
  "chunk_size_feed_forward": 0,
12
- "is_encoder_decoder": false,
13
- "is_decoder": false,
14
  "cross_attention_hidden_size": null,
15
- "add_cross_attention": false,
16
  "tie_encoder_decoder": false,
17
  "max_length": 20,
18
  "min_length": 0,
@@ -32,14 +32,14 @@
32
  "bad_words_ids": null,
33
  "num_return_sequences": 1,
34
  "output_scores": false,
35
- "return_dict_in_generate": false,
36
- "forced_bos_token_id": null,
37
- "forced_eos_token_id": null,
38
  "remove_invalid_values": false,
39
  "exponential_decay_length_penalty": null,
40
  "suppress_tokens": null,
41
  "begin_suppress_tokens": null,
42
- "architectures": ["GPT2LMHeadModel"],
43
  "finetuning_task": null,
44
  "id2label": {
45
  "0": "LABEL_0",
@@ -69,5 +69,5 @@
69
  "num_heads": 12,
70
  "num_experts": 4,
71
  "initializer_range": 0.02,
72
- "model_type": "gpt2"
73
  }
 
9
  "pruned_heads": {},
10
  "tie_word_embeddings": true,
11
  "chunk_size_feed_forward": 0,
12
+ "is_encoder_decoder": true,
13
+ "is_decoder": true,
14
  "cross_attention_hidden_size": null,
15
+ "add_cross_attention": true,
16
  "tie_encoder_decoder": false,
17
  "max_length": 20,
18
  "min_length": 0,
 
32
  "bad_words_ids": null,
33
  "num_return_sequences": 1,
34
  "output_scores": false,
35
+ "return_dict_in_generate": true,
36
+ "forced_bos_token_id": true,
37
+ "forced_eos_token_id": true,
38
  "remove_invalid_values": false,
39
  "exponential_decay_length_penalty": null,
40
  "suppress_tokens": null,
41
  "begin_suppress_tokens": null,
42
+ "architectures": ["AutoModelForCausalLM"],
43
  "finetuning_task": null,
44
  "id2label": {
45
  "0": "LABEL_0",
 
69
  "num_heads": 12,
70
  "num_experts": 4,
71
  "initializer_range": 0.02,
72
+ "model_type": "Zeus"
73
  }