Commit 9ca7622
qgallouedec committed
Parent: e9131cb

Upload model

Files changed (3):
  1. config.json +3 -5
  2. generation_config.json +2 -3
  3. model.safetensors +1 -1
config.json CHANGED
@@ -1,13 +1,12 @@
 {
-  "_name_or_path": "trl-internal-testing/dummy-GPT2-correct-vocab",
-  "activation_function": "gelu",
+  "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
-  "bos_token_id": 0,
+  "bos_token_id": 50256,
   "embd_pdrop": 0.1,
-  "eos_token_id": 0,
+  "eos_token_id": 50256,
   "initializer_range": 0.02,
   "is_decoder": true,
   "layer_norm_epsilon": 1e-05,
@@ -17,7 +16,6 @@
   "n_inner": 37,
   "n_layer": 5,
   "n_positions": 512,
-  "pad_token_id": 1023,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
   "scale_attn_by_inverse_layer_idx": false,
generation_config.json CHANGED
@@ -1,7 +1,6 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 0,
-  "eos_token_id": 0,
-  "pad_token_id": 1023,
+  "bos_token_id": 50256,
+  "eos_token_id": 50256,
   "transformers_version": "4.44.0"
 }
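
The generation config is brought in line with the model config. A short check, again assuming the same repo id; the tokenizer lookup uses the reference openai-community/gpt2 vocabulary, where id 50256 is <|endoftext|>:

# Minimal sketch, not part of the commit: repo id assumed as above.
from transformers import AutoTokenizer, GenerationConfig

gen_config = GenerationConfig.from_pretrained("trl-internal-testing/dummy-GPT2-correct-vocab")
tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2")

assert gen_config.bos_token_id == gen_config.eos_token_id == 50256
assert tokenizer.convert_ids_to_tokens(50256) == "<|endoftext|>"
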
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2f21541baec8035040416d0c5d64b84eafd209151718ebcc6163c57597df1348
+oid sha256:cfa7830bfc34434c3e30d43d58ac74da6b606bacdfc08e347bc89cbe8e6f0f38
 size 6640404
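
The pointer file records a new Git LFS object for the re-uploaded weights; the payload size is unchanged. A sketch for verifying a downloaded copy against the new oid, assuming the same repo id:

# Minimal sketch, not part of the commit: download the updated weights and
# check the SHA-256 against the new LFS oid. Repo id assumed as above.
import hashlib

from huggingface_hub import hf_hub_download

path = hf_hub_download("trl-internal-testing/dummy-GPT2-correct-vocab", "model.safetensors")
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert digest == "cfa7830bfc34434c3e30d43d58ac74da6b606bacdfc08e347bc89cbe8e6f0f38"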