Danil committed on
Commit
cfb6b96
1 Parent(s): 2798777

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -7
config.json CHANGED
@@ -23,13 +23,6 @@
23
  "relative_attention_num_buckets": 32,
24
  "torch_dtype": "float32",
25
  "transformers_version": "4.15.0",
26
- "task_specific_params": {
27
- "text-key": {
28
- "do_sample": true,
29
- "max_length": 64,
30
- "top_p": 1.0
31
- }
32
- },
33
  "use_cache": true,
34
  "vocab_size": 32128
35
  }
 
23
  "relative_attention_num_buckets": 32,
24
  "torch_dtype": "float32",
25
  "transformers_version": "4.15.0",
 
 
 
 
 
 
 
26
  "use_cache": true,
27
  "vocab_size": 32128
28
  }