results/adapter_config.json (commit fb9d8bd, "Training in progress, step 6000")
{
  "auto_mapping": null,
  "base_model_name_or_path": "gpt2",
  "fan_in_fan_out": true,
  "feedforward_modules": [
    "transformer.h.0.attn.c_proj",
    "transformer.h.1.attn.c_proj",
    "transformer.h.2.attn.c_proj",
    "transformer.h.3.attn.c_proj",
    "transformer.h.4.attn.c_proj",
    "transformer.h.5.attn.c_proj",
    "transformer.h.6.attn.c_proj",
    "transformer.h.7.attn.c_proj",
    "transformer.h.8.attn.c_proj",
    "transformer.h.9.attn.c_proj",
    "transformer.h.10.attn.c_proj",
    "transformer.h.11.attn.c_proj"
  ],
  "inference_mode": true,
  "init_ia3_weights": true,
  "modules_to_save": null,
  "peft_type": "IA3",
  "revision": null,
  "target_modules": [
    "transformer.h.0.attn.c_attn",
    "transformer.h.0.attn.c_proj",
    "transformer.h.1.attn.c_attn",
    "transformer.h.1.attn.c_proj",
    "transformer.h.2.attn.c_attn",
    "transformer.h.2.attn.c_proj",
    "transformer.h.3.attn.c_attn",
    "transformer.h.3.attn.c_proj",
    "transformer.h.4.attn.c_attn",
    "transformer.h.4.attn.c_proj",
    "transformer.h.5.attn.c_attn",
    "transformer.h.5.attn.c_proj",
    "transformer.h.6.attn.c_attn",
    "transformer.h.6.attn.c_proj",
    "transformer.h.7.attn.c_attn",
    "transformer.h.7.attn.c_proj",
    "transformer.h.8.attn.c_attn",
    "transformer.h.8.attn.c_proj",
    "transformer.h.9.attn.c_attn",
    "transformer.h.9.attn.c_proj",
    "transformer.h.10.attn.c_attn",
    "transformer.h.10.attn.c_proj",
    "transformer.h.11.attn.c_attn",
    "transformer.h.11.attn.c_proj"
  ],
  "task_type": "CAUSAL_LM"
}
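
For reference, a minimal sketch of how an adapter matching this config could be created and reloaded with the PEFT library. The library calls (IA3Config, get_peft_model, PeftModel.from_pretrained) are standard PEFT APIs, but the range(12) layer count, the variable names, and the "results" adapter directory are assumptions inferred from this file rather than taken from the training code itself.

# Sketch only: rebuilds the (IA)^3 setup described by results/adapter_config.json.
from peft import IA3Config, PeftModel, TaskType, get_peft_model
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("gpt2")

# Explicit per-layer module names, mirroring the lists in the config above
# (gpt2 base has 12 transformer blocks).
target_modules = [
    f"transformer.h.{i}.attn.{name}"
    for i in range(12)
    for name in ("c_attn", "c_proj")
]
feedforward_modules = [f"transformer.h.{i}.attn.c_proj" for i in range(12)]

config = IA3Config(
    task_type=TaskType.CAUSAL_LM,
    target_modules=target_modules,
    feedforward_modules=feedforward_modules,  # must be a subset of target_modules
    fan_in_fan_out=True,   # GPT-2 uses Conv1D layers, so the weights are transposed
    init_ia3_weights=True,
)

model = get_peft_model(base, config)
model.print_trainable_parameters()  # only the (IA)^3 scaling vectors are trainable

# Reloading the trained adapter from the "results" directory (assumed path,
# matching the folder name shown above):
# model = PeftModel.from_pretrained(AutoModelForCausalLM.from_pretrained("gpt2"), "results")

Because (IA)^3 only learns elementwise scaling vectors for the targeted projections, the adapter adds very few trainable parameters on top of the frozen gpt2 weights.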