#!/usr/bin/env bash
# distil-whisper-large-v3-ptbr / run_distillation_backup.sh
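#
# Launches knowledge distillation of "openai/whisper-large-v3" (teacher) into
# the student model initialized at ./distil-large-v3-init, via the Hugging Face
# distil-whisper run_distillation.py training script: 5000 steps on
# freds0/cml_tts_dataset_polish (train+test splits concatenated), evaluating
# and checkpointing every 1000 steps, with the encoder frozen, bfloat16
# training, and pseudo-labels filtered at a WER threshold of 20.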
accelerate launch run_distillation.py \
--model_name_or_path "./distil-large-v3-init" \
--teacher_model_name_or_path "openai/whisper-large-v3" \
--train_dataset_name "freds0/cml_tts_dataset_polish+freds0/cml_tts_dataset_polish" \
--train_split_name "train+test" \
--text_column_name "transcript+transcript" \
--eval_dataset_name "freds0/cml_tts_dataset_polish" \
--eval_text_column_name "transcript" \
--eval_steps 1000 \
--save_steps 1000 \
--warmup_steps 50 \
--learning_rate 0.0001 \
--lr_scheduler_type "constant_with_warmup" \
--timestamp_probability 0.2 \
--condition_on_prev_probability 0.2 \
--language "pl" \
--task "transcribe" \
--logging_steps 25 \
--save_total_limit 1 \
--max_steps 5000 \
--wer_threshold 20 \
--per_device_train_batch_size 32 \
--per_device_eval_batch_size 32 \
--dataloader_num_workers 8 \
--preprocessing_num_workers 8 \
--ddp_timeout 7200 \
--dtype "bfloat16" \
--output_dir "./" \
--do_train \
--do_eval \
--gradient_checkpointing \
--overwrite_output_dir \
--predict_with_generate \
--freeze_encoder \
--streaming False \
--push_to_hub
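
# Assumes `accelerate config` has been run beforehand to pick the GPU count
# and mixed-precision setup; then invoke with e.g.:
#   bash run_distillation_backup.sh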