#!/usr/bin/env bash
# distil-whisper-large-v3-ptbr / run_distillation.sh
# For multi-GPU training, launch via accelerate instead of plain python:
# accelerate launch run_distillation.py \
python run_distillation.py \
--model_name_or_path "./distil-large-v3-init" \
--teacher_model_name_or_path "openai/whisper-large-v3" \
--train_dataset_name "mozilla-foundation/common_voice_13_0" \
--train_split_name "train" \
--train_dataset_config_name "pt" \
--text_column_name "sentence" \
--audio_column_name "audio" \
--eval_dataset_name "mozilla-foundation/common_voice_13_0" \
--eval_text_column_name "sentence" \
--eval_steps 1000 \
--save_steps 1000 \
--warmup_steps 50 \
--learning_rate 0.0001 \
--lr_scheduler_type "constant_with_warmup" \
--timestamp_probability 0.2 \
--condition_on_prev_probability 0.2 \
--language "pt" \
--task "transcribe" \
--logging_steps 25 \
--save_total_limit 1 \
--max_steps 10000 \
--wer_threshold 20 \
--per_device_train_batch_size 30 \
--per_device_eval_batch_size 30 \
--dataloader_num_workers 8 \
--preprocessing_num_workers 8 \
--ddp_timeout 7200 \
--dtype "float32" \
--output_dir "./" \
--do_train \
--do_eval \
--gradient_checkpointing \
--overwrite_output_dir \
--predict_with_generate \
--freeze_encoder \
--streaming False \
--use_pseudo_labels False \
--push_to_hub