pretrain_t5.sh

#!/bin/bash

# Single-process defaults. Megatron reads RANK and WORLD_SIZE from the
# environment (falling back to 0 and 1), so for multi-GPU runs these are
# normally set by a distributed launcher instead; see the sketch below.
RANK=0
WORLD_SIZE=1

DATA_PATH=<Specify path and file prefix>
VOCAB_FILE=<Specify path to vocab.txt>
CHECKPOINT_PATH=<Specify path>

# A T5-base-sized configuration: hidden size 768, 12 attention heads
# (64 channels each), FFN size 3072. --split 949,50,1 divides DATA_PATH
# into train/validation/test by weight, and --vocab-extra-ids 100 adds
# the 100 sentinel tokens T5's span-corruption objective expects. With a
# single process and no gradient accumulation, --micro-batch-size and
# --global-batch-size are equal.
python pretrain_t5.py \
       --num-layers 12 \
       --hidden-size 768 \
       --num-attention-heads 12 \
       --kv-channels 64 \
       --ffn-hidden-size 3072 \
       --encoder-seq-length 512 \
       --decoder-seq-length 128 \
       --micro-batch-size 16 \
       --global-batch-size 16 \
       --max-position-embeddings 512 \
       --train-iters 1000000 \
       --lr-decay-iters 1000000 \
       --save $CHECKPOINT_PATH \
       --load $CHECKPOINT_PATH \
       --data-path $DATA_PATH \
       --vocab-file $VOCAB_FILE \
       --data-impl mmap \
       --split 949,50,1 \
       --lr 0.0001 \
       --min-lr 0.00001 \
       --lr-decay-style linear \
       --lr-warmup-fraction .01 \
       --weight-decay 1e-2 \
       --clip-grad 1.0 \
       --log-interval 100 \
       --save-interval 10000 \
       --eval-interval 1000 \
       --eval-iters 10 \
       --fp16 \
       --vocab-extra-ids 100
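
As written, the script launches one process, so training runs on a single GPU. For data-parallel training across several GPUs, the usual approach is to hand the same arguments to PyTorch's distributed launcher, which sets RANK, LOCAL_RANK, and WORLD_SIZE in each worker's environment. A minimal sketch, assuming one node with 8 GPUs; GPUS_PER_NODE, MASTER_ADDR, and MASTER_PORT are illustrative values, not part of the original script:

#!/bin/bash
GPUS_PER_NODE=8        # assumed node size; adjust to your hardware
MASTER_ADDR=localhost  # host of rank 0 for rendezvous
MASTER_PORT=6000       # any free port

python -m torch.distributed.launch \
       --nproc_per_node $GPUS_PER_NODE \
       --nnodes 1 \
       --node_rank 0 \
       --master_addr $MASTER_ADDR \
       --master_port $MASTER_PORT \
       pretrain_t5.py \
       --num-layers 12 ...   # remaining arguments identical to the script above

When a launcher is in charge, the RANK=0 / WORLD_SIZE=1 lines at the top of the script are ignored. On recent PyTorch releases, torchrun replaces python -m torch.distributed.launch and accepts the same per-node flags.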