generate_text.sh

#!/bin/bash
# Generate unconditional samples from a GPT-2 345M checkpoint with Megatron-LM.
CHECKPOINT_PATH=checkpoints/gpt2_345m   # trained model checkpoint directory
VOCAB_FILE=gpt2-vocab.json              # GPT-2 BPE vocabulary
MERGE_FILE=gpt2-merges.txt              # GPT-2 BPE merge rules

python tools/generate_samples_gpt2.py \
       --tensor-model-parallel-size 1 \
       --num-layers 24 \
       --hidden-size 1024 \
       --load $CHECKPOINT_PATH \
       --num-attention-heads 16 \
       --max-position-embeddings 1024 \
       --tokenizer-type GPT2BPETokenizer \
       --fp16 \
       --batch-size 2 \
       --seq-length 1024 \
       --out-seq-length 1024 \
       --temperature 1.0 \
       --vocab-file $VOCAB_FILE \
       --merge-file $MERGE_FILE \
       --genfile unconditional_samples.json \
       --num-samples 2 \
       --top_p 0.9 \
       --recompute
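
Assuming the checkpoint directory and the GPT-2 vocab/merge files exist at the paths set above, the script is launched from the Megatron-LM repository root, and the generated samples land in the file passed via --genfile. A minimal sketch of a run followed by a quick look at the output (json.tool ships with Python):

    bash generate_text.sh
    # Pretty-print the generated samples written by --genfile.
    python -m json.tool unconditional_samples.json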