```bash
#!/bin/bash

CHECKPOINT_PATH=checkpoints/gpt2_345m
VOCAB_FILE=gpt2-vocab.json
MERGE_FILE=gpt2-merges.txt

python tools/generate_samples_gpt2.py \
       --tensor-model-parallel-size 1 \
       --num-layers 24 \
       --hidden-size 1024 \
       --load $CHECKPOINT_PATH \
       --num-attention-heads 16 \
       --max-position-embeddings 1024 \
       --tokenizer-type GPT2BPETokenizer \
       --fp16 \
       --batch-size 2 \
       --seq-length 1024 \
       --out-seq-length 1024 \
       --temperature 1.0 \
       --vocab-file $VOCAB_FILE \
       --merge-file $MERGE_FILE \
       --genfile unconditional_samples.json \
       --num-samples 2 \
       --top_p 0.9 \
       --recompute
```
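The run writes the generated samples to the file given by `--genfile`. As a minimal sketch for inspecting that output (assuming the file is written as one JSON object per line, which may vary between Megatron-LM versions; check `tools/generate_samples_gpt2.py` for the exact format):

```python
import json

# Sketch: read back the generated samples, assuming a JSON Lines layout
# (one JSON object per line). Adjust if your Megatron-LM version writes
# the genfile in a different format.
with open("unconditional_samples.json") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        sample = json.loads(line)
        print(sample)
```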