finetune_resnet_v1_50_on_flowers.sh

#!/bin/bash
#
# This script performs the following operations:
# 1. Downloads the Flowers dataset.
# 2. Fine-tunes a ResNetV1-50 model on the Flowers training set.
# 3. Evaluates the model on the Flowers validation set.
#
# Usage:
# cd slim
# ./scripts/finetune_resnet_v1_50_on_flowers.sh

# Where the pre-trained ResNetV1-50 checkpoint is saved to.
PRETRAINED_CHECKPOINT_DIR=/tmp/checkpoints

# Where the training (fine-tuned) checkpoints and logs will be saved to.
TRAIN_DIR=/tmp/flowers-models/resnet_v1_50

# Where the dataset is saved to.
DATASET_DIR=/tmp/flowers

# Download the pre-trained checkpoint.
if [ ! -d "${PRETRAINED_CHECKPOINT_DIR}" ]; then
  mkdir -p "${PRETRAINED_CHECKPOINT_DIR}"
fi
if [ ! -f "${PRETRAINED_CHECKPOINT_DIR}/resnet_v1_50.ckpt" ]; then
  wget http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz
  tar -xvf resnet_v1_50_2016_08_28.tar.gz
  mv resnet_v1_50.ckpt "${PRETRAINED_CHECKPOINT_DIR}/resnet_v1_50.ckpt"
  rm resnet_v1_50_2016_08_28.tar.gz
fi
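
# Optional sanity check (not part of the original script): list the tensors in
# the downloaded checkpoint with TensorFlow's inspect_checkpoint tool. This is
# a minimal sketch that assumes the tool ships with your TensorFlow install;
# it is left commented out so the script's behavior is unchanged.
# python -m tensorflow.python.tools.inspect_checkpoint \
#   --file_name="${PRETRAINED_CHECKPOINT_DIR}/resnet_v1_50.ckpt"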

# Download the dataset.
python download_and_convert_data.py \
  --dataset_name=flowers \
  --dataset_dir=${DATASET_DIR}
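
# Optional sanity check (not part of the original script): the conversion step
# above should leave sharded TFRecord files plus a labels.txt file in
# DATASET_DIR. The shard naming is an assumption based on the slim flowers
# converter; adjust the glob if your version names them differently.
# ls ${DATASET_DIR}/flowers_*.tfrecord ${DATASET_DIR}/labels.txt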

# Fine-tune only the new layers for 3000 steps. The logits scope is excluded
# from the checkpoint restore (it has a different number of classes than
# ImageNet) and is the only scope marked trainable, so this phase trains just
# the new classification layer on top of the pre-trained features.
python train_image_classifier.py \
  --train_dir=${TRAIN_DIR} \
  --dataset_name=flowers \
  --dataset_split_name=train \
  --dataset_dir=${DATASET_DIR} \
  --model_name=resnet_v1_50 \
  --checkpoint_path=${PRETRAINED_CHECKPOINT_DIR}/resnet_v1_50.ckpt \
  --checkpoint_exclude_scopes=resnet_v1_50/logits \
  --trainable_scopes=resnet_v1_50/logits \
  --max_number_of_steps=3000 \
  --batch_size=32 \
  --learning_rate=0.01 \
  --save_interval_secs=60 \
  --save_summaries_secs=60 \
  --log_every_n_steps=100 \
  --optimizer=rmsprop \
  --weight_decay=0.00004
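
# Optional (not part of the original script): training progress can be watched
# with TensorBoard from a separate terminal while the step above runs,
# assuming TensorBoard is installed alongside TensorFlow. Left commented out.
# tensorboard --logdir=${TRAIN_DIR}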

# Run evaluation on the validation split. Passing the training directory as
# --checkpoint_path makes the script pick up the latest checkpoint saved there.
python eval_image_classifier.py \
  --checkpoint_path=${TRAIN_DIR} \
  --eval_dir=${TRAIN_DIR} \
  --dataset_name=flowers \
  --dataset_split_name=validation \
  --dataset_dir=${DATASET_DIR} \
  --model_name=resnet_v1_50
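
# Optional (not part of the original script): re-running the same evaluation
# with --dataset_split_name=train gives a rough overfitting check by comparing
# train and validation accuracy. A minimal sketch, commented out:
# python eval_image_classifier.py \
#   --checkpoint_path=${TRAIN_DIR} \
#   --eval_dir=${TRAIN_DIR} \
#   --dataset_name=flowers \
#   --dataset_split_name=train \
#   --dataset_dir=${DATASET_DIR} \
#   --model_name=resnet_v1_50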

# Fine-tune all layers for 1000 steps at a lower learning rate. No
# --trainable_scopes is passed, so every variable is trainable, and the
# checkpoint produced by the first phase is used as the starting point.
python train_image_classifier.py \
  --train_dir=${TRAIN_DIR}/all \
  --dataset_name=flowers \
  --dataset_split_name=train \
  --dataset_dir=${DATASET_DIR} \
  --checkpoint_path=${TRAIN_DIR} \
  --model_name=resnet_v1_50 \
  --max_number_of_steps=1000 \
  --batch_size=32 \
  --learning_rate=0.001 \
  --save_interval_secs=60 \
  --save_summaries_secs=60 \
  --log_every_n_steps=100 \
  --optimizer=rmsprop \
  --weight_decay=0.00004

# Run evaluation on the fully fine-tuned model.
python eval_image_classifier.py \
  --checkpoint_path=${TRAIN_DIR}/all \
  --eval_dir=${TRAIN_DIR}/all \
  --dataset_name=flowers \
  --dataset_split_name=validation \
  --dataset_dir=${DATASET_DIR} \
  --model_name=resnet_v1_50
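
# Optional follow-up (not part of the original script): export an inference
# graph for the fine-tuned model. This is a minimal sketch assuming the slim
# export_inference_graph.py tool and the flags shown here; producing a frozen
# graph would additionally require freeze_graph and the model's output node
# name, which are not covered by this script.
# python export_inference_graph.py \
#   --alsologtostderr \
#   --model_name=resnet_v1_50 \
#   --dataset_name=flowers \
#   --dataset_dir=${DATASET_DIR} \
#   --output_file=/tmp/resnet_v1_50_flowers_inf_graph.pb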