
Merge pull request #760 from stakemura/master

Python 3 support for some inception scripts
Neal Wu, 8 years ago
Commit 99462f6dd5
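
For context: Python 3 removed the xrange built-in, and range now returns a lazy sequence object, so the one-for-one substitutions below are safe wherever the result is only iterated. A minimal sketch of the behavior difference (illustrative, not taken from the diff):

# Python 2: xrange(n) was a lazy counter, while range(n) built a full list.
# Python 3: xrange is gone and range(n) is itself a lazy sequence object.
for s in range(10):          # constant memory, same as Python 2's xrange
    print(s)

# Indexing and membership tests work without materializing a list:
r = range(1000000)
assert r[10] == 10 and 999999 in r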

+ 4 - 4
inception/inception/data/build_image_data.py

@@ -247,7 +247,7 @@ def _process_image_files_batch(coder, thread_index, ranges, name, filenames,
   num_files_in_thread = ranges[thread_index][1] - ranges[thread_index][0]
 
   counter = 0
-  for s in xrange(num_shards_per_batch):
+  for s in range(num_shards_per_batch):
     # Generate a sharded version of the file name, e.g. 'train-00002-of-00010'
     shard = thread_index * num_shards_per_batch + s
     output_filename = '%s-%.5d-of-%.5d' % (name, shard, num_shards)
@@ -300,7 +300,7 @@ def _process_image_files(name, filenames, texts, labels, num_shards):
   # Break all images into batches with a [ranges[i][0], ranges[i][1]].
   spacing = np.linspace(0, len(filenames), FLAGS.num_threads + 1).astype(np.int)
   ranges = []
-  for i in xrange(len(spacing) - 1):
+  for i in range(len(spacing) - 1):
     ranges.append([spacing[i], spacing[i+1]])
 
   # Launch a thread for each batch.
@@ -314,7 +314,7 @@ def _process_image_files(name, filenames, texts, labels, num_shards):
   coder = ImageCoder()
 
   threads = []
-  for thread_index in xrange(len(ranges)):
+  for thread_index in range(len(ranges)):
     args = (coder, thread_index, ranges, name, filenames,
             texts, labels, num_shards)
     t = threading.Thread(target=_process_image_files_batch, args=args)
@@ -386,7 +386,7 @@ def _find_image_files(data_dir, labels_file):
   # Shuffle the ordering of all image files in order to guarantee
   # random ordering of the images with respect to label in the
   # saved TFRecord files. Make the randomization repeatable.
-  shuffled_index = range(len(filenames))
+  shuffled_index = list(range(len(filenames)))
   random.seed(12345)
   random.shuffle(shuffled_index)
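
The list() wrapper in this hunk is what keeps random.shuffle working under Python 3: shuffle mutates its argument in place, and a Python 3 range object is immutable. A minimal sketch of the pattern, with a hypothetical filenames list standing in for the real one; the same fix appears in build_imagenet_data.py below:

import random

filenames = ['a.jpg', 'b.jpg', 'c.jpg']   # hypothetical stand-in for the real file list

# random.shuffle mutates its argument in place, so it needs a mutable sequence.
# A Python 3 range object is immutable; wrapping it in list() restores the
# Python 2 behavior, where range() already returned a list.
shuffled_index = list(range(len(filenames)))
random.seed(12345)
random.shuffle(shuffled_index)

filenames = [filenames[i] for i in shuffled_index]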
 

+ 4 - 4
inception/inception/data/build_imagenet_data.py

@@ -370,7 +370,7 @@ def _process_image_files_batch(coder, thread_index, ranges, name, filenames,
   num_files_in_thread = ranges[thread_index][1] - ranges[thread_index][0]
 
   counter = 0
-  for s in xrange(num_shards_per_batch):
+  for s in range(num_shards_per_batch):
     # Generate a sharded version of the file name, e.g. 'train-00002-of-00010'
     shard = thread_index * num_shards_per_batch + s
     output_filename = '%s-%.5d-of-%.5d' % (name, shard, num_shards)
@@ -434,7 +434,7 @@ def _process_image_files(name, filenames, synsets, labels, humans,
   spacing = np.linspace(0, len(filenames), FLAGS.num_threads + 1).astype(np.int)
   ranges = []
   threads = []
-  for i in xrange(len(spacing) - 1):
+  for i in range(len(spacing) - 1):
     ranges.append([spacing[i], spacing[i+1]])
 
   # Launch a thread for each batch.
@@ -448,7 +448,7 @@ def _process_image_files(name, filenames, synsets, labels, humans,
   coder = ImageCoder()
 
   threads = []
-  for thread_index in xrange(len(ranges)):
+  for thread_index in range(len(ranges)):
     args = (coder, thread_index, ranges, name, filenames,
             synsets, labels, humans, bboxes, num_shards)
     t = threading.Thread(target=_process_image_files_batch, args=args)
@@ -524,7 +524,7 @@ def _find_image_files(data_dir, labels_file):
   # Shuffle the ordering of all image files in order to guarantee
   # random ordering of the images with respect to label in the
   # saved TFRecord files. Make the randomization repeatable.
-  shuffled_index = range(len(filenames))
+  shuffled_index = list(range(len(filenames)))
   random.seed(12345)
   random.shuffle(shuffled_index)
 

+ 1 - 1
inception/inception/data/preprocess_imagenet_validation_data.py

@@ -72,7 +72,7 @@ if __name__ == '__main__':
     os.makedirs(labeled_data_dir)
 
   # Move all of the image to the appropriate sub-directory.
-  for i in xrange(len(labels)):
+  for i in range(len(labels)):
     basename = 'ILSVRC2012_val_000%.5d.JPEG' % (i + 1)
     original_filename = os.path.join(data_dir, basename)
     if not os.path.exists(original_filename):

+ 1 - 1
inception/inception/data/process_bounding_boxes.py

@@ -128,7 +128,7 @@ def ProcessXMLAnnotation(xml_file):
   num_boxes = FindNumberBoundingBoxes(root)
   boxes = []
 
-  for index in xrange(num_boxes):
+  for index in range(num_boxes):
     box = BoundingBox()
     # Grab the 'index' annotation.
     box.xmin = GetInt('xmin', root, index)

+ 2 - 2
inception/inception/inception_train.py

@@ -229,7 +229,7 @@ def train(dataset):
     # Calculate the gradients for each model tower.
     tower_grads = []
     reuse_variables = None
-    for i in xrange(FLAGS.num_gpus):
+    for i in range(FLAGS.num_gpus):
       with tf.device('/gpu:%d' % i):
         with tf.name_scope('%s_%d' % (inception.TOWER_NAME, i)) as scope:
           # Force all Variables to reside on the CPU.
@@ -333,7 +333,7 @@ def train(dataset):
         FLAGS.train_dir,
         graph_def=sess.graph.as_graph_def(add_shapes=True))
 
-    for step in xrange(FLAGS.max_steps):
+    for step in range(FLAGS.max_steps):
       start_time = time.time()
       _, loss_value = sess.run([train_op, loss])
       duration = time.time() - start_time
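
In the training loop, xrange was originally used to avoid materializing FLAGS.max_steps integers at once; Python 3's range keeps that property, so the substitution does not change memory use. A rough illustration (the step count here is illustrative, not the flag's default):

import sys

max_steps = 10000000                      # illustrative value, not the real flag default
steps = range(max_steps)
# A range object stores only start/stop/step, regardless of its length.
print(sys.getsizeof(steps))               # a few dozen bytes, not tens of megabytes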