# train_student.py
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from tensorflow.python.platform import gfile

import aggregation
import deep_cnn
import input  # pylint: disable=redefined-builtin
import metrics
  27. FLAGS = flags.FLAGS
  28. flags.DEFINE_string('dataset', 'svhn', 'The name of the dataset to use')
  29. flags.DEFINE_integer('nb_labels', 10, 'Number of output classes')
  30. flags.DEFINE_string('data_dir','/tmp','Temporary storage')
  31. flags.DEFINE_string('train_dir','/tmp/train_dir','Where model chkpt are saved')
  32. flags.DEFINE_string('teachers_dir','/tmp/train_dir',
  33. 'Directory where teachers checkpoints are stored.')
  34. flags.DEFINE_integer('teachers_max_steps', 3000,
  35. """Number of steps teachers were ran.""")
  36. flags.DEFINE_integer('max_steps', 3000, """Number of steps to run student.""")
  37. flags.DEFINE_integer('nb_teachers', 10, """Teachers in the ensemble.""")
  38. tf.app.flags.DEFINE_integer('stdnt_share', 1000,
  39. """Student share (last index) of the test data""")
  40. flags.DEFINE_integer('lap_scale', 10,
  41. """Scale of the Laplacian noise added for privacy""")
  42. flags.DEFINE_boolean('save_labels', False,
  43. """Dump numpy arrays of labels and clean teacher votes""")
  44. flags.DEFINE_boolean('deeper', False, """Activate deeper CNN model""")
  45. def ensemble_preds(dataset, nb_teachers, stdnt_data):
  46. """
  47. Given a dataset, a number of teachers, and some input data, this helper
  48. function queries each teacher for predictions on the data and returns
  49. all predictions in a single array. (That can then be aggregated into
  50. one single prediction per input using aggregation.py (cf. function
  51. prepare_student_data() below)
  52. :param dataset: string corresponding to mnist, cifar10, or svhn
  53. :param nb_teachers: number of teachers (in the ensemble) to learn from
  54. :param stdnt_data: unlabeled student training data
  55. :return: 3d array (teacher id, sample id, probability per class)
  56. """
  57. # Compute shape of array that will hold probabilities produced by each
  58. # teacher, for each training point, and each output class
  59. result_shape = (nb_teachers, len(stdnt_data), FLAGS.nb_labels)
  60. # Create array that will hold result
  61. result = np.zeros(result_shape, dtype=np.float32)
  62. # Get predictions from each teacher
  63. for teacher_id in xrange(nb_teachers):
  64. # Compute path of checkpoint file for teacher model with ID teacher_id
  65. if FLAGS.deeper:
  66. ckpt_path = FLAGS.teachers_dir + '/' + str(dataset) + '_' + str(nb_teachers) + '_teachers_' + str(teacher_id) + '_deep.ckpt-' + str(FLAGS.teachers_max_steps - 1) #NOLINT(long-line)
  67. else:
  68. ckpt_path = FLAGS.teachers_dir + '/' + str(dataset) + '_' + str(nb_teachers) + '_teachers_' + str(teacher_id) + '.ckpt-' + str(FLAGS.teachers_max_steps - 1) # NOLINT(long-line)
  69. # Get predictions on our training data and store in result array
  70. result[teacher_id] = deep_cnn.softmax_preds(stdnt_data, ckpt_path)
  71. # This can take a while when there are a lot of teachers so output status
  72. print("Computed Teacher " + str(teacher_id) + " softmax predictions")
  73. return result
  74. def prepare_student_data(dataset, nb_teachers, save=False):
  75. """
  76. Takes a dataset name and the size of the teacher ensemble and prepares
  77. training data for the student model, according to parameters indicated
  78. in flags above.
  79. :param dataset: string corresponding to mnist, cifar10, or svhn
  80. :param nb_teachers: number of teachers (in the ensemble) to learn from
  81. :param save: if set to True, will dump student training labels predicted by
  82. the ensemble of teachers (with Laplacian noise) as npy files.
  83. It also dumps the clean votes for each class (without noise) and
  84. the labels assigned by teachers
  85. :return: pairs of (data, labels) to be used for student training and testing
  86. """
  87. assert input.create_dir_if_needed(FLAGS.train_dir)
  88. # Load the dataset
  89. if dataset == 'svhn':
  90. test_data, test_labels = input.ld_svhn(test_only=True)
  91. elif dataset == 'cifar10':
  92. test_data, test_labels = input.ld_cifar10(test_only=True)
  93. elif dataset == 'mnist':
  94. test_data, test_labels = input.ld_mnist(test_only=True)
  95. else:
  96. print("Check value of dataset flag")
  97. return False
  98. # Make sure there is data leftover to be used as a test set
  99. assert FLAGS.stdnt_share < len(test_data)
  100. # Prepare [unlabeled] student training data (subset of test set)
  101. stdnt_data = test_data[:FLAGS.stdnt_share]
  102. # Compute teacher predictions for student training data
  103. teachers_preds = ensemble_preds(dataset, nb_teachers, stdnt_data)
  104. # Aggregate teacher predictions to get student training labels
  105. if not save:
  106. stdnt_labels = aggregation.noisy_max(teachers_preds, FLAGS.lap_scale)
  107. else:
  108. # Request clean votes and clean labels as well
  109. stdnt_labels, clean_votes, labels_for_dump = aggregation.noisy_max(teachers_preds, FLAGS.lap_scale, return_clean_votes=True) #NOLINT(long-line)
  110. # Prepare filepath for numpy dump of clean votes
  111. filepath = FLAGS.data_dir + "/" + str(dataset) + '_' + str(nb_teachers) + '_student_clean_votes_lap_' + str(FLAGS.lap_scale) + '.npy' # NOLINT(long-line)
  112. # Prepare filepath for numpy dump of clean labels
  113. filepath_labels = FLAGS.data_dir + "/" + str(dataset) + '_' + str(nb_teachers) + '_teachers_labels_lap_' + str(FLAGS.lap_scale) + '.npy' # NOLINT(long-line)
  114. # Dump clean_votes array
  115. with gfile.Open(filepath, mode='w') as file_obj:
  116. np.save(file_obj, clean_votes)
  117. # Dump labels_for_dump array
  118. with gfile.Open(filepath_labels, mode='w') as file_obj:
  119. np.save(file_obj, labels_for_dump)
  120. # Print accuracy of aggregated labels
  121. ac_ag_labels = metrics.accuracy(stdnt_labels, test_labels[:FLAGS.stdnt_share])
  122. print("Accuracy of the aggregated labels: " + str(ac_ag_labels))
  123. # Store unused part of test set for use as a test set after student training
  124. stdnt_test_data = test_data[FLAGS.stdnt_share:]
  125. stdnt_test_labels = test_labels[FLAGS.stdnt_share:]
  126. if save:
  127. # Prepare filepath for numpy dump of labels produced by noisy aggregation
  128. filepath = FLAGS.data_dir + "/" + str(dataset) + '_' + str(nb_teachers) + '_student_labels_lap_' + str(FLAGS.lap_scale) + '.npy' #NOLINT(long-line)
  129. # Dump student noisy labels array
  130. with gfile.Open(filepath, mode='w') as file_obj:
  131. np.save(file_obj, stdnt_labels)
  132. return stdnt_data, stdnt_labels, stdnt_test_data, stdnt_test_labels
  133. def train_student(dataset, nb_teachers):
  134. """
  135. This function trains a student using predictions made by an ensemble of
  136. teachers. The student and teacher models are trained using the same
  137. neural network architecture.
  138. :param dataset: string corresponding to mnist, cifar10, or svhn
  139. :param nb_teachers: number of teachers (in the ensemble) to learn from
  140. :return: True if student training went well
  141. """
  142. assert input.create_dir_if_needed(FLAGS.train_dir)
  143. # Call helper function to prepare student data using teacher predictions
  144. stdnt_dataset = prepare_student_data(dataset, nb_teachers, save=True)
  145. # Unpack the student dataset
  146. stdnt_data, stdnt_labels, stdnt_test_data, stdnt_test_labels = stdnt_dataset
  147. # Prepare checkpoint filename and path
  148. if FLAGS.deeper:
  149. ckpt_path = FLAGS.train_dir + '/' + str(dataset) + '_' + str(nb_teachers) + '_student_deeper.ckpt' #NOLINT(long-line)
  150. else:
  151. ckpt_path = FLAGS.train_dir + '/' + str(dataset) + '_' + str(nb_teachers) + '_student.ckpt' # NOLINT(long-line)
  152. # Start student training
  153. assert deep_cnn.train(stdnt_data, stdnt_labels, ckpt_path)
  154. # Compute final checkpoint name for student (with max number of steps)
  155. ckpt_path_final = ckpt_path + '-' + str(FLAGS.max_steps - 1)
  156. # Compute student label predictions on remaining chunk of test set
  157. student_preds = deep_cnn.softmax_preds(stdnt_test_data, ckpt_path_final)
  158. # Compute teacher accuracy
  159. precision = metrics.accuracy(student_preds, stdnt_test_labels)
  160. print('Precision of student after training: ' + str(precision))
  161. return True
  162. def main(argv=None): # pylint: disable=unused-argument
  163. # Run student training according to values specified in flags
  164. assert train_student(FLAGS.dataset, FLAGS.nb_teachers)
  165. if __name__ == '__main__':
  166. app.run()