""" K-Means.

Implement the K-Means algorithm with TensorFlow, and apply it to classify
handwritten digit images. This example uses the MNIST database of
handwritten digits as training samples (http://yann.lecun.com/exdb/mnist/).

Note: This example requires TensorFlow v1.1.0 or later.

Author: Aymeric Damien
Project: https://github.com/aymericdamien/TensorFlow-Examples/
"""

from __future__ import print_function

import numpy as np
import tensorflow as tf
from tensorflow.contrib.factorization import KMeans

# Ignore all GPUs; this K-Means example does not benefit from them.
import os
os.environ["CUDA_VISIBLE_DEVICES"] = ""

# Import MNIST data
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
full_data_x = mnist.train.images

# Parameters
num_steps = 50      # Total steps to train
batch_size = 1024   # The number of samples per batch (not passed to KMeans here)
k = 25              # The number of clusters
num_classes = 10    # The 10 digits
num_features = 784  # Each image is 28x28 pixels

# Input images
X = tf.placeholder(tf.float32, shape=[None, num_features])
# Labels (for assigning a label to a centroid and testing)
Y = tf.placeholder(tf.float32, shape=[None, num_classes])

# K-Means Parameters
kmeans = KMeans(inputs=X, num_clusters=k, distance_metric='cosine',
                use_mini_batch=True)

# Build KMeans graph
(all_scores, cluster_idx, scores, cluster_centers_initialized, init_op,
 train_op) = kmeans.training_graph()
cluster_idx = cluster_idx[0]  # fix for cluster_idx being a tuple
avg_distance = tf.reduce_mean(scores)

# Initialize the variables (i.e. assign their default value)
init_vars = tf.global_variables_initializer()

# Start TensorFlow session
sess = tf.Session()

# Run the initializer
sess.run(init_vars, feed_dict={X: full_data_x})
sess.run(init_op, feed_dict={X: full_data_x})

# Training
for i in range(1, num_steps + 1):
    _, d, idx = sess.run([train_op, avg_distance, cluster_idx],
                         feed_dict={X: full_data_x})
    if i % 10 == 0 or i == 1:
        print("Step %i, Avg Distance: %f" % (i, d))

# Assign a label to each centroid:
# count the one-hot labels of all training samples assigned to each centroid
# (assignments given by 'idx'). A standalone toy version of this logic
# appears at the end of this file.
counts = np.zeros(shape=(k, num_classes))
for i in range(len(idx)):
    counts[idx[i]] += mnist.train.labels[i]
# Assign the most frequent label to the centroid
labels_map = [np.argmax(c) for c in counts]
labels_map = tf.convert_to_tensor(labels_map)

# Evaluation ops
# Lookup: centroid_id -> label
cluster_label = tf.nn.embedding_lookup(labels_map, cluster_idx)
# Compute accuracy
correct_prediction = tf.equal(cluster_label, tf.cast(tf.argmax(Y, 1), tf.int32))
accuracy_op = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

# Test Model
test_x, test_y = mnist.test.images, mnist.test.labels
print("Test Accuracy:", sess.run(accuracy_op, feed_dict={X: test_x, Y: test_y}))
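
# Usage sketch: with the session still open, the same graph can label new
# images. `cluster_label` maps each input through its nearest centroid to
# that centroid's majority digit, so any batch of 784-dim images fed into
# X yields predicted digits.
sample_images = mnist.test.images[:5]
predicted_digits = sess.run(cluster_label, feed_dict={X: sample_images})
print("Predicted digits:", predicted_digits)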
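
# A minimal standalone sketch, on made-up toy data, of the vote-and-argmax
# centroid labeling used above: sum the one-hot labels of the samples in
# each cluster, then take the most frequent class per cluster.
toy_k, toy_classes = 3, 4
toy_idx = np.array([0, 0, 1, 2, 2])   # cluster id of each toy sample
toy_labels = np.array([[1, 0, 0, 0],  # one-hot label of each toy sample
                       [1, 0, 0, 0],
                       [0, 0, 1, 0],
                       [0, 1, 0, 0],
                       [0, 1, 0, 0]])
toy_counts = np.zeros(shape=(toy_k, toy_classes))
for i in range(len(toy_idx)):
    toy_counts[toy_idx[i]] += toy_labels[i]  # per-cluster label votes
print([np.argmax(c) for c in toy_counts])    # -> [0, 2, 1]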