tensorflow - Dataset input from BMP images only 50% accurate
I've created a graph that tries to:
- import BMP files and generate a label based on the filename (L/R).
- train a network to distinguish between left and right eyes.
- evaluate the network.
I'm new to the framework and to the Dataset API. The code runs, but accuracy stays at 50% (no learning is happening).
Can someone check that the graph is right, or is it the network that needs fixing?
""" routine processing eye image dataset determines left/right eye using tensorflow api v1.3 """ __future__ import absolute_import __future__ import division __future__ import print_function import os import fnmatch import tensorflow tf six.moves import xrange # pylint: disable=redefined-builtin import nnlayers nnlayer image_size = 460 scale_size = 100 num_classes = 2 image_depth = 3 flags = tf.app.flags.flags # basic model parameters. tf.app.flags.define_integer('batch_size', 200, """number of images process in batch.""") tf.app.flags.define_integer('num_epochs', 1001, """number of images process in batch.""") tf.app.flags.define_string('train_directory', './eyeimages', """directory of images process.""") tf.app.flags.define_string('test_directory', './eyetest', """directory of images process.""") tf.app.flags.define_string('log_dir', './logs', """logging directory""") def _parse_function(filename, label): """takes filenames , labels , returns 1 hot labels , image values""" #read file image_string = tf.read_file(filename) #decode bmp file image_decoded = tf.image.decode_bmp(image_string) #resize accordingly image = tf.image.resize_images(image_decoded, [scale_size, scale_size]) #convert label 1 hot one_hot = tf.one_hot(label, num_classes) return image, one_hot def inference(image): #shape image convolution tf.name_scope('input_reshape'): x_image = tf.reshape(image, [-1, scale_size, scale_size, image_depth]) #infer number of images, last dimension features tf.summary.image('input_images',x_image) #neural net layers #100x100x3 -> 50x50x32 h_pool1 = nnlayer.conv_layer(x_image, image_depth, 5, 32, 'hiddenlayer1', act=tf.nn.relu) #50x50x32 -> 25x25x64 h_pool2 = nnlayer.conv_layer(h_pool1, 32, 5, 64, 'hiddenlayer2', act=tf.nn.relu) #25x25x64 -> 1024x2 h_fc1 = nnlayer.fc_layer(h_pool2, 64, 25, 1024, 'fclayer1', act=tf.nn.relu) #1024x2 ->1x2 tf.name_scope('final-layer'): tf.name_scope('weights'): w_fc2 = nnlayer.weight_variable([1024,num_classes]) tf.name_scope('biases'): b_fc2 = nnlayer.bias_variable([num_classes]) y_conv = tf.matmul(h_fc1, w_fc2) + b_fc2 return y_conv def folderparser(folder): """output bmp file names in directory , label based on file name""" #create list of filenames in directory files = os.listdir(folder) #filter bmp files bmpfiles = fnmatch.filter(files, '*.bmp') #create empty lists labels = [] fullnames = [] #get length of filename , determine left/right label in range(len(bmpfiles)): length = len(bmpfiles[i]) fullnames.append(folder + '/' + bmpfiles[i]) if (bmpfiles[i][length-17])=='l': labels.append(1) else: labels.append(0) return fullnames,labels def main(argv=none): # pylint: disable=unused-argument #delete log files if present #if tf.gfile.exists(flags.log_dir): # tf.gfile.deleterecursively(flags.log_dir) #tf.gfile.makedirs(flags.log_dir) #get file names , labels trainnames, trainlabels = folderparser(flags.train_directory) testnames, testlabels = folderparser(flags.test_directory) # create dataset of file names , labels tr_data = tf.contrib.data.dataset.from_tensor_slices((trainnames, trainlabels)) ts_data = tf.contrib.data.dataset.from_tensor_slices((testnames, testlabels)) #map data set file names images tr_data = tr_data.map(_parse_function) ts_data = ts_data.map(_parse_function) #shuffle images tr_data = tr_data.shuffle(flags.batch_size*2) ts_data = ts_data.shuffle(flags.batch_size*2) #create batches tr_data = tr_data.batch(flags.batch_size) ts_data = ts_data.batch(flags.batch_size) #create handle datasets handle = tf.placeholder(tf.string, shape=[]) iterator = 
tf.contrib.data.iterator.from_string_handle(handle, tr_data.output_types, tr_data.output_shapes) next_element = iterator.get_next() #setup iterator training_iterator = tr_data.make_initializable_iterator() validation_iterator = ts_data.make_initializable_iterator() #retrieve next batch features, labels = iterator.get_next() #run network y_conv = inference(features) #determine softmax , loss function tf.variable_scope('softmax_linear') scope: diff = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=y_conv) tf.name_scope('total'): cross_entropy = tf.reduce_mean(diff) tf.summary.scalar('cross_entropy', cross_entropy) #run gradient descent tf.name_scope('train'): training_op = tf.train.gradientdescentoptimizer(1e-3).minimize(cross_entropy) #identify correct predictions tf.name_scope('correct_prediction'): correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(labels, 1)) #find accuracy of model tf.name_scope('accuracy'): accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) tf.summary.scalar('accuracy', accuracy) tf.session() sess: #initialization of variables training_handle = sess.run(training_iterator.string_handle()) validation_handle = sess.run(validation_iterator.string_handle()) sess.run(tf.global_variables_initializer()) #merge summaries , write test summaries merged = tf.summary.merge_all() train_writer = tf.summary.filewriter(flags.log_dir + '/train', sess.graph) test_writer = tf.summary.filewriter(flags.log_dir + '/test') #run through epochs epoch in range(flags.num_epochs): #initialize training set training epoch sess.run(training_iterator.initializer) if epoch % 2 ==0: #initialize validation set sess.run(validation_iterator.initializer) #test summary, acc = sess.run([merged, accuracy], feed_dict={handle: validation_handle}) train_writer.add_summary(summary, epoch) #write test file print('step %s, accuracy %s' % (epoch, acc)) else: #train sess.run(training_op, feed_dict={handle: training_handle}) #close log files train_writer.close() test_writer.close() if __name__ == '__main__': tf.app.run()
aaron
The answer was image standardization:
image_std = tf.image.per_image_standardization(image_resized)
Without image standardization the neurons were becoming saturated. It improved the outcome straight away.
Thanks.
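
For context, here is a minimal sketch of the question's _parse_function with the standardization step added after the resize (the exact placement is an assumption; it just needs to run on each 3-D image before batching):

import tensorflow as tf

scale_size = 100   # matches the question's scale_size
num_classes = 2

def _parse_function(filename, label):
    """Map a filename/label pair to a standardized image and a one-hot label."""
    image_string = tf.read_file(filename)
    image_decoded = tf.image.decode_bmp(image_string)
    image_resized = tf.image.resize_images(image_decoded, [scale_size, scale_size])
    # scale the image to zero mean and unit variance; without this the raw
    # 0-255 pixel values can saturate the downstream units
    image_std = tf.image.per_image_standardization(image_resized)
    one_hot = tf.one_hot(label, num_classes)
    return image_std, one_hot

The rest of the pipeline is unchanged; the dataset map call picks up the standardized images automatically.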