
@mchirico
Last active April 2, 2021 06:56

Revisions

  1. mchirico revised this gist Nov 23, 2015. 1 changed file with 26 additions and 6 deletions.
    32 changes: 26 additions & 6 deletions tensorFlowIrisCSV.py
    @@ -71,10 +71,13 @@ def convertOneHot(data):


    # Recreate logging dir
    import shutil
    shutil.rmtree('./tenIrisSave')
    import os, sys
    os.mkdir('./tenIrisSave', 0755 )
    import shutil, os, sys
    TMPDir='./tenIrisSave'
    try:
        shutil.rmtree(TMPDir)
    except:
        print "Tmp Dir did not exist"
    os.mkdir(TMPDir, 0755 )

    # Initialize and run
    sess = tf.Session()
    @@ -83,7 +86,7 @@ def convertOneHot(data):
    sess.run(init)

    # Build the summary operation based on the TF collection of Summaries.
    tf.train.write_graph(sess.graph_def, './tenIrisSave/logsd','graph.pbtxt')
    tf.train.write_graph(sess.graph_def, TMPDir + '/logsd','graph.pbtxt')

    #acc = tf.scalar_summary("Accuracy:", tf_accuracy)
    tf.scalar_summary("Accuracy:", tf_accuracy)
    @@ -95,7 +98,7 @@ def convertOneHot(data):

    summary_op = tf.merge_all_summaries()
    #summary_writer = tf.train.SummaryWriter('./tenIrisSave/logs',graph_def=sess.graph_def)
    summary_writer = tf.train.SummaryWriter('./tenIrisSave/logs',sess.graph_def)
    summary_writer = tf.train.SummaryWriter(TMPDir + '/logs',sess.graph_def)

    # This will not work. You need the full path.
    # tensorboard --logdir=./tenIrisSave/ # BAD!
    @@ -118,3 +121,20 @@ def convertOneHot(data):
    summary_str = sess.run(summary_op,feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
    summary_writer.add_summary(summary_str, i)
    if result == 1 and saved == 0:
        saved=1
        print "saving"
        saver.save(sess,"./tenIrisSave/saveOne")


    k=np.array(k)
    print(np.where(k==k.max()))
    print "Max: {}".format(k.max())

    print "\nTo see the output, run the following:"
    print "tensorboard --logdir=$(pwd)/tenIrisSave"






  2. mchirico revised this gist Nov 22, 2015. 1 changed file with 7 additions and 29 deletions.
    36 changes: 7 additions & 29 deletions tensorFlowIrisCSV.py
    @@ -70,7 +70,7 @@ def convertOneHot(data):



    # Remove dir
    # Recreate logging dir
    import shutil
    shutil.rmtree('./tenIrisSave')
    import os, sys
    @@ -80,14 +80,8 @@ def convertOneHot(data):
    sess = tf.Session()
    #sess = tf.InteractiveSession()
    init = tf.initialize_all_variables()

    sess.run(init)






    # Build the summary operation based on the TF collection of Summaries.
    tf.train.write_graph(sess.graph_def, './tenIrisSave/logsd','graph.pbtxt')

    @@ -100,18 +94,16 @@ def convertOneHot(data):


    summary_op = tf.merge_all_summaries()
    summary_writer = tf.train.SummaryWriter('./tenIrisSave/logs',graph_def=sess.graph_def)

    # tensorboard --logdir=./tenIrisSave/



    #summary_writer = tf.train.SummaryWriter('./tenIrisSave/logs',graph_def=sess.graph_def)
    summary_writer = tf.train.SummaryWriter('./tenIrisSave/logs',sess.graph_def)

    # This will not work. You need the full path.
    # tensorboard --logdir=./tenIrisSave/ # BAD!
    # tensorboard --logdir=$(pwd)/tenIrisSave/ # Good!

    # This is for saving all our work
    saver = tf.train.Saver([tf_weight,tf_bias])



    print("...")
    # Run the training

    @@ -126,17 +118,3 @@ def convertOneHot(data):
    summary_str = sess.run(summary_op,feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
    summary_writer.add_summary(summary_str, i)
    if result == 1 and saved == 0:
        saved=1
        print "saving"
        saver.save(sess,"./tenIrisSave/saveOne")
    k=np.array(k)
    print(np.where(k==k.max()))
    print "Max: {}".format(k.max())

    print "tensorboard --logdir=$(pwd)/tenIrisSave"






  3. mchirico revised this gist Nov 22, 2015. 1 changed file with 58 additions and 44 deletions.
    102 changes: 58 additions & 44 deletions tensorFlowIrisCSV.py
    @@ -54,6 +54,9 @@ def convertOneHot(data):
    tf_bias = tf.Variable(tf.zeros([B]))
    tf_softmax = tf.nn.softmax(tf.matmul(tf_in,tf_weight) + tf_bias)




    # Training via backpropagation
    tf_softmax_correct = tf.placeholder("float", [None,B])
    tf_cross_entropy = -tf.reduce_sum(tf_softmax_correct*tf.log(tf_softmax))
    @@ -65,61 +68,72 @@ def convertOneHot(data):
    tf_correct_prediction = tf.equal(tf.argmax(tf_softmax,1), tf.argmax(tf_softmax_correct,1))
    tf_accuracy = tf.reduce_mean(tf.cast(tf_correct_prediction, "float"))



    # Remove dir
    import shutil
    shutil.rmtree('./tenIrisSave')
    import os, sys
    os.mkdir('./tenIrisSave', 0755 )

    # Initialize and run
    init = tf.initialize_all_variables()
    sess = tf.Session()
    #sess = tf.InteractiveSession()
    init = tf.initialize_all_variables()

    sess.run(init)

    print("...")
    # Run the training
    for i in range(30):
        sess.run(tf_train_step, feed_dict={tf_in: x_train, tf_softmax_correct: y_train_onehot})

        # Print accuracy
        result = sess.run(tf_accuracy, feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
        print "Run {},{}".format(i,result)


    """
    Below is the output
    ...
    Run 0,0.319999992847
    Run 1,0.300000011921
    Run 2,0.379999995232
    Run 3,0.319999992847
    Run 4,0.300000011921
    Run 5,0.699999988079
    Run 6,0.680000007153
    Run 7,0.699999988079
    Run 8,0.680000007153
    Run 9,0.699999988079
    Run 10,0.680000007153
    Run 11,0.680000007153
    Run 12,0.540000021458
    Run 13,0.419999986887
    Run 14,0.680000007153
    Run 15,0.699999988079
    Run 16,0.680000007153
    Run 17,0.699999988079
    Run 18,0.680000007153
    Run 19,0.699999988079
    Run 20,0.699999988079
    Run 21,0.699999988079
    Run 22,0.699999988079
    Run 23,0.699999988079
    Run 24,0.680000007153
    Run 25,0.699999988079
    Run 26,1.0
    Run 27,0.819999992847
    ...
    Ref:
    https://gist.github.com/mchirico/bcc376fb336b73f24b29#file-tensorflowiriscsv-py
    """


    # Build the summary operation based on the TF collection of Summaries.
    tf.train.write_graph(sess.graph_def, './tenIrisSave/logsd','graph.pbtxt')

    #acc = tf.scalar_summary("Accuracy:", tf_accuracy)
    tf.scalar_summary("Accuracy:", tf_accuracy)
    tf.histogram_summary('weights', tf_weight)
    tf.histogram_summary('bias', tf_bias)
    tf.histogram_summary('softmax', tf_softmax)
    tf.histogram_summary('accuracy', tf_accuracy)


    summary_op = tf.merge_all_summaries()
    summary_writer = tf.train.SummaryWriter('./tenIrisSave/logs',graph_def=sess.graph_def)

    # tensorboard --logdir=./tenIrisSave/





    saver = tf.train.Saver([tf_weight,tf_bias])



    print("...")
    # Run the training

    k=[]
    saved=0
    for i in range(100):
        sess.run(tf_train_step, feed_dict={tf_in: x_train, tf_softmax_correct: y_train_onehot})
        # Print accuracy
        result = sess.run(tf_accuracy, feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
        print "Run {},{}".format(i,result)
        k.append(result)
        summary_str = sess.run(summary_op,feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
        summary_writer.add_summary(summary_str, i)
        if result == 1 and saved == 0:
            saved=1
            print "saving"
            saver.save(sess,"./tenIrisSave/saveOne")
    k=np.array(k)
    print(np.where(k==k.max()))
    print "Max: {}".format(k.max())

    print "tensorboard --logdir=$(pwd)/tenIrisSave"



  4. mchirico revised this gist Nov 20, 2015. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion tensorFlowIrisCSV.py
    @@ -114,7 +114,7 @@ def convertOneHot(data):
    ...
    Ref:
    https://gist.github.com/mchirico/bcc376fb336b73f24b29#file-tensorflowiriscsv-py
    """


  5. mchirico revised this gist Nov 20, 2015. 1 changed file with 45 additions and 9 deletions.
    54 changes: 45 additions & 9 deletions tensorFlowIrisCSV.py
    @@ -3,7 +3,7 @@
    import numpy as np
    from numpy import genfromtxt

    # Build Example Data
    # Build example data in CSV format, using the Iris dataset
    from sklearn import datasets
    from sklearn.model_selection import train_test_split
    import sklearn
    @@ -45,7 +45,8 @@ def convertOneHot(data):
    y_test,y_test_onehot = convertOneHot(test_data)



    # A number of features, 4 in this example
    # B = 3 species of Iris (setosa, virginica and versicolor)
    A=data.shape[1]-1 # Number of features, Note first is y
    B=len(y_train_onehot[0])
    tf_in = tf.placeholder("float", [None, A]) # Features
    @@ -69,17 +70,52 @@ def convertOneHot(data):
    sess = tf.Session()
    sess.run(init)

    print("output")
    print("...")
    # Run the training
    for i in range(100):
    for i in range(30):
        sess.run(tf_train_step, feed_dict={tf_in: x_train, tf_softmax_correct: y_train_onehot})

        # Print accuracy
        print "Run {}".format(i)
        print sess.run(tf_accuracy, feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})



        result = sess.run(tf_accuracy, feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
        print "Run {},{}".format(i,result)


    """
    Below is the output
    ...
    Run 0,0.319999992847
    Run 1,0.300000011921
    Run 2,0.379999995232
    Run 3,0.319999992847
    Run 4,0.300000011921
    Run 5,0.699999988079
    Run 6,0.680000007153
    Run 7,0.699999988079
    Run 8,0.680000007153
    Run 9,0.699999988079
    Run 10,0.680000007153
    Run 11,0.680000007153
    Run 12,0.540000021458
    Run 13,0.419999986887
    Run 14,0.680000007153
    Run 15,0.699999988079
    Run 16,0.680000007153
    Run 17,0.699999988079
    Run 18,0.680000007153
    Run 19,0.699999988079
    Run 20,0.699999988079
    Run 21,0.699999988079
    Run 22,0.699999988079
    Run 23,0.699999988079
    Run 24,0.680000007153
    Run 25,0.699999988079
    Run 26,1.0
    Run 27,0.819999992847
    ...
    Ref:
    """



  6. mchirico created this gist Nov 20, 2015.
    92 changes: 92 additions & 0 deletions tensorFlowIrisCSV.py
    @@ -0,0 +1,92 @@
    #!/usr/bin/env python
    import tensorflow as tf
    import numpy as np
    from numpy import genfromtxt

    # Build Example Data
    from sklearn import datasets
    from sklearn.model_selection import train_test_split
    import sklearn
    def buildDataFromIris():
        iris = datasets.load_iris()
        X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.33, random_state=42)
        f=open('cs-training.csv','w')
        for i,j in enumerate(X_train):
            k=np.append(np.array(y_train[i]),j )
            f.write(",".join([str(s) for s in k]) + '\n')
        f.close()
        f=open('cs-testing.csv','w')
        for i,j in enumerate(X_test):
            k=np.append(np.array(y_test[i]),j )
            f.write(",".join([str(s) for s in k]) + '\n')
        f.close()


    # Convert to one hot
    def convertOneHot(data):
        y=np.array([int(i[0]) for i in data])
        y_onehot=[0]*len(y)
        for i,j in enumerate(y):
            y_onehot[i]=[0]*(y.max() + 1)
            y_onehot[i][j]=1
        return (y,y_onehot)


    buildDataFromIris()


    data = genfromtxt('cs-training.csv',delimiter=',') # Training data
    test_data = genfromtxt('cs-testing.csv',delimiter=',') # Test data

    x_train=np.array([ i[1::] for i in data])
    y_train,y_train_onehot = convertOneHot(data)

    x_test=np.array([ i[1::] for i in test_data])
    y_test,y_test_onehot = convertOneHot(test_data)



    A=data.shape[1]-1 # Number of features, Note first is y
    B=len(y_train_onehot[0])
    tf_in = tf.placeholder("float", [None, A]) # Features
    tf_weight = tf.Variable(tf.zeros([A,B]))
    tf_bias = tf.Variable(tf.zeros([B]))
    tf_softmax = tf.nn.softmax(tf.matmul(tf_in,tf_weight) + tf_bias)

    # Training via backpropagation
    tf_softmax_correct = tf.placeholder("float", [None,B])
    tf_cross_entropy = -tf.reduce_sum(tf_softmax_correct*tf.log(tf_softmax))

    # Train using tf.train.GradientDescentOptimizer
    tf_train_step = tf.train.GradientDescentOptimizer(0.01).minimize(tf_cross_entropy)

    # Add accuracy checking nodes
    tf_correct_prediction = tf.equal(tf.argmax(tf_softmax,1), tf.argmax(tf_softmax_correct,1))
    tf_accuracy = tf.reduce_mean(tf.cast(tf_correct_prediction, "float"))

    # Initialize and run
    init = tf.initialize_all_variables()
    sess = tf.Session()
    sess.run(init)

    print("output")
    # Run the training
    for i in range(100):
        sess.run(tf_train_step, feed_dict={tf_in: x_train, tf_softmax_correct: y_train_onehot})

        # Print accuracy
        print "Run {}".format(i)
        print sess.run(tf_accuracy, feed_dict={tf_in: x_test, tf_softmax_correct: y_test_onehot})
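
    The convertOneHot helper above builds one-hot labels with an explicit loop. A
    minimal vectorized sketch of the same conversion (convertOneHotNP is a
    hypothetical name; it assumes the integer class label sits in the first CSV
    column, as in cs-training.csv):

        def convertOneHotNP(data):
            # First column holds the class label; np.eye selects one-hot rows.
            y = data[:, 0].astype(int)
            y_onehot = np.eye(y.max() + 1)[y]
            return y, y_onehot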