Keras Dense VS SKLearn MLP VS SKLearn Decision Tree
# Arda Mavi
import keras
import numpy as np
from sklearn import tree
from random import shuffle
from keras.layers import Dense
from keras.models import Sequential
from sklearn.neural_network import MLPClassifier
# Getting data:
# Y is always the third element of X (index 2).
# Train data example X: [0,2,3,1] & Y: 3
X, Y = [], []
for a in range(4):
    for b in range(4):
        for c in range(4):
            for d in range(4):
                X.append([a,b,c,d])
shuffle(X)
# Train-test split: first 80% for training, the rest for testing.
split = int(8*len(X)/10)
test = X[split:]
X = X[:split]
print('Data len:', len(X)+len(test))
for i in X:
    Y.append(i[2])
# Keras expects numpy arrays; scikit-learn accepts them as well.
X, Y, test = np.array(X), np.array(Y), np.array(test)
print('Data Ready')
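# Sanity check (illustrative addition, not part of the original comparison):
# every training label should equal the third element of its sample.
assert all(Y[i] == X[i][2] for i in range(len(X))), 'label/feature mismatch'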
# Evaluate rate: percentage of test samples whose (truncated) prediction
# equals the third element of the sample.
def evaluate():
    # Uses the module-level `model`, which is rebound before each call below.
    # ravel() flattens Keras' (n, 1) output to match scikit-learn's (n,) output.
    sonuc = np.asarray(model.predict(test)).ravel()  # sonuc: Turkish for "result"
    count = 0
    for i in range(len(test)):
        if test[i][2] == int(sonuc[i]):
            count += 1
    evaluate_rate = count*100/len(test)
    return evaluate_rate
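# Note: the same evaluate() serves all three models below because Keras
# models and scikit-learn estimators both expose a predict() method; only
# the module-level `model` binding changes between sections.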
# Keras Sequential:
model = Sequential()
model.add(Dense(1, input_shape=(4,), activation='relu'))
model.compile(optimizer=keras.optimizers.RMSprop(lr=0.1, decay=1e-6), loss='mse')
model.fit(X, Y, epochs=80)
print('Keras Dense Accuracy:', evaluate())
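# Why a single unit can work here (added note): with weights [0, 0, 1, 0] and
# bias 0, relu gives y = x[2] exactly (x[2] is never negative), so near-perfect
# accuracy is representable; whether RMSprop at lr=0.1 actually finds that
# solution can vary from run to run.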
# SKLearn MLP:
model = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(1,), random_state=1)
model.fit(X, Y)
print('SKLearn MLP Accuracy:', evaluate())
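# Added note: one hidden unit squeezes the 4-D input to a scalar before the
# 4-class softmax. The task is still representable (the unit can pass x[2]
# through), but with so little capacity lbfgs may or may not converge to it.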
# SKLearn Decision Tree:
model = tree.DecisionTreeClassifier()
model.fit(X, Y)
print('SKLearn Decision Tree Accuracy:', evaluate())
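# Added note: the tree can split directly on feature index 2, and the label is
# a deterministic function of that feature, so it should reach 100% test
# accuracy. Training is guaranteed to cover all four values of x[2]: each value
# occurs 64 times in the 256 samples, and the test split holds only 52 of them.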