-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmnist_mlp_baseline.py
137 lines (117 loc) · 3.7 KB
/
mnist_mlp_baseline.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
# Baseline MLP for MNIST dataset
import numpy
import skimage.io as io
import os
import platform
import getpass
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.utils import np_utils
from keras.models import model_from_json
from os.path import isfile, join
# -----------------------------------------------------------------------------
# Script setup: seed the RNG, load a user-supplied digit image, then load and
# preprocess MNIST (flatten to 784-vectors, scale to [0, 1], one-hot labels).
# -----------------------------------------------------------------------------
# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)

# Locate the input image.
# BUG FIX: the original used `platform is 'Windows'` -- `is` tests object
# identity, not string equality, and is not guaranteed True even for equal
# strings; use `==`. Also renamed the variable so it no longer shadows the
# `platform` module.
system_name = platform.system()
currentUser = getpass.getuser()
currentDirectory = os.getcwd()
if system_name == 'Windows':
    # path_image = 'C:\\Users\\' + currentUser
    path_image = currentDirectory
else:
    # path_image = '/user/' + currentUser
    path_image = currentDirectory

fn = 'image.png'
img = io.imread(os.path.join(path_image, fn))

# Wrap the single user image (assumed 28x28 grayscale -- TODO confirm) in a
# batch of size 1, with a hard-coded one-hot label of 3.
X_t = numpy.asarray([img])
y_t = np_utils.to_categorical(numpy.asarray([3]), 10)

(X_train, y_train), (X_test, y_test) = mnist.load_data()
# flatten 28*28 images to a 784 vector for each image
num_pixels = X_train.shape[1] * X_train.shape[2]
X_train = X_train.reshape(X_train.shape[0], num_pixels).astype('float32')
X_test = X_test.reshape(X_test.shape[0], num_pixels).astype('float32')
X_t = X_t.reshape(X_t.shape[0], num_pixels).astype('float32')
# normalize inputs from 0-255 to 0-1
X_train = X_train / 255
X_test = X_test / 255
X_t /= 255
print('X_train shape:', X_train.shape)
print('X_t shape:', X_t.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
print(X_t.shape[0], 'test images')
# one hot encode outputs
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_test.shape[1]
print(y_test.shape[1], 'number of classes')
# define baseline model
def baseline_model():
    """Build and compile the baseline single-hidden-layer MLP.

    Architecture: a 784-unit ReLU hidden layer followed by a softmax output
    over ``num_classes`` digits (both sizes come from module-level globals).
    """
    net = Sequential([
        Dense(num_pixels, input_dim=num_pixels, init='normal', activation='relu'),
        Dense(num_classes, init='normal', activation='softmax'),
    ])
    net.compile(loss='categorical_crossentropy', optimizer='adam',
                metrics=['accuracy'])
    return net
def build_model(model):
    """Train ``model`` on the MNIST training set and return it.

    BUG FIX: the original ignored its ``model`` argument and rebuilt a fresh
    network via ``baseline_model()``. The caller already constructs the model,
    so we now train the one that was passed in instead of discarding it.
    """
    # Fit the model (``nb_epoch`` is the Keras-1 spelling, kept for
    # consistency with the rest of this file).
    model.fit(X_train, y_train, validation_data=(X_test, y_test),
              nb_epoch=10, batch_size=200, verbose=2)
    return model
def save_model(model):
    """Persist ``model``: architecture to model.json, weights to model.h5."""
    # serialize the architecture to JSON
    with open("model.json", "w") as json_file:
        json_file.write(model.to_json())
    # serialize the weights to HDF5
    model.save_weights("model.h5")
    print("Saved model to disk")
def load_model():
    """Rebuild the model from model.json and load its weights from model.h5.

    Returns the reconstructed model. FIX: the original opened model.json
    without a context manager, leaking the file handle if the read raised;
    ``with`` guarantees the file is closed.
    """
    # load json and create model
    with open('model.json', 'r') as json_file:
        loaded_model_json = json_file.read()
    loaded_model = model_from_json(loaded_model_json)
    # load weights into new model
    loaded_model.load_weights("model.h5")
    if loaded_model:
        print("Loaded model")
    else:
        print("Model is not loaded correctly")
    return loaded_model
def print_class(scores):
    """Print every class probability, then announce classes scoring > 0.5.

    ``scores`` is a 2-D array of class probabilities; the column index of
    each entry is reported as the digit class.
    """
    # First pass: dump every (class, probability) pair.
    for (_, number), score in numpy.ndenumerate(scores):
        print(number, "-", score)
    # Second pass: report confident predictions (probability above 0.5).
    for (_, number), score in numpy.ndenumerate(scores):
        if score > 0.5:
            print("\nNumber is: %d, probability is: %f" % (number, score))
# -----------------------------------------------------------------------------
# Entry point: train-and-save on the first run; on later runs load the saved
# model. Either way, classify the user image X_t and print the probabilities.
# FIXES: the boolean from os.path.exists was stored in a variable misleadingly
# named `path`; a model was also built unconditionally even on the load branch,
# where it was never used -- construction now happens only when training.
# -----------------------------------------------------------------------------
model_exists = os.path.exists("model.json")
if not model_exists:
    model = baseline_model()
    model = build_model(model)
    save_model(model)
    # Final evaluation of the model
    scores = model.predict(X_t)
    print("Probabilities for each class\n")
    print_class(scores)
else:
    # Final evaluation of the model
    loaded_model = load_model()
    if loaded_model is not None:
        loaded_model.compile(loss='categorical_crossentropy', optimizer='adam',
                             metrics=['accuracy'])
        scores = loaded_model.predict(X_t)
        print("Probabilities for each class\n")
        print_class(scores)