320x100
320x100
케라스로 MNIST 데이터셋 분류 파이썬 예제 저장하기
# save to model — load MNIST, scale pixels to [0, 1], and preview 36 digits.
import tensorflow as tf
import matplotlib.pyplot as plt

mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Raw pixels are 0-255 integers; rescale to floats in [0, 1] for training.
x_train, x_test = x_train / 255.0, x_test / 255.0

# Sanity-check the data: first 36 training digits in a 3x12 grid.
plt.figure(figsize=(8, 2))  # 8x2 inches
for idx in range(36):
    plt.subplot(3, 12, idx + 1)
    plt.imshow(x_train[idx], cmap="gray")
    plt.axis("off")
plt.show()
# define model: a single softmax layer over flattened 28x28 images — i.e.
# multinomial logistic regression across the 10 digit classes.
model = tf.keras.models.Sequential([
    # Flatten turns each (28, 28) image into a 784-element vector.
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    # FIX: the original also passed input_dim=784 here; Keras ignores it
    # because Flatten already fixes the input shape, so it is dropped to
    # avoid implying the Dense layer controls the input size.
    tf.keras.layers.Dense(10, activation='softmax'),
])

# model compile: sparse categorical CE because the labels are integer
# class ids (0-9), not one-hot vectors.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=["accuracy"])
model.summary()
# model fit: train for 15 epochs, validating on the test split each epoch.
hist = model.fit(
    x_train,
    y_train,
    validation_data=(x_test, y_test),
    verbose=2,
    batch_size=100,
    epochs=15,
    # NOTE(review): use_multiprocessing only affects generator/Sequence
    # inputs, so it is presumably a no-op for array data — confirm.
    use_multiprocessing=True,
)

# model evaluate: final loss/accuracy on the held-out test set.
model.evaluate(
    x_test,
    y_test,
    verbose=2,
    batch_size=100,
    use_multiprocessing=True,
)

# save model to h5 file
file_name = "softmax_mnist_model.h5"
model.save(file_name)
print(f"\nThis model has been saved to {file_name}.")
# Graph print: training loss on the left, train/val accuracy on the right.
plt.figure(figsize=(8, 4))

ax_cost = plt.subplot(1, 2, 1)
ax_cost.plot(hist.history['loss'])
ax_cost.set_title("Cost Graph")
ax_cost.set_ylabel("cost")

ax_acc = plt.subplot(1, 2, 2)
ax_acc.set_title("Accuracy Graph")
ax_acc.set_ylabel("accuracy")
ax_acc.plot(hist.history['accuracy'], 'b-', label="training accuracy")
ax_acc.plot(hist.history['val_accuracy'], 'r:', label="validation accuracy")
ax_acc.legend()

plt.tight_layout()
plt.show()
# use model: predict the class of the first test image and display it.
prediction = model.predict(x_test[:1, :])
prediction_class = tf.argmax(prediction, 1)
print(f"\nPrediction Result:\n{prediction}")
print("Predicted class: ", prediction_class.numpy())
# BUG FIX: the original called plt.imshow(x_test[prediction_class[0]]),
# which indexes the test set by the predicted *class id* (0-9) and so shows
# an unrelated image. Show the image that was actually classified instead.
plt.imshow(x_test[0])
plt.grid(False)
plt.axis("off")
plt.show()
- 모델을 저장하는 부분
# save model to h5 file; the load example later in this post reads this same file back
file_name="softmax_mnist_model.h5"
model.save(file_name)
print(f"\nThis model has been saved to {file_name}.")
저장한 모델을 로드하여 사용하는 파이썬 예제
# load to model — rebuild the same MNIST data pipeline used for training.
import tensorflow as tf
import matplotlib.pyplot as plt

mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Normalise the 0-255 pixel intensities down to the [0, 1] range.
x_train, x_test = x_train / 255.0, x_test / 255.0

# Quick visual check of the inputs before using the restored model.
plt.figure(figsize=(8, 2))  # 8x2 inches
for n in range(36):
    plt.subplot(3, 12, n + 1)
    plt.imshow(x_train[n], cmap="gray")
    plt.axis("off")
plt.show()
# load model: restore the previously saved softmax classifier from disk.
model_path = "softmax_mnist_model.h5"
model = tf.keras.models.load_model(model_path)

# model compile
# NOTE(review): load_model on an .h5 file usually restores the compile
# settings already; this explicit re-compile mirrors the original script
# but resets the optimizer state before the fit below — confirm intended.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=["accuracy"])
# model fit: continue training the restored model for 15 more epochs,
# validating against the test split after each one.
hist = model.fit(x_train,
                 y_train,
                 validation_data=(x_test, y_test),
                 verbose=2,
                 batch_size=100,
                 epochs=15,
                 use_multiprocessing=True)

# model evaluate: report loss/accuracy on the held-out test set.
model.evaluate(x_test,
               y_test,
               verbose=2,
               batch_size=100,
               use_multiprocessing=True)
# Graph print — left panel: training loss; right panel: accuracy curves.
plt.figure(figsize=(8, 4))

plt.subplot(1, 2, 1)
plt.title("Cost Graph")
plt.ylabel("cost")
plt.plot(hist.history['loss'])

plt.subplot(1, 2, 2)
plt.title("Accuracy Graph")
plt.ylabel("accuracy")
# Plot both accuracy series with their line styles and legend labels.
for history_key, line_fmt, legend_label in (
    ('accuracy', 'b-', "training accuracy"),
    ('val_accuracy', 'r:', "validation accuracy"),
):
    plt.plot(hist.history[history_key], line_fmt, label=legend_label)
plt.legend()

plt.tight_layout()
plt.show()
# use model: predict the class of the first test image and display it.
prediction = model.predict(x_test[:1, :])
prediction_class = tf.argmax(prediction, 1)
print(f"\nPrediction Result:\n{prediction}")
print("Predicted class: ", prediction_class.numpy())
# BUG FIX: the original called plt.imshow(x_test[prediction_class[0]]),
# which indexes the test set by the predicted *class id* (0-9) and so shows
# an unrelated image. Show the image that was actually classified instead.
plt.imshow(x_test[0])
plt.grid(False)
plt.axis("off")
plt.show()
- 모델을 로드 하는 부분
# load model: read the .h5 file written by the save example back into a usable Keras model
file_name="softmax_mnist_model.h5"
model = tf.keras.models.load_model(file_name)
300x250
728x90
'Computer Science > DeepLearning' 카테고리의 다른 글
DeepLearning을 활용한 MNIST 데이터셋 분류 정확도 향상 (0) | 2020.12.16 |
---|---|
Tensorflow를 활용한 Neural Network 구현 (0) | 2020.12.16 |
머신러닝 기초 5 - Classification (분류) (0) | 2020.12.16 |
머신러닝 기초 4 - 다중 선형회귀 (0) | 2020.12.10 |
머신러닝 기초 3 - 회귀분석 (0) | 2020.12.10 |