Let's modify the source code of Example 2 so that it loads and analyzes a CSV data file stored on the local hard disk.
This is a multi-class classification problem.

We keep the feature that charts the train loss, validation loss, and accuracy for each epoch.
Pay attention to the code that splits the raw data into training and validation data, and to the part that converts the labels into one-hot codes before using them.
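For reference, here is a minimal sketch (not part of the example itself) of what the one-hot conversion does to integer class ids:

from keras.utils import np_utils
# integer class ids 0, 1, 2 become three-dimensional indicator vectors
print(np_utils.to_categorical([0, 1, 2]))
# [[1. 0. 0.]
#  [0. 1. 0.]
#  [0. 0. 1.]]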
import numpy as np
import pandas
from keras.utils import np_utils                       # one-hot encoding helper
from keras.models import Sequential
from keras.layers import Dense
from sklearn.model_selection import train_test_split   # raw data -> train / validation split
from sklearn.preprocessing import LabelEncoder         # string labels -> integer ids
seed = 3
np.random.seed(seed)
# load the iris dataset from the local csv file
dataframe = pandas.read_csv("iris.csv", header=None)
dataset = dataframe.values
alldata = dataset[:, 0:4].astype(float)   # four numeric features per sample
alllabel = dataset[:, 4]                  # class label string in the last column
# sklearn's usual X_train, X_test, y_train, y_test correspond to data_train, data_val, label_train, label_val here
data_train, data_val, label_train, label_val = train_test_split(alldata, alllabel, test_size=0.5, random_state=seed)
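# test_size=0.5 splits the 150 iris rows into 75 training and 75 validation samples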
# the manual index sampling below is handled automatically by train_test_split above
#train_rand_idxs = np.random.choice(len(list(data_train)),100) # c.f. total 150 rows
#val_rand_idxs = np.random.choice(len(list(data_val)),30)
#data_train = data_train[train_rand_idxs]
#label_train = label_train[train_rand_idxs]
#data_val = data_val[val_rand_idxs]
#label_val = label_val[val_rand_idxs]
# encode class labels as integers
encoder = LabelEncoder()
encoder.fit(label_train)
encoded_label_train = encoder.transform(label_train)
# convert integers to one hot representation
onehot_label_train = np_utils.to_categorical(encoded_label_train)
# reuse the encoder fitted on the training labels so the integer mapping stays consistent
encoded_label_val = encoder.transform(label_val)
# convert integers to one hot representation
onehot_label_val = np_utils.to_categorical(encoded_label_val)
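# with the 50/50 split of the 150 rows, both one-hot arrays have shape (75, 3): one indicator column per class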
model = Sequential()
model.add(Dense(units=8, input_dim=4, activation='relu'))   # hidden layer: 4 input features -> 8 units
model.add(Dense(units=3, activation='softmax'))             # output layer: one unit per class, softmax probabilities
# units = dimensionality of the output space
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
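# optional check (not in the original listing): print each layer's output shape and parameter count
model.summary()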
# see https://keras.io/losses/, https://keras.io/optimizers/
# train the model
hist = model.fit(data_train, onehot_label_train, epochs=100, batch_size=10, validation_data=(data_val, onehot_label_val))
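# optional (not in the original listing): model.evaluate returns [loss, accuracy]
# for the metrics configured in compile(), measured here on the validation set
val_loss, val_acc = model.evaluate(data_val, onehot_label_val, verbose=0)
print('validation loss: %.4f, accuracy: %.4f' % (val_loss, val_acc))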
# show the learning process
# Jupyter magic: render the plots inline in the notebook
%matplotlib inline
import matplotlib.pyplot as plt
fig, loss_ax = plt.subplots()
acc_ax = loss_ax.twinx()
loss_ax.plot(hist.history['loss'], 'y', label='train loss')
loss_ax.plot(hist.history['val_loss'], 'r', label='val loss')
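# note: newer Keras / TensorFlow versions may store these under the keys 'accuracy' and 'val_accuracy'
# instead of 'acc' and 'val_acc'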
acc_ax.plot(hist.history['acc'], 'b', label='train acc')
acc_ax.plot(hist.history['val_acc'], 'g', label='val acc')
loss_ax.set_xlabel('epoch')
loss_ax.set_ylabel('loss')
acc_ax.set_ylabel('accuracy')
loss_ax.legend(loc='upper left')
acc_ax.legend(loc='lower left')
plt.show()
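Finally, an optional sketch (not part of the original example; it assumes the trained model and the label encoder fitted above are still in scope) showing how predicted class indices can be mapped back to the original label strings:

pred_probs = model.predict(data_val)                    # softmax probabilities, one row per validation sample
pred_indices = np.argmax(pred_probs, axis=1)            # index of the most probable class
pred_labels = encoder.inverse_transform(pred_indices)   # back to the original label strings
print(pred_labels[:5])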