오늘 주제 토론 (kNN) 발표 자료입니다.


발표자 : 최붕기 박사님




knn.key.pdf










CNN 실습 소스 코드입니다. (from learnopencv.com)

cifar10 이미지 분류 dataset 대상






import numpy as np

import matplotlib.pyplot as plt

%matplotlib inline

from __future__ import print_function

import keras

from keras.models import Sequential

from keras.layers import Dense, Conv2D, MaxPooling2D, Dropout, Flatten




from keras.datasets import cifar10

(train_images, train_labels), (test_images, test_labels) = cifar10.load_data()



from keras.utils import to_categorical



# ---- Inspect the raw data ------------------------------------------------
print('Training data shape : ', train_images.shape, train_labels.shape)
print('Testing data shape : ', test_images.shape, test_labels.shape)

# Derive the class list from the labels rather than hard-coding 10 classes.
classes = np.unique(train_labels)
nClasses = len(classes)
print('Total number of outputs : ', nClasses)
print('Output classes : ', classes)

# Show the first training and first test image side by side, titled with
# their ground-truth labels.
plt.figure(figsize=[4,2])

plt.subplot(121)
plt.imshow(train_images[0,:,:], cmap='gray')
plt.title("Ground Truth : {}".format(train_labels[0]))

plt.subplot(122)
plt.imshow(test_images[0,:,:], cmap='gray')
plt.title("Ground Truth : {}".format(test_labels[0]))

# ---- Prepare model inputs ------------------------------------------------
# Read the per-image dimensions off the data itself; input_shape feeds the
# first Conv2D layer of the model.
nRows, nCols, nDims = train_images.shape[1:]
input_shape = (nRows, nCols, nDims)

# Reshape (a no-op safeguard for CIFAR-10's (N, 32, 32, 3) layout), cast to
# float32, and rescale pixel values from [0, 255] into [0, 1].
train_data = train_images.reshape(train_images.shape[0], nRows, nCols, nDims).astype('float32') / 255
test_data = test_images.reshape(test_images.shape[0], nRows, nCols, nDims).astype('float32') / 255

# One-hot encode the integer labels for categorical cross-entropy training.
train_labels_one_hot = to_categorical(train_labels)
test_labels_one_hot = to_categorical(test_labels)




def createModel():
    """Build the CIFAR-10 CNN: three Conv-Conv-Pool-Dropout stages
    followed by a dense softmax classifier head."""
    model = Sequential()

    # Each stage is two 3x3 conv layers, a 2x2 max-pool, and 25% dropout.
    # The first conv of a stage uses 'same' padding; the second keeps the
    # default 'valid' padding, matching the reference architecture.
    for stage, filters in enumerate((32, 64, 64)):
        if stage == 0:
            # Only the very first layer declares the input shape.
            model.add(Conv2D(filters, (3, 3), padding='same',
                             activation='relu', input_shape=input_shape))
        else:
            model.add(Conv2D(filters, (3, 3), padding='same', activation='relu'))
        model.add(Conv2D(filters, (3, 3), activation='relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(0.25))

    # Classifier head: flatten feature maps, one hidden dense layer with
    # heavier dropout, then a softmax over the nClasses outputs.
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(nClasses, activation='softmax'))

    return model



model1 = createModel()
batch_size = 256
epochs = 50

# RMSprop + categorical cross-entropy suits the one-hot multi-class targets.
model1.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])

model1.summary()

# NOTE(review): validation_data here is the *test* set, so the "validation"
# curves below are really test-set curves — common in tutorials, but not
# rigorous model-selection practice.
history = model1.fit(train_data, train_labels_one_hot, batch_size=batch_size, epochs=epochs, verbose=1,
                     validation_data=(test_data, test_labels_one_hot))

model1.evaluate(test_data, test_labels_one_hot)

# Loss curves (training vs. validation).
plt.figure(figsize=[8,6])
plt.plot(history.history['loss'],'r',linewidth=3.0)
plt.plot(history.history['val_loss'],'b',linewidth=3.0)
plt.legend(['Training loss', 'Validation Loss'],fontsize=18)
plt.xlabel('Epochs ',fontsize=16)
plt.ylabel('Loss',fontsize=16)
plt.title('Loss Curves',fontsize=16)

# Accuracy curves.
# Fix: older Keras logs the accuracy metric under 'acc'/'val_acc', while
# current Keras uses 'accuracy'/'val_accuracy'; the hard-coded 'acc' key
# raises KeyError on modern versions, so pick whichever key is present.
acc_key = 'acc' if 'acc' in history.history else 'accuracy'
val_acc_key = 'val_' + acc_key
plt.figure(figsize=[8,6])
plt.plot(history.history[acc_key],'r',linewidth=3.0)
plt.plot(history.history[val_acc_key],'b',linewidth=3.0)
plt.legend(['Training Accuracy', 'Validation Accuracy'],fontsize=18)
plt.xlabel('Epochs ',fontsize=16)
plt.ylabel('Accuracy',fontsize=16)
plt.title('Accuracy Curves',fontsize=16)








 

 

time series binary classification.

Home assignment용 데이터 (자세한 사항은 수업시간에 설명하겠습니다.)

 

출처: Alcoholism EEG dataset from UCI machine learning data repository

 

 

 

TR_allTrialEEGavgGlobalNormLabeled.csv.zip

 

 

 

 

 

 

 


6번째 수업 자료 입니다.


- 주제 토론: Random Forest (서신원)
- 딥러닝 실행 도구 (최붕기 박사님)
- 동영상 시청
- Artificial Neural Networks
- MLP
- CNN
- Recursive NN
- Recurrent NN
- LSTM
- Sequence to Sequence
- Shallow neural networks (word2vec)




class06.pptx





주제 토론 발표 자료 입니다.


주제: Random Forest

발표자: 서신원





Ramdom Forest ver.2.pdf


+ Recent posts