#!/usr/bin/env python
# coding: utf-8

# # Building an intermediate-depth neural network with Keras
#
# This notebook extends the shallow network from chapter 5 into an
# intermediate-depth network: two hidden ReLU layers instead of one
# sigmoid layer, trained on MNIST digit classification.

# #### Load libraries.

from tensorflow import keras
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import SGD

# #### Load the data.

(X_train, y_train), (X_valid, y_valid) = mnist.load_data()

# #### Preprocess the data.

# Flatten each 28x28 image into a 784-element vector; -1 lets NumPy infer
# the sample count instead of hard-coding 60000/10000, so the code keeps
# working if the dataset split sizes ever differ.
X_train = X_train.reshape(-1, 784).astype('float32')
X_valid = X_valid.reshape(-1, 784).astype('float32')

# Scale pixel intensities from [0, 255] to [0, 1].
X_train /= 255
X_valid /= 255

# One-hot encode the integer digit labels (0-9) into length-10 vectors,
# matching the softmax output layer and categorical cross-entropy loss.
n_classes = 10
y_train = keras.utils.to_categorical(y_train, n_classes)
y_valid = keras.utils.to_categorical(y_valid, n_classes)

# #### Design the neural network architecture.

model = Sequential()
model.add(Dense(64, activation='relu', input_shape=(784,)))
model.add(Dense(64, activation='relu'))
model.add(Dense(10, activation='softmax'))

model.summary()

# #### Compile the model.

# Plain SGD with a relatively large learning rate (0.1) — the ReLU hidden
# layers avoid the vanishing-gradient issues that forced a smaller rate
# with the earlier sigmoid network.
model.compile(loss='categorical_crossentropy',
              optimizer=SGD(learning_rate=0.1),
              metrics=['accuracy'])

# #### Train!

model.fit(X_train, y_train,
          batch_size=128, epochs=20, verbose=1,
          validation_data=(X_valid, y_valid))