import keras
from keras.layers import SimpleRNN
print(keras.__version__)
2.9.0
from keras.models import Sequential
from keras.layers import Embedding, SimpleRNN
model = Sequential()
model.add(Embedding(10000, 32))
model.add(SimpleRNN(32, return_sequences=False))
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= embedding (Embedding) (None, None, 32) 320000 simple_rnn (SimpleRNN) (None, 32) 2080 ================================================================= Total params: 322,080 Trainable params: 322,080 Non-trainable params: 0 _________________________________________________________________
from keras.models import Sequential
from keras.layers import Embedding, SimpleRNN
model = Sequential()
model.add(Embedding(10000, 32))
model.add(SimpleRNN(32, return_sequences=True))
model.summary()
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= embedding_1 (Embedding) (None, None, 32) 320000 simple_rnn_1 (SimpleRNN) (None, None, 32) 2080 ================================================================= Total params: 322,080 Trainable params: 322,080 Non-trainable params: 0 _________________________________________________________________
model = Sequential()
model.add(Embedding(10000, 32))
model.add(SimpleRNN(32, return_sequences=True))
model.add(SimpleRNN(32, return_sequences=True))
model.add(SimpleRNN(32, return_sequences=True))
model.add(SimpleRNN(32)) # only the topmost layer returns just the last output
model.summary()
Model: "sequential_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= embedding_2 (Embedding) (None, None, 32) 320000 simple_rnn_2 (SimpleRNN) (None, None, 32) 2080 simple_rnn_3 (SimpleRNN) (None, None, 32) 2080 simple_rnn_4 (SimpleRNN) (None, None, 32) 2080 simple_rnn_5 (SimpleRNN) (None, 32) 2080 ================================================================= Total params: 328,320 Trainable params: 328,320 Non-trainable params: 0 _________________________________________________________________
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing import sequence
max_features = 10000 # number of words to use as features (the max_features most frequent words)
maxlen = 500 # cut off each text after this many words
batch_size = 32
print('Loading data...')
(input_train, y_train), (input_test, y_test) = imdb.load_data(num_words=max_features)
print(len(input_train), 'train sequences')
print(len(input_test), 'test sequences')
Loading data...
25000 train sequences
25000 test sequences
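At this point each review is a plain Python list of word indices, and the reviews all have different lengths, which is why the padding step below is needed. A quick way to see this (a sketch on the variables defined above; the exact lengths depend on the dataset, e.g. 218 and 189 for the first two training reviews):

print(input_train.shape)                           # (25000,) -- an object array of lists
print(len(input_train[0]), len(input_train[1]))    # reviews vary in length
print(input_train[0][:10])                         # word indices, not text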
# pad_sequences() cuts off the words that come after position maxlen in each text.
# If a text is shorter than maxlen, the missing positions are filled with zeros.
print('Pad sequences (samples x time)')
input_train = sequence.pad_sequences(input_train, maxlen=maxlen)
input_test = sequence.pad_sequences(input_test, maxlen=maxlen)
print('input_train shape:', input_train.shape)
print('input_test shape:', input_test.shape)
Pad sequences (samples x time)
input_train shape: (25000, 500)
input_test shape: (25000, 500)
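Note that pad_sequences pads at the front by default (padding='pre') and also truncates from the front (truncating='pre'), so for long reviews it is the beginning that gets cut off. A toy example with illustrative values:

from tensorflow.keras.preprocessing import sequence

toy = [[1, 2, 3], [1, 2, 3, 4, 5, 6]]
print(sequence.pad_sequences(toy, maxlen=4))
# [[0 1 2 3]    <- short sequence zero-padded at the front
#  [3 4 5 6]]   <- long sequence truncated at the front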
from keras.layers import Dense
model = Sequential()
model.add(Embedding(max_features, 32)) # max_features = 10000
model.add(SimpleRNN(32))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
model.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= embedding_3 (Embedding) (None, None, 32) 320000 simple_rnn_6 (SimpleRNN) (None, 32) 2080 dense (Dense) (None, 1) 33 ================================================================= Total params: 322,113 Trainable params: 322,113 Non-trainable params: 0 _________________________________________________________________
%%time
history = model.fit(input_train, y_train,
epochs=10,
batch_size=128,
validation_split=0.2)
Epoch 1/10
157/157 [==============================] - 14s 86ms/step - loss: 0.5730 - acc: 0.6963 - val_loss: 0.4303 - val_acc: 0.8126
Epoch 2/10
157/157 [==============================] - 14s 86ms/step - loss: 0.3595 - acc: 0.8504 - val_loss: 0.3528 - val_acc: 0.8576
Epoch 3/10
157/157 [==============================] - 14s 90ms/step - loss: 0.2868 - acc: 0.8852 - val_loss: 0.3337 - val_acc: 0.8648
Epoch 4/10
157/157 [==============================] - 14s 92ms/step - loss: 0.2306 - acc: 0.9116 - val_loss: 0.4230 - val_acc: 0.8186
Epoch 5/10
157/157 [==============================] - 14s 91ms/step - loss: 0.1933 - acc: 0.9305 - val_loss: 0.3624 - val_acc: 0.8604
Epoch 6/10
157/157 [==============================] - 14s 91ms/step - loss: 0.1641 - acc: 0.9424 - val_loss: 0.3716 - val_acc: 0.8590
Epoch 7/10
157/157 [==============================] - 15s 93ms/step - loss: 0.1194 - acc: 0.9578 - val_loss: 0.3875 - val_acc: 0.8574
Epoch 8/10
157/157 [==============================] - 14s 89ms/step - loss: 0.0928 - acc: 0.9696 - val_loss: 0.5483 - val_acc: 0.8148
Epoch 9/10
157/157 [==============================] - 14s 89ms/step - loss: 0.0660 - acc: 0.9773 - val_loss: 0.6824 - val_acc: 0.7710
Epoch 10/10
157/157 [==============================] - 17s 109ms/step - loss: 0.0538 - acc: 0.9827 - val_loss: 0.4944 - val_acc: 0.8372
CPU times: total: 10min 49s
Wall time: 2min 24s
model.evaluate(input_test, y_test)
782/782 [==============================] - 14s 17ms/step - loss: 0.5044 - acc: 0.8382
[0.5043584704399109, 0.8381999731063843]
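The validation loss bottoms out around epoch 3 and the model overfits afterwards, which is consistent with the ~84% test accuracy above. To turn the trained model into actual predictions, model.predict returns the sigmoid probability per review, which can be thresholded at 0.5. A minimal sketch using the variables defined above:

probs = model.predict(input_test[:5])       # sigmoid outputs in [0, 1]
preds = (probs[:, 0] > 0.5).astype(int)     # threshold at 0.5: 1 = positive, 0 = negative
print(probs[:, 0], preds, y_test[:5])       # compare against the true labels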
import matplotlib.pyplot as plt
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()