import tensorflow as tf
import keras
print(tf.__version__)
print(keras.__version__)
2.9.1
2.9.0
import numpy as np
for i in range(10):
    lst = list(range(i, i+5))
    print(lst)
[0, 1, 2, 3, 4]
[1, 2, 3, 4, 5]
[2, 3, 4, 5, 6]
[3, 4, 5, 6, 7]
[4, 5, 6, 7, 8]
[5, 6, 7, 8, 9]
[6, 7, 8, 9, 10]
[7, 8, 9, 10, 11]
[8, 9, 10, 11, 12]
[9, 10, 11, 12, 13]
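The same ten windows can also be produced in a single step with a list comprehension (an equivalent sketch, not part of the original run):

windows = [list(range(i, i + 5)) for i in range(10)]
print(windows[0], windows[-1])   # [0, 1, 2, 3, 4] [9, 10, 11, 12, 13]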
X = []
Y = []
for i in range(10):
    lst = list(range(i, i+5))
    X.append([[c/10] for c in lst])   # input window: five consecutive values, scaled by 1/10
    Y.append((i+5)/10)                # target: the value that follows the window
X = np.array(X)
Y = np.array(Y)
print( X.shape, Y.shape )  # 10 samples of shape (5, 1); the target for the first window is 0.5
print( X[0], Y[0])
print()
print( X[1], Y[1])
print()
print(Y)
print(X)
(10, 5, 1) (10,)
[[0. ] [0.1] [0.2] [0.3] [0.4]] 0.5

[[0.1] [0.2] [0.3] [0.4] [0.5]] 0.6

[0.5 0.6 0.7 0.8 0.9 1.  1.1 1.2 1.3 1.4]
[[[0. ] [0.1] [0.2] [0.3] [0.4]]
 [[0.1] [0.2] [0.3] [0.4] [0.5]]
 [[0.2] [0.3] [0.4] [0.5] [0.6]]
 [[0.3] [0.4] [0.5] [0.6] [0.7]]
 [[0.4] [0.5] [0.6] [0.7] [0.8]]
 [[0.5] [0.6] [0.7] [0.8] [0.9]]
 [[0.6] [0.7] [0.8] [0.9] [1. ]]
 [[0.7] [0.8] [0.9] [1. ] [1.1]]
 [[0.8] [0.9] [1. ] [1.1] [1.2]]
 [[0.9] [1. ] [1.1] [1.2] [1.3]]]
# inspect the full dataset
for i in range(len(X)):
    print(X[i], Y[i])
print( X.shape, Y.shape )
[[0. ] [0.1] [0.2] [0.3] [0.4]] 0.5
[[0.1] [0.2] [0.3] [0.4] [0.5]] 0.6
[[0.2] [0.3] [0.4] [0.5] [0.6]] 0.7
[[0.3] [0.4] [0.5] [0.6] [0.7]] 0.8
[[0.4] [0.5] [0.6] [0.7] [0.8]] 0.9
[[0.5] [0.6] [0.7] [0.8] [0.9]] 1.0
[[0.6] [0.7] [0.8] [0.9] [1. ]] 1.1
[[0.7] [0.8] [0.9] [1. ] [1.1]] 1.2
[[0.8] [0.9] [1. ] [1.1] [1.2]] 1.3
[[0.9] [1. ] [1.1] [1.2] [1.3]] 1.4
(10, 5, 1) (10,)
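The window construction above generalizes to any 1-D series. A minimal helper sketch (the make_windows name and the series variable are illustrative, not from the original notebook):

def make_windows(series, window_size):
    # slide a window over a 1-D series; each input is the window (shape (window_size, 1)),
    # each target is the value immediately after it
    X, Y = [], []
    for start in range(len(series) - window_size):
        X.append(series[start:start + window_size].reshape(-1, 1))
        Y.append(series[start + window_size])
    return np.array(X), np.array(Y)

series = np.arange(15) / 10            # 0.0, 0.1, ..., 1.4
X2, Y2 = make_windows(series, 5)
print(X2.shape, Y2.shape)              # (10, 5, 1) (10,)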
model = tf.keras.Sequential([
    tf.keras.layers.SimpleRNN(units=10, return_sequences=False, input_shape=[5,1]),
    tf.keras.layers.Dense(1)
])
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN
model = Sequential()
model.add(SimpleRNN(10, return_sequences=True, input_shape=[5,1]))
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= simple_rnn (SimpleRNN) (None, 5, 10) 120 ================================================================= Total params: 120 Trainable params: 120 Non-trainable params: 0 _________________________________________________________________
model = Sequential()
model.add(SimpleRNN(10, return_sequences=False, input_shape=[5,1]))
model.summary()
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= simple_rnn_1 (SimpleRNN) (None, 10) 120 ================================================================= Total params: 120 Trainable params: 120 Non-trainable params: 0 _________________________________________________________________
model.compile(optimizer='adam', loss='mse')
model.summary()
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= simple_rnn_1 (SimpleRNN) (None, 10) 120 ================================================================= Total params: 120 Trainable params: 120 Non-trainable params: 0 _________________________________________________________________
model.fit(X, Y, epochs=50, verbose=1)
Epoch 1/50 - 1s 620ms/step - loss: 1.0241
Epoch 2/50 - 0s 4ms/step - loss: 1.0171
Epoch 3/50 - 0s 4ms/step - loss: 1.0103
Epoch 4/50 - 0s 4ms/step - loss: 1.0036
Epoch 5/50 - 0s 4ms/step - loss: 0.9970
Epoch 6/50 - 0s 4ms/step - loss: 0.9906
Epoch 7/50 - 0s 5ms/step - loss: 0.9842
Epoch 8/50 - 0s 4ms/step - loss: 0.9779
Epoch 9/50 - 0s 5ms/step - loss: 0.9717
Epoch 10/50 - 0s 4ms/step - loss: 0.9657
Epoch 11/50 - 0s 3ms/step - loss: 0.9598
Epoch 12/50 - 0s 5ms/step - loss: 0.9539
Epoch 13/50 - 0s 4ms/step - loss: 0.9482
Epoch 14/50 - 0s 3ms/step - loss: 0.9426
Epoch 15/50 - 0s 4ms/step - loss: 0.9371
Epoch 16/50 - 0s 4ms/step - loss: 0.9317
Epoch 17/50 - 0s 4ms/step - loss: 0.9264
Epoch 18/50 - 0s 4ms/step - loss: 0.9212
Epoch 19/50 - 0s 4ms/step - loss: 0.9161
Epoch 20/50 - 0s 4ms/step - loss: 0.9110
Epoch 21/50 - 0s 5ms/step - loss: 0.9061
Epoch 22/50 - 0s 4ms/step - loss: 0.9012
Epoch 23/50 - 0s 4ms/step - loss: 0.8964
Epoch 24/50 - 0s 4ms/step - loss: 0.8917
Epoch 25/50 - 0s 4ms/step - loss: 0.8870
Epoch 26/50 - 0s 4ms/step - loss: 0.8824
Epoch 27/50 - 0s 4ms/step - loss: 0.8778
Epoch 28/50 - 0s 4ms/step - loss: 0.8733
Epoch 29/50 - 0s 3ms/step - loss: 0.8689
Epoch 30/50 - 0s 3ms/step - loss: 0.8644
Epoch 31/50 - 0s 4ms/step - loss: 0.8600
Epoch 32/50 - 0s 4ms/step - loss: 0.8557
Epoch 33/50 - 0s 4ms/step - loss: 0.8513
Epoch 34/50 - 0s 5ms/step - loss: 0.8470
Epoch 35/50 - 0s 3ms/step - loss: 0.8427
Epoch 36/50 - 0s 4ms/step - loss: 0.8384
Epoch 37/50 - 0s 6ms/step - loss: 0.8341
Epoch 38/50 - 0s 4ms/step - loss: 0.8298
Epoch 39/50 - 0s 5ms/step - loss: 0.8256
Epoch 40/50 - 0s 4ms/step - loss: 0.8213
Epoch 41/50 - 0s 4ms/step - loss: 0.8170
Epoch 42/50 - 0s 4ms/step - loss: 0.8127
Epoch 43/50 - 0s 4ms/step - loss: 0.8085
Epoch 44/50 - 0s 4ms/step - loss: 0.8042
Epoch 45/50 - 0s 4ms/step - loss: 0.7999
Epoch 46/50 - 0s 4ms/step - loss: 0.7956
Epoch 47/50 - 0s 4ms/step - loss: 0.7912
Epoch 48/50 - 0s 3ms/step - loss: 0.7869
Epoch 49/50 - 0s 4ms/step - loss: 0.7825
Epoch 50/50 - 0s 4ms/step - loss: 0.7781
<keras.callbacks.History at 0x1dba66c8b80>
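The per-epoch log can be suppressed and the losses read back from the returned History object instead; a short sketch under the same model and data:

history = model.fit(X, Y, epochs=50, verbose=0)                   # continue training silently
print(history.history['loss'][0], history.history['loss'][-1])    # first vs. last loss of this run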
print(X.shape, X)
(10, 5, 1) [[[0. ] [0.1] [0.2] [0.3] [0.4]]
 [[0.1] [0.2] [0.3] [0.4] [0.5]]
 [[0.2] [0.3] [0.4] [0.5] [0.6]]
 [[0.3] [0.4] [0.5] [0.6] [0.7]]
 [[0.4] [0.5] [0.6] [0.7] [0.8]]
 [[0.5] [0.6] [0.7] [0.8] [0.9]]
 [[0.6] [0.7] [0.8] [0.9] [1. ]]
 [[0.7] [0.8] [0.9] [1. ] [1.1]]
 [[0.8] [0.9] [1. ] [1.1] [1.2]]
 [[0.9] [1. ] [1.1] [1.2] [1.3]]]
print(Y)  # ground-truth targets
pred = model.predict(X)
np.max(pred, axis=0)   # pred has shape (10, 10) here (raw SimpleRNN outputs); this is the per-unit maximum over all samples
[0.5 0.6 0.7 0.8 0.9 1.  1.1 1.2 1.3 1.4]
1/1 [==============================] - 0s 136ms/step
array([ 0.76799405, -0.19018568,  0.7338462 ,  0.35154802,  0.35796756,
       -0.18646191,  0.24571645,  0.44652814,  0.7476799 ,  0.92708737],
      dtype=float32)
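Note that the model trained above has no Dense(1) head, so predict(X) returns the raw (10, 10) SimpleRNN activations and the array printed here is just their per-unit maximum, not a next-value forecast. A sketch of the usual setup, reusing the first architecture defined earlier (the epoch count is an assumption for this tiny dataset):

full_model = tf.keras.Sequential([
    tf.keras.layers.SimpleRNN(units=10, return_sequences=False, input_shape=[5, 1]),
    tf.keras.layers.Dense(1)                      # scalar next-value prediction
])
full_model.compile(optimizer='adam', loss='mse')
full_model.fit(X, Y, epochs=500, verbose=0)       # assumed epoch count for this toy series

pred = full_model.predict(X).flatten()
for target, p in zip(Y, pred):
    print(f"target={target:.1f}  predicted={p:.3f}")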