import keras
import tensorflow as tf
print(tf.__version__)
print(keras.__version__)
2.10.0
2.10.0
import numpy as np
x = np.array(12)
print("x의 차원 : ",x.ndim) # 차원 확인
print("x의 shape : ",x.shape)
print("x의 값 : ", x)
ndim of x : 0
shape of x : ()
value of x : 12
x = np.array([10,20,30,40,50])
print("x의 차원 : ",x.ndim) # 차원 확인
print("x의 shape : ",x.shape)
print("x의 값 : ", x)
ndim of x : 1
shape of x : (5,)
value of x : [10 20 30 40 50]
x = np.array([[11, 21, 31],
              [12, 22, 32],
              [13, 23, 33]])
print("x의 차원 : ",x.ndim) # 차원 확인
print("x의 shape : ",x.shape)
print("x의 값 : ", x)
ndim of x : 2
shape of x : (3, 3)
value of x : [[11 21 31]
 [12 22 32]
 [13 23 33]]
x = np.array([
    [[11, 21, 31],
     [12, 22, 32],
     [13, 23, 33]],
    [[11, 21, 31],
     [12, 22, 32],
     [13, 23, 33]],
    [[11, 21, 31],
     [12, 22, 32],
     [13, 23, 33]]
])
print(x.ndim)
print(x.shape)
x
3
(3, 3, 3)
array([[[11, 21, 31],
        [12, 22, 32],
        [13, 23, 33]],

       [[11, 21, 31],
        [12, 22, 32],
        [13, 23, 33]],

       [[11, 21, 31],
        [12, 22, 32],
        [13, 23, 33]]])
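# One rank higher, for comparison (a small sketch, not in the original cells):
# a 4D tensor such as a batch of RGB images, with axes
# (samples, height, width, channels). The shape values here are arbitrary examples.
x4 = np.zeros((8, 28, 28, 3))
print(x4.ndim)   # 4
print(x4.shape)  # (8, 28, 28, 3)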
from keras.datasets import mnist
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
# check the ndim, shape, and dtype of the image data
print(train_images.ndim)
print(train_images.shape)
print(train_images.dtype)
print(train_images[0])
3
(60000, 28, 28)
uint8
(the full 28x28 array of uint8 pixel values for the first training image is printed: mostly zeros, with the handwritten digit '5' drawn in values up to 255)
# check the ndim, shape, and dtype of the target (label) data
print(train_labels.ndim)
print(train_labels.shape)
print(train_labels.dtype)
print(train_labels[0:10])
1
(60000,)
uint8
[5 0 4 1 9 2 1 3 1 4]
import matplotlib.pyplot as plt
image = train_images[5]
print(image.shape)
plt.imshow(image, cmap=plt.cm.binary)
plt.show()
(28, 28)
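# Quick sanity check (an extra step, assuming the same session): the label for
# the image displayed above. From train_labels[0:10] printed earlier, index 5 is 2.
print(train_labels[5])  # 2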
# axis order of train_images: (samples, rows, columns)
# select samples 10 to 49 (40 images)
my_slice = train_images[10:50]
print(my_slice.shape)
(40, 28, 28)
my_slice = train_images[10:50, :, :]  # same as the previous slice
print(my_slice.shape)
(40, 28, 28)
my_slice = train_images[10:50, 0:28, 0:28]  # same as the previous slice
print(my_slice.shape)
(40, 28, 28)
my_slice = train_images[:, 14:, 14:]
print(my_slice.shape)
(60000, 14, 14)
image = my_slice[5]
plt.imshow(image, cmap=plt.cm.binary)
plt.show()
# first batch of 128 images
batch = train_images[:128]
# the next batch
batch = train_images[128:256]
# the n-th batch
# batch = train_images[128 * n : 128 * (n + 1)]
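# A minimal sketch of the batch indexing above wrapped in a helper function;
# the name get_batch is hypothetical, not a Keras API.
def get_batch(images, n, batch_size=128):
    """Return the n-th batch of `batch_size` images."""
    return images[batch_size * n : batch_size * (n + 1)]

print(get_batch(train_images, 0).shape)  # (128, 28, 28)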
from keras.datasets import mnist
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
train_images = train_images.reshape((60000, 28*28))
train_images.shape
(60000, 784)
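# A common follow-up step (an assumption, not shown in the original cell):
# cast the flattened pixels to float32 and scale them into [0, 1]
# before feeding them to a network.
train_images = train_images.astype("float32") / 255
print(train_images.dtype, train_images.min(), train_images.max())  # float32 0.0 1.0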
x = np.array([[0., 1.],
              [2., 3.],
              [4., 5.]])
print(x.shape)
x1 = x.reshape((6,1))
x1.shape
(3, 2)
(6, 1)
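# reshape only rearranges the elements into a new shape; transposing swaps axes.
# A small sketch for comparison (not in the original cells).
x2 = np.transpose(x)  # same as x.T
print(x2.shape)       # (2, 3)
print(x2)             # [[0. 2. 4.]
                      #  [1. 3. 5.]]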
import mglearn
# requires graphviz to be installed - can be viewed in Colab
# perceptron illustration plots
# mglearn.plots.plot_logistic_regression_graph()
# mglearn.plots.plot_single_hidden_layer_graph()
import numpy as np
import matplotlib.pyplot as plt
line = np.linspace(-3, 3, 100)
tanh_line = np.tanh(line)
relu_line = np.maximum(line, 0)  # element-wise maximum of the array and 0 (ReLU)
sig_line = 1/(1+np.exp(-line))
step_line = line.copy()
step_line[step_line <= 0] = 0
step_line[step_line > 0] = 1
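# An equivalent, shorter way to build the step function (a sketch, not in the
# original cell), using np.where instead of mutating a copy.
step_line_alt = np.where(line > 0, 1.0, 0.0)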
# show negative numbers with an ASCII minus sign (avoids broken minus glyphs with some fonts)
import matplotlib
matplotlib.rcParams['axes.unicode_minus'] = False
plt.rcParams["figure.figsize"] = (14,10)
fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2)
# step function
ax1.plot(line, step_line, label='step', color='red')
ax1.legend(loc='best')
# sigmoid function
ax2.plot(line, sig_line, label='sigmoid', color='green')
ax2.legend(loc='best')
# Hyperbolic Tangent(tanh)
ax3.plot(line, tanh_line, label='tanh')
ax3.legend(loc='best')
# relu
ax4.plot(line, relu_line, label='relu', color='orange')
ax4.legend(loc='best')
<matplotlib.legend.Legend at 0x17c8a3f9820>
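# A minimal sketch (an assumption, not part of the original cells) of how these
# activation functions are selected when defining Keras layers.
from keras import models, layers

model = models.Sequential([
    layers.Dense(512, activation="relu", input_shape=(28 * 28,)),  # hidden layer with ReLU
    layers.Dense(10, activation="softmax"),                        # 10-class output
])
model.summary()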