Activation Functions
Step Function: outputs only 0 or 1
Example
import numpy as np

def step_function(x):
    # 1 where x > 0, else 0 (boolean array cast to int)
    return np.array(x > 0, dtype=int)

x = np.arange(-5, 6, 0.1)
res = step_function(x)
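A one-line sanity check (not in the original) that nothing besides 0 and 1 comes out:

print(np.unique(res))  # [0 1]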
Sigmoid Function
Example
# h(x) = 1 / (1 + exp(-x)), where e = 2.718281828...
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

x = np.arange(-5.0, 5.0, 0.1)
y = sigmoid(x)
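Unlike the step function, sigmoid maps any input smoothly into the open interval (0, 1); for instance:

print(y.min(), y.max())  # both strictly between 0 and 1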
Example
# Plot the step function and sigmoid outputs on one chart for comparison
import matplotlib.pyplot as plt

plt.figure(figsize=(5, 2))
x = np.arange(-10, 10, 0.1)
y = step_function(x)
y2 = sigmoid(x)
plt.plot(x, y, 'r--', label='step function')
plt.plot(x, y2, label='sigmoid')
plt.legend()
plt.show()
ReLU (Rectified Linear Unit): outputs 0 when x <= 0, and x when x > 0
Example
def relu(x):
    return np.maximum(0, x)  # element-wise maximum of 0 and x

arr = np.arange(-10, 11, 1)
y = relu(arr)
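Negative inputs are clipped to 0 while positive values pass through unchanged; for example:

print(relu(np.array([-3, 0, 5])))  # [0 0 5]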
Softmax: multi-class classification
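The Keras example below builds a full network, but softmax itself is simple. Here is a minimal NumPy sketch (not in the original) demonstrating the two properties noted in the example's comments: one score per class, and a sum of 1.

def softmax(x):
    e = np.exp(x - np.max(x))  # subtract the max for numerical stability
    return e / e.sum()

scores = np.array([2.0, 1.0, 0.1])  # raw scores for 3 classes
probs = softmax(scores)
print(probs)        # one probability per class
print(probs.sum())  # 1.0

In Keras a multi-class output layer would be Dense(n_classes, activation='softmax'). Note that the housing example below is actually a regression, so its output layer uses no activation at all.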
Example
# softmax: multi-class classification
# produces one value per output class
# the produced values sum to 1
#!pip install tensorflow
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
np.random.seed(0)
tf.random.set_seed(3)
from sklearn.model_selection import train_test_split
import pandas as pd
df = pd.read_csv('housing.csv', delim_whitespace=True, header=None)
| | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.00632 | 18.0 | 2.31 | 0 | 0.538 | 6.575 | 65.2 | 4.0900 | 1 | 296.0 | 15.3 | 396.90 | 4.98 | 24.0 |
| 1 | 0.02731 | 0.0 | 7.07 | 0 | 0.469 | 6.421 | 78.9 | 4.9671 | 2 | 242.0 | 17.8 | 396.90 | 9.14 | 21.6 |
| 2 | 0.02729 | 0.0 | 7.07 | 0 | 0.469 | 7.185 | 61.1 | 4.9671 | 2 | 242.0 | 17.8 | 392.83 | 4.03 | 34.7 |
| 3 | 0.03237 | 0.0 | 2.18 | 0 | 0.458 | 6.998 | 45.8 | 6.0622 | 3 | 222.0 | 18.7 | 394.63 | 2.94 | 33.4 |
| 4 | 0.06905 | 0.0 | 2.18 | 0 | 0.458 | 7.147 | 54.2 | 6.0622 | 3 | 222.0 | 18.7 | 396.90 | 5.33 | 36.2 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 501 | 0.06263 | 0.0 | 11.93 | 0 | 0.573 | 6.593 | 69.1 | 2.4786 | 1 | 273.0 | 21.0 | 391.99 | 9.67 | 22.4 |
| 502 | 0.04527 | 0.0 | 11.93 | 0 | 0.573 | 6.120 | 76.7 | 2.2875 | 1 | 273.0 | 21.0 | 396.90 | 9.08 | 20.6 |
| 503 | 0.06076 | 0.0 | 11.93 | 0 | 0.573 | 6.976 | 91.0 | 2.1675 | 1 | 273.0 | 21.0 | 396.90 | 5.64 | 23.9 |
| 504 | 0.10959 | 0.0 | 11.93 | 0 | 0.573 | 6.794 | 89.3 | 2.3889 | 1 | 273.0 | 21.0 | 393.45 | 6.48 | 22.0 |
| 505 | 0.04741 | 0.0 | 11.93 | 0 | 0.573 | 6.030 | 80.8 | 2.5050 | 1 | 273.0 | 21.0 | 396.90 | 7.88 | 11.9 |

506 rows × 14 columns
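With header=None the columns stay numeric. Assuming this file is the classic Boston housing dataset (its values match that layout), the columns could be labeled for readability; the names below are that standard layout, not something in the original:

df.columns = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS',
              'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT', 'MEDV']  # assumed Boston layout

Under that assumption, column 13 (MEDV, the median home value) is the target extracted as y below.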
dataset = df.values
X = dataset[:, :13]  # features: columns 0-12
y = dataset[:, 13]   # target: column 13 (home price)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
X_train.shape # (354, 13)
X_test.shape # (152, 13)
y_train.shape # (354,)
y_test.shape # (152,)
model = Sequential()
model.add(Dense(30, input_dim=13, activation='relu'))  # first layer: 13 inputs
model.add(Dense(20, activation='relu'))  # hidden layer
model.add(Dense(10, activation='relu'))  # hidden layer
model.add(Dense(15, activation='relu'))  # hidden layer
model.add(Dense(1))  # output: one predicted price
model.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_2 (Dense)              (None, 30)                420
dense_3 (Dense)              (None, 20)                620
dense_4 (Dense)              (None, 10)                210
dense_5 (Dense)              (None, 15)                165
dense_6 (Dense)              (None, 1)                 16
=================================================================
Total params: 1,431
Trainable params: 1,431
Non-trainable params: 0
_________________________________________________________________
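Each Dense layer holds (inputs × units) weights plus units biases, which reproduces the parameter counts above; a quick check:

# (inputs * units) + units biases for each Dense layer
for n_in, units in [(13, 30), (30, 20), (20, 10), (10, 15), (15, 1)]:
    print(n_in * units + units)  # 420, 620, 210, 165, 16 -> total 1,431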
# loss function: MSE; optimizer: adam
model.compile(loss='mse', optimizer='adam')
# training history
# epochs: number of passes over the training data; batch_size: process 10 samples,
# then adjust the weights from the loss (one sample here is a 13-feature input)
history = model.fit(X_train, y_train, epochs=200, batch_size=10)
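Given the 354 training samples, batch_size=10 means each epoch performs ceil(354/10) = 36 weight updates; as a quick arithmetic check:

import math
print(math.ceil(354 / 10))  # 36 gradient updates per epoch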
# house prices computed by the neural network
model.predict(X_test).flatten()
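To judge the predictions, one might print a few next to the actual prices (a sketch, not part of the original):

pred = model.predict(X_test).flatten()
for p, actual in zip(pred[:5], y_test[:5]):
    print(f'predicted: {p:.1f}  actual: {actual:.1f}')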
# training history: the loss recorded at each epoch
history.history['loss']
# training history graph
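A minimal sketch of that graph in the matplotlib style used earlier:

plt.figure(figsize=(5, 2))
plt.plot(history.history['loss'], label='train loss')
plt.xlabel('epoch')
plt.ylabel('mse')
plt.legend()
plt.show()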