
Python Artificial Neural Network Examples

Activation Functions

Step Function: outputs only 0 or 1

Example
import numpy as np
import matplotlib.pyplot as plt

def step_function(x):
    # boolean array (x > 0) cast to int: 0 where x <= 0, 1 where x > 0
    return np.array(x > 0, dtype=int)

x = np.arange(-5, 6, 0.1)
res = step_function(x)
plt.plot(x, res)
plt.show()

Sigmoid Function

Example
# h(x) = 1 / (1 + exp(-x)), where e = 2.718281828... (Euler's number)
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
x = np.arange(-5.0,5.0,0.1)
y = sigmoid(x)
plt.figure(figsize=(3,2))
plt.plot(x,y)
plt.ylim(-0.1,1.1)
plt.show()

Example
# Plot the step function and the sigmoid on one chart to compare them
plt.figure(figsize=(5,2))
x = np.arange(-10,10,0.1)
y = step_function(x)
y2 = sigmoid(x)
plt.plot(x,y,'r--',label='step function')
plt.plot(x,y2,label='sigmoid')
plt.legend()
plt.show()


ReLU (Rectified Linear Unit): outputs 0 when x <= 0, and x when x > 0

Example
def relu(x):
    return np.maximum(0, x) # element-wise comparison of the two arguments, returning the larger value
arr = np.arange(-10,11,1)
y = relu(arr)

plt.figure(figsize=(5,2))
plt.plot(arr,y)
plt.show()

Softmax: multi-class classification
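
Softmax turns a vector of raw scores into probabilities, one per class, that sum to 1. A minimal sketch in plain NumPy (the softmax helper and the sample scores below are illustrative, not part of the original post):

import numpy as np

def softmax(x):
    e = np.exp(x - np.max(x))  # subtract the max first for numerical stability
    return e / e.sum()

scores = np.array([2.0, 1.0, 0.1])  # raw scores for 3 classes
probs = softmax(scores)
print(probs)        # roughly [0.659 0.242 0.099] - one probability per class
print(probs.sum())  # 1.0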

Example
# softmax: used for multi-class classification
# produces as many values as there are classes
# the produced values sum to 1
#!pip install tensorflow
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

np.random.seed(0)
tf.random.set_seed(3)
from sklearn.model_selection import train_test_split
import pandas as pd
df = pd.read_csv('housing.csv',delim_whitespace=True,header=None) # whitespace-separated, no header row
df
         0     1      2  3      4      5     6       7  8      9    10      11    12    13
0    0.00632  18.0   2.31  0  0.538  6.575  65.2  4.0900  1  296.0  15.3  396.90  4.98  24.0
1    0.02731   0.0   7.07  0  0.469  6.421  78.9  4.9671  2  242.0  17.8  396.90  9.14  21.6
2    0.02729   0.0   7.07  0  0.469  7.185  61.1  4.9671  2  242.0  17.8  392.83  4.03  34.7
3    0.03237   0.0   2.18  0  0.458  6.998  45.8  6.0622  3  222.0  18.7  394.63  2.94  33.4
4    0.06905   0.0   2.18  0  0.458  7.147  54.2  6.0622  3  222.0  18.7  396.90  5.33  36.2
..       ...   ...    ... ..    ...    ...   ...     ... ..    ...   ...     ...   ...   ...
501  0.06263   0.0  11.93  0  0.573  6.593  69.1  2.4786  1  273.0  21.0  391.99  9.67  22.4
502  0.04527   0.0  11.93  0  0.573  6.120  76.7  2.2875  1  273.0  21.0  396.90  9.08  20.6
503  0.06076   0.0  11.93  0  0.573  6.976  91.0  2.1675  1  273.0  21.0  396.90  5.64  23.9
504  0.10959   0.0  11.93  0  0.573  6.794  89.3  2.3889  1  273.0  21.0  393.45  6.48  22.0
505  0.04741   0.0  11.93  0  0.573  6.030  80.8  2.5050  1  273.0  21.0  396.90  7.88  11.9

506 rows × 14 columns

dataset = df.values

X = dataset[:,:13] # the 13 input features
y = dataset[:,13]  # target: median house price

X_train, X_test, y_train, y_test = train_test_split(X,y,test_size=0.3,random_state=0)
X_train.shape # (354, 13)
X_test.shape # (152, 13)
y_train.shape # (354,)
y_test.shape # (152,)

model = Sequential()
model.add(Dense(30,input_dim=13, activation='relu')) # input: 13 features
model.add(Dense(20,activation='relu')) # hidden layer
model.add(Dense(10,activation='relu')) # hidden layer
model.add(Dense(15,activation='relu')) # hidden layer
model.add(Dense(1)) # output: one predicted price (linear activation, for regression)

model.summary()
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense_2 (Dense)             (None, 30)                420       
                                                                 
 dense_3 (Dense)             (None, 20)                620       
                                                                 
 dense_4 (Dense)             (None, 10)                210       
                                                                 
 dense_5 (Dense)             (None, 15)                165       
                                                                 
 dense_6 (Dense)             (None, 1)                 16        
                                                                 
=================================================================
Total params: 1,431
Trainable params: 1,431
Non-trainable params: 0
_________________________________________________________________
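
The parameter counts follow from (inputs × units) weights plus one bias per unit: 13×30+30 = 420, 30×20+20 = 620, 20×10+10 = 210, 10×15+15 = 165, and 15×1+1 = 16, which gives the 1,431 total.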

# loss: mean squared error (MSE), optimizer: Adam
model.compile(loss='mse', optimizer='adam')

# train and record the history
history = model.fit(X_train,y_train,epochs=200,batch_size=10) # epochs: passes over the training set; batch_size: weights are updated from the loss after every 10 samples (one sample = one 13-feature row)

# house prices predicted by the network
model.predict(X_test).flatten()
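
To get a feel for the quality of the predictions, they can be compared with the actual test-set prices. A sketch using the variables defined above (the printout format is illustrative):

y_pred = model.predict(X_test).flatten()
for actual, pred in zip(y_test[:5], y_pred[:5]):
    print(f'actual: {actual:.1f}, predicted: {pred:.1f}')
print('test MSE:', ((y_test - y_pred) ** 2).mean())  # same metric the model was trained on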

# training history: loss recorded at each epoch
history.history['loss']

# plot the loss curve over the epochs
plt.plot(history.history['loss'])
plt.show()
