본문 바로가기
Data Science/머신러닝&딥러닝 기초 이론

[딥러닝 실습] 활성화 함수 연습(step, sigmoid, relu function)

by titaniumm 2020. 4. 14.

# 활성화 함수(step function, sigmoid 그려보기)
import numpy as np
import matplotlib.pyplot as plt

def step_function(x):
    """Element-wise step (Heaviside) function.

    Returns 1 where x > 0 and 0 otherwise, as an integer ndarray.

    Parameters
    ----------
    x : np.ndarray
        Input array of any shape.

    Returns
    -------
    np.ndarray
        Integer array of the same shape as ``x``.
    """
    y = x > 0
    # NOTE: the original used astype(np.int); the np.int alias was deprecated
    # in NumPy 1.20 and removed in 1.24, so it crashes on modern NumPy.
    # The builtin int is the correct, version-stable replacement.
    return y.astype(int)

def sigmoid(x):
    """Logistic sigmoid, 1 / (1 + e^(-x)), applied element-wise.

    Maps any real input into the open interval (0, 1); sigmoid(0) == 0.5.
    """
    neg_exp = np.exp(-x)
    return 1.0 / (1.0 + neg_exp)

def relu(x):
    """Element-wise ReLU: max(0, x).

    Parameters
    ----------
    x : np.ndarray
        Input array of any shape.

    Returns
    -------
    np.ndarray
        Array of the same shape with negative entries clamped to 0.
    """
    return np.maximum(0, x)


# Backward-compatible alias: the original defined this under the
# misspelled name "rulu", so keep that name working for any callers.
rulu = relu

# Sample the interval [-5, 5) densely and evaluate both activations.
xs = np.arange(-5.0, 5.0, 0.01)
step_vals = step_function(xs)
sigmoid_vals = sigmoid(xs)

# Draw both curves on a single axis: step in red, sigmoid in blue.
plt.plot(xs, step_vals, "r", label="step function")
plt.plot(xs, sigmoid_vals, "b", label="sigmoid function")
plt.legend(loc="lower right")

# Frame the plot with a little vertical headroom around [0, 1].
plt.xlim(-5, 5)
plt.ylim(-0.1, 1.1)
plt.xlabel('$x$')
plt.ylabel("activation function value")
plt.title("graph of activation functions")
plt.grid(True)
plt.show()

댓글