[Deep Learning Practice] Activation function exercises (step, sigmoid, relu functions)
# Activation functions (drawing the step and sigmoid functions)
import numpy as np
import matplotlib.pyplot as plt

def step_function(x):
    y = x > 0                     # boolean array: True where x is positive
    return y.astype(int)          # astype() casts to the given type (np.int was removed in recent NumPy)

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def relu(x):                      # fixed typo: was "rulu"
    return np.maximum(0, x)

x = np.arange(-5.0, 5.0, 0.01)
y1 = step_function(x)
y2 = sigmoid(x)
plt.plot(x, y1, "r", label="step function")
plt.plot(x, y2, "b", label="sigmoid function")
plt.legend()
plt.show()
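The snippet defines relu() but the visible portion only plots the step and sigmoid curves. Below is a minimal sketch of plotting ReLU over the same input range; the green color and the y-axis limits are my own assumptions, not taken from the original post.

import numpy as np
import matplotlib.pyplot as plt

def relu(x):
    return np.maximum(0, x)       # ReLU: element-wise max(0, x)

x = np.arange(-5.0, 5.0, 0.01)
plt.plot(x, relu(x), "g", label="relu function")  # "g" (green) is an assumed color choice
plt.ylim(-0.5, 5.5)               # assumed axis limits for readability
plt.legend()
plt.show()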
2020. 4. 14.