activations.py
import numpy as np


def relu(x):
    ''' Rectified Linear Unit (ReLU) '''
    return np.maximum(0., x)


def drelu(x):
    ''' derivative of ReLU '''
    y = np.zeros(x.shape)
    y[np.where(x > 0)] = 1
    return y


def softmax(x):
    ''' softmax '''
    # Shift by the row-wise maximum so np.exp cannot overflow on large inputs;
    # the shift cancels in the ratio, so the result is unchanged.
    z = np.exp(x - np.max(x, axis=-1, keepdims=True))
    return z / np.sum(z, axis=-1, keepdims=True)
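
A quick sanity check of these helpers might look like the sketch below; the sample values are illustrative, and it assumes the file above is importable as `activations`:

import numpy as np

from activations import relu, drelu, softmax  # assumption: activations.py is on the import path

x = np.array([[-1.0, 0.0, 2.0],
              [3.0, -0.5, 0.5]])

print(relu(x))    # negatives clamped to 0: [[0. 0. 2. ] [3. 0. 0.5]]
print(drelu(x))   # 1 where x > 0, else 0:  [[0. 0. 1. ] [1. 0. 1. ]]

p = softmax(x)
print(p)                   # each row becomes a probability distribution
print(np.sum(p, axis=-1))  # rows sum to 1: [1. 1.]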