# Author - Mohit Rathore mrmohitrathoremr@gmail.com - markroxor.in
# Licensed under The MIT License - https://opensource.org/licenses/MIT
from fromscratchtoml.neural_network import Activations
from fromscratchtoml.toolbox import binary_visualize
import numpy as np
# suppress exponential (scientific) notation when printing arrays
np.set_printoptions(suppress=True, formatter={'float': '{: 0.8f}'.format})
%matplotlib inline
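# Linear (identity) activation: f(x) = x.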
X = np.linspace(-5, 5, 20)
Y = Activations.linear(X)
XY = np.stack([X, Y], axis=1)
print(repr(Y))
binary_visualize(XY)
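# Hyperbolic tangent: f(x) = tanh(x), squashes inputs to (-1, 1).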
X = np.linspace(-5, 5, 20)
Y = Activations.tanh(X)
XY = np.stack([X, Y], axis=1)
print(repr(Y))
binary_visualize(XY)
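# Sigmoid: f(x) = 1 / (1 + e^(-x)), squashes inputs to (0, 1).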
X = np.linspace(-5, 5, 20)
Y = Activations.sigmoid(X)
XY = np.stack([X, Y], axis=1)
print(repr(Y))
binary_visualize(XY)
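# Rectified linear unit (ReLU): f(x) = max(0, x).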
X = np.linspace(-5, 5, 20)
Y = Activations.relu(X)
XY = np.stack([X, Y], axis=1)
print(repr(Y))
binary_visualize(XY)
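# Leaky ReLU: f(x) = x for x > 0, alpha * x otherwise, where alpha is a small
# positive slope (the exact default used by the library is not shown here).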
X = np.linspace(-5, 5, 20)
Y = Activations.leaky_relu(X)
XY = np.stack([X, Y], axis=1)
print(repr(Y))
binary_visualize(XY)
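# Softmax: f(x_i) = e^(x_i) / sum_j e^(x_j); applied to the whole 20-point
# vector, so the outputs are positive and sum to 1.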
X = np.linspace(-5, 5, 20)
Y = Activations.softmax(X)
XY = np.stack([X, Y], axis=1)
print(repr(Y))
binary_visualize(XY)
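# Quick sanity check (a sketch, not part of the original notebook): the values
# printed above should match plain NumPy implementations of the conventional
# definitions, assuming fromscratchtoml uses the standard formulas.
# leaky_relu is omitted because its negative-input slope depends on the
# library's default, which this notebook does not show.
X = np.linspace(-5, 5, 20)
assert np.allclose(Activations.linear(X), X)
assert np.allclose(Activations.tanh(X), np.tanh(X))
assert np.allclose(Activations.sigmoid(X), 1 / (1 + np.exp(-X)))
assert np.allclose(Activations.relu(X), np.maximum(0, X))
assert np.allclose(Activations.softmax(X), np.exp(X) / np.exp(X).sum())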