# Activation functions in neural networks

import numpy as np
import matplotlib.pyplot as plt

# NOTE(review): the original fragment "'seaborn')" looks like the tail of
# plt.style.use('seaborn'). That style name was renamed to 'seaborn-v0_8'
# in matplotlib >= 3.6, so fall back gracefully on newer versions.
try:
    plt.style.use('seaborn')
except OSError:
    plt.style.use('seaborn-v0_8')


def softmax(x):
    """Numerically stable softmax: exp(x - max(x)) normalized to sum to 1.

    Subtracting the max before exponentiating prevents overflow without
    changing the result.
    """
    e_x = np.exp(x - np.max(x))
    return e_x / e_x.sum()


def softplus(x):
    """Softplus, log(1 + exp(x)) — a smooth approximation of ReLU."""
    return np.log(1 + np.exp(x))


def sigmoid(x):
    """Logistic sigmoid, 1 / (1 + exp(-x)), squashing x into (0, 1)."""
    return 1 / (1 + np.exp(-x))


def ReLU(x):
    """Rectified linear unit, elementwise max(x, 0).

    np.maximum is used instead of the builtin max(x, 0) so the function
    accepts NumPy arrays as well as scalars (builtin max raises on arrays),
    while returning the same values for scalar inputs.
    """
    return np.maximum(x, 0)


# Plot of some activation functions that can be used in neural networks.
xs = np.linspace(-10, 10, 30)
plt.plot(xs, softmax(xs), ls='-', lw=4, alpha=0.5, color='#B3C6FF', label='Softmax')
plt.plot(xs, softplus(xs), ls='-', lw=4, alpha=0.5, color='#C6B3FF', label='Softplus')
plt.plot(xs, sigmoid(xs), ls='-', lw=4, alpha=0.5, color='#ECB3FF', label='Sigmoid')
# ReLU is now vectorized, so no per-element list comprehension is needed.
plt.plot(xs, ReLU(xs), ls='-', lw=4, alpha=0.5, color='#FFB3EC', label='ReLU')
plt.legend()
plt.tight_layout()