TE ANN Practical 01 — Plotting Activation Functions (Sigmoid, Tanh, ReLU, Softmax)
"""Practical 01: Plot common neural-network activation functions.

Defines sigmoid, tanh, ReLU and softmax, then plots each over a
user-supplied range of x values in a 2x2 grid of subplots.
"""
import numpy as np


def sigmoid(x):
    """Logistic sigmoid: squashes x element-wise into (0, 1)."""
    return 1 / (1 + np.exp(-x))


def tanh(x):
    """Hyperbolic tangent: squashes x element-wise into (-1, 1)."""
    return np.tanh(x)


def relu(x):
    """Rectified Linear Unit: max(0, x), element-wise."""
    return np.maximum(0, x)


def softmax(x):
    """Numerically stabilized softmax over axis 0.

    Subtracting max(x) before exponentiating prevents overflow
    without changing the result (the shift cancels in the ratio).
    """
    e_x = np.exp(x - np.max(x))
    return e_x / e_x.sum(axis=0)


def main():
    """Prompt for an x-range and plot the four activation functions."""
    # Imported here so the activation functions above remain usable
    # (and importable) on systems without a matplotlib backend.
    import matplotlib.pyplot as plt

    start = float(input("Enter start value for x (e.g., -10): "))
    end = float(input("Enter end value for x (e.g., 10): "))
    x = np.linspace(start, end, 100)

    plt.figure(figsize=(10, 8))

    plt.subplot(2, 2, 1)
    plt.plot(x, sigmoid(x))
    plt.title('Sigmoid Activation Function')
    plt.grid(True)

    plt.subplot(2, 2, 2)
    plt.plot(x, tanh(x))
    plt.title('Tanh Activation Function')
    plt.grid(True)

    plt.subplot(2, 2, 3)
    plt.plot(x, relu(x))
    plt.title('ReLU Activation Function')
    plt.grid(True)

    plt.subplot(2, 2, 4)
    # NOTE(review): the source paste was truncated after "plt.plot(x, softm";
    # reconstructed to match the pattern of the three subplots above.
    plt.plot(x, softmax(x))
    plt.title('Softmax Activation Function')
    plt.grid(True)

    plt.tight_layout()
    plt.show()


if __name__ == "__main__":
    main()