Posts

DS Lab Practical

DS lab manual — Practical No.1 DATA WRANGLING I import pandas as pd import numpy as np import seaborn as sns df = sns.load_dataset('iris') print(df.head()) print(df.shape) print(df.isnull().sum()) print(df.describe()) df.info() print(df.dtypes) numeric_cols = df.select_dtypes(include=['int64', 'float64']).columns df[numeric_cols] = (df[numeric_cols] - df[numeric_cols].min()) / (df[numeric_cols].max() - df[numeric_cols].min()) print(df.head()) df_encoded = pd.get_dummies(df, drop_first=True) print(df_encoded.head()) Practical No.2 DATA WRANGLING II import pandas as pd import numpy as np df = pd.DataFrame({ 'Name': ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'], 'Age': [20, 21, np.nan, 22, 20, 23, 21, 50], 'Study_Hours': [2, 3, 4, np.nan, 5, 6, 2, 20], 'Attendance': [80, 85, 90, 88, np.nan, 92, 87, 95], 'Marks': [60, 65, 70, 75, 80, 85, 90, 200] }) print(df) print(df....

TE ANN Practical

Practical 01. import numpy as np import matplotlib.pyplot as plt # Activation Functions def sigmoid ( x ):     return 1 / ( 1 + np.exp( - x)) def tanh ( x ):     return np.tanh(x) def relu ( x ):     return np.maximum( 0 , x) def softmax ( x ):     # Stabilized softmax to prevent overflow     e_x = np.exp(x - np.max(x))     return e_x / e_x.sum( axis = 0 ) start = float ( input ( "Enter start value for x (e.g., -10): " )) end = float ( input ( "Enter end value for x (e.g., 10): " )) x = np.linspace(start, end, 100 ) plt.figure( figsize = ( 10 , 8 )) plt.subplot( 2 , 2 , 1 ) plt.plot(x, sigmoid(x)) plt.title( 'Sigmoid Activation Function' ) plt.grid( True ) plt.subplot( 2 , 2 , 2 ) plt.plot(x, tanh(x)) plt.title( 'Tanh Activation Function' ) plt.grid( True ) plt.subplot( 2 , 2 , 3 ) plt.plot(x, relu(x)) plt.title( 'ReLU Activation Function' ) plt.grid( True ) plt.subplot( 2 , 2 , 4 ) plt.plot(x, softm...