DEPARTMENT OF
COMPUTER SCIENCE & ENGINEERING
Complex Problems
Student Name: Heemaal Jaglan    UID: 22BCS14205
Branch: BE-CSE    Section/Group: 903-A
Semester: 6th    Date of Performance: 16/04/25
Subject Name: Deep Learning Lab    Subject Code: 22CSP-359
Problem-1
1. Aim: Implement a decision surface for a binary classification problem using logistic regression.
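Background: logistic regression models P(y = 1 | x) = σ(w·x + b) with σ(z) = 1 / (1 + e^(−z)); the decision surface plotted below is the 0.5-probability contour, i.e., the set of points where w·x + b = 0.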
2. Code/Implementation:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
# Step 1: Create a simple 2D dataset
X, y = make_classification(n_samples=100, n_features=2,
                           n_informative=2, n_redundant=0,
                           n_clusters_per_class=1, random_state=42)
# Step 2: Train Logistic Regression
model = LogisticRegression()
model.fit(X, y)
# Step 3: Create a mesh grid
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.linspace(x_min, x_max, 500),
                     np.linspace(y_min, y_max, 500))
grid = np.c_[xx.ravel(), yy.ravel()]
# Step 4: Predict probabilities and reshape
probs = model.predict_proba(grid)[:, 1].reshape(xx.shape)
# Step 5: Plot
plt.contourf(xx, yy, probs, levels=[0, 0.5, 1], alpha=0.2, colors=["blue", "red"])
plt.scatter(X[:, 0], X[:, 1], c=y, cmap='bwr', edgecolors='k')
plt.title("Decision Surface for Logistic Regression")
plt.xlabel("Feature 1")
plt.ylabel("Feature 2")
plt.show()
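As an optional cross-check (not in the original listing), the same boundary can be drawn explicitly from the learned weights, since the 0.5-probability contour is the line w1·x1 + w2·x2 + b = 0; this assumes model, X, y, and the mesh limits above are still in scope:

# Draw the exact decision line from the fitted coefficients
w1, w2 = model.coef_[0]
b = model.intercept_[0]
xs = np.linspace(x_min, x_max, 100)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap='bwr', edgecolors='k')
plt.plot(xs, -(w1 * xs + b) / w2, 'k--', label="w1*x1 + w2*x2 + b = 0")
plt.legend()
plt.show()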
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense

# Build supervised sine-wave sequences: each window of `timesteps` consecutive
# values is an input, and the value that follows it is the target.
# (Assumption: the opening of this function was truncated in the source; the
# sine sampling range below is a guess.)
def generate_sine_wave_data(timesteps, total_points):
    x = np.linspace(0, 50, total_points)
    y = np.sin(x)
    X, Y = [], []
    for i in range(len(y) - timesteps):
        X.append(y[i:i + timesteps])
        Y.append(y[i + timesteps])
    return np.array(X), np.array(Y)
timesteps = 10
total_points = 500
# Generate data
X, Y = generate_sine_wave_data(timesteps, total_points)
# Reshape for LSTM input
X = X.reshape((X.shape[0], X.shape[1], 1))
# Split into train and test sets
split = int(0.8 * len(X))
X_train, Y_train = X[:split], Y[:split]
X_test, Y_test = X[split:], Y[split:]
# Build the LSTM model
model = Sequential([
    LSTM(50, activation='relu', input_shape=(timesteps, 1)),
    Dense(1)
])
# Compile the model
model.compile(optimizer='adam', loss='mse')
# Train the model
history = model.fit(X_train, Y_train, epochs=20, batch_size=32, validation_data=(X_test, Y_test))
# Predict
predictions = model.predict(X_test)
# Plot the results
plt.figure(figsize=(12, 6))
plt.plot(range(len(Y_test)), Y_test, label="True Values")
plt.plot(range(len(predictions)), predictions, label="Predicted Values", linestyle="--")
plt.title("LSTM Sine Wave Prediction")
plt.xlabel("Time Step")
plt.ylabel("Sine Value")
plt.legend()
plt.show()
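To quantify the fit beyond the plot, the test loss can be printed directly; a one-line addition, assuming the trained model and test split above are in scope:

# Report mean squared error on the held-out windows
test_mse = model.evaluate(X_test, Y_test, verbose=0)
print(f"Test MSE: {test_mse:.6f}")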
5. Output:
Problem-2
1. Aim: Understand the difference between linear classifiers and non-linear classifiers. Experiment
with different values of regularization parameters and kernel functions.
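Background: a linear classifier separates classes with a single hyperplane in the input space, while a kernel SVM implicitly maps inputs to a higher-dimensional space where a hyperplane corresponds to a curved boundary in the original features; the RBF kernel K(x, x') = exp(−γ‖x − x'‖²) is a common choice, and C sets how strongly margin violations are penalized (larger C means weaker regularization).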
2. Code:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification, make_moons
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
# Generate a linearly separable dataset
X, y = make_classification(n_samples=100, n_features=2,
                           n_informative=2, n_redundant=0,
                           n_clusters_per_class=1, class_sep=1.5, random_state=42)
# Split into train/test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Linear SVM
clf = SVC(kernel='linear', C=1.0)
clf.fit(X_train, y_train)
# Function to plot decision boundary
def plot_decision_boundary(model, X, y, title):
    h = 0.02
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, alpha=0.2)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='k')
    plt.title(title)
    plt.xlabel('Feature 1')
    plt.ylabel('Feature 2')
    plt.show()
# Plot for linear classifier
plot_decision_boundary(clf, X, y, "Linear SVM Decision Boundary")
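The listing above covers only the linear half of the aim. As a minimal sketch of the non-linear half, reusing plot_decision_boundary and the make_moons import already present, an RBF-kernel SVM can be compared at a few regularization strengths:

# Non-linearly separable data: two interleaving half-moons
X_m, y_m = make_moons(n_samples=100, noise=0.2, random_state=42)
# Sweep the regularization parameter C with an RBF kernel
for C in [0.1, 1.0, 100.0]:
    clf_rbf = SVC(kernel='rbf', C=C, gamma='scale')
    clf_rbf.fit(X_m, y_m)
    plot_decision_boundary(clf_rbf, X_m, y_m, f"RBF SVM Decision Boundary (C={C})")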
3. Output:
Problem-3
1. Aim: Learn the trade-offs between Batch, Stochastic, and Mini-Batch Gradient Descent by
implementing them and observing their convergence behavior.
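Background: all three variants apply the same update θ ← θ − η·∇θ MSE(θ); they differ only in how many samples the gradient is averaged over per step: all n (batch), one random sample (stochastic), or a small subset (mini-batch). This trades per-step cost against gradient noise.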
2. Code:
import numpy as np
import matplotlib.pyplot as plt
# Step 1: Generate dummy data
np.random.seed(42)
X = 2 * np.random.rand(100, 1)
y = 4 + 3 * X + np.random.randn(100, 1)
# Add bias term
X_b = np.c_[np.ones((100, 1)), X] # shape (100, 2)
# Step 2: Gradient Descent Variants
def compute_mse(X, y, theta):
    return np.mean((X @ theta - y) ** 2)
def batch_gradient_descent(X, y, lr=0.1, n_iter=100):
    theta = np.random.randn(2, 1)
    mse_history = []
    for _ in range(n_iter):
        gradients = 2 / len(X) * X.T @ (X @ theta - y)
        theta -= lr * gradients
        mse_history.append(compute_mse(X, y, theta))
    return theta, mse_history
def stochastic_gradient_descent(X, y, lr=0.1, n_iter=100):
    theta = np.random.randn(2, 1)
    mse_history = []
    for epoch in range(n_iter):
        for i in range(len(X)):
            # Pick one random sample per update
            rand_index = np.random.randint(len(X))
            xi = X[rand_index:rand_index + 1]
            yi = y[rand_index:rand_index + 1]
            gradients = 2 * xi.T @ (xi @ theta - yi)
            theta -= lr * gradients
        # Record loss once per epoch, to match the other two variants
        mse_history.append(compute_mse(X, y, theta))
    return theta, mse_history
def mini_batch_gradient_descent(X, y, lr=0.1, n_iter=100, batch_size=20):
    theta = np.random.randn(2, 1)
    mse_history = []
    for epoch in range(n_iter):
        # Shuffle once per epoch, then walk through the data in batches
        indices = np.random.permutation(len(X))
        X_shuffled = X[indices]
        y_shuffled = y[indices]
        for i in range(0, len(X), batch_size):
            xi = X_shuffled[i:i + batch_size]
            yi = y_shuffled[i:i + batch_size]
            gradients = 2 / len(xi) * xi.T @ (xi @ theta - yi)
            theta -= lr * gradients
        # Record loss once per epoch, to match the other two variants
        mse_history.append(compute_mse(X, y, theta))
    return theta, mse_history
# Step 3: Run all three
theta_bgd, mse_bgd = batch_gradient_descent(X_b, y)
theta_sgd, mse_sgd = stochastic_gradient_descent(X_b, y)
theta_mbgd, mse_mbgd = mini_batch_gradient_descent(X_b, y, batch_size=20)
# Step 4: Plotting convergence
plt.figure(figsize=(10, 6))
plt.plot(mse_bgd, label="Batch GD", linewidth=2)
plt.plot(mse_sgd, label="Stochastic GD", linewidth=2)
plt.plot(mse_mbgd, label="Mini-Batch GD", linewidth=2)
plt.xlabel("Epoch")
plt.ylabel("MSE")
plt.title("Convergence of Gradient Descent Methods")
plt.legend()
plt.grid(True)
plt.show()
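As a quick sanity check (not part of the original listing), the closed-form least-squares solution gives the parameters all three methods should approach; the data were generated around intercept 4 and slope 3:

# Closed-form solution via the normal equation: theta = (X^T X)^(-1) X^T y
theta_exact = np.linalg.inv(X_b.T @ X_b) @ X_b.T @ y
print("Normal equation:", theta_exact.ravel())
print("Batch GD:      ", theta_bgd.ravel())
print("Stochastic GD: ", theta_sgd.ravel())
print("Mini-Batch GD: ", theta_mbgd.ravel())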
4. Output: