diff --git a/effect-of-regularization-on-loss.py b/effect-of-regularization-on-loss.py
deleted file mode 100644
index 426f1b1..0000000
--- a/effect-of-regularization-on-loss.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import numpy as np
-import matplotlib.pyplot as plt
-
-# Generating Synthetic Data
-def generate_linear_data(n):
-    x = np.random.uniform(0, 10, n) # initialize x
-    eps = np.random.normal(0, 1, n) # initialize epsilon
-    y = -3 * x + 8 + 2 * eps # y = −3x + 8 + 2ϵ
-    return x.reshape(-1, 1), y
-
-# Gradient Descent with L1/L2
-def gradient_descent(x, y, lam, reg_type, lr, iters):
-    x_b = np.hstack([np.ones_like(x), x]) # initialize x
-    w = np.zeros(x_b.shape[1]) # initialize weight
-    path = [w.copy()]
-
-    for i in range(iters):
-        pred = x_b @ w # linear regression prediction
-        error = pred - y # error
-        grad = x_b.T @ error / len(y) # gradient formula
-
-        if reg_type == 'l2':
-            grad += lam * w # L2 formula
-        elif reg_type == 'l1':
-            grad += lam * np.sign(w) # L1 formula
-
-        w -= lr * grad # loss calculation
-        path.append(w.copy())
-
-    return w, np.array(path)
-
-# Plotting the loss
-def plot_contour(x, y, reg_type, lam):
-    x_b = np.hstack([np.ones_like(x), x]) # initialize x
-    w0, w1 = np.meshgrid(np.linspace(-10, 10, 100), np.linspace(-10, 10, 100)) # initialize intercept and slope
-    loss = np.zeros_like(w0) # initialize loss
-
-    for i in range(w0.shape[0]):
-        for j in range(w0.shape[1]):
-            w = np.array([w0[i, j], w1[i, j]])
-            error = y - x_b @ w # error
-            mse = np.mean(error ** 2) # mean square error
-            reg = lam * (np.sum(w ** 2) if reg_type == 'l2' else np.sum(np.abs(w))) # regularization
-            loss[i, j] = mse + reg # regularization and mse for the loss
-
-    _, path = gradient_descent(x, y, lam, reg_type, 0.01, 500)
-
-    # plotting the figure
-    plt.figure(figsize=(6, 5))
-    plt.contour(w0, w1, loss, levels=50, cmap='viridis')
-    plt.plot(path[:, 0], path[:, 1], 'ro-', markersize=2, label='Gradient Descent Path')
-    plt.title(f"{reg_type.upper()} Regularization (λ={lam})")
-    plt.xlabel("w0 (intercept)")
-    plt.ylabel("w1 (slope)")
-    plt.grid(True)
-    plt.legend()
-    plt.tight_layout()
-    plt.savefig('results/task4-effect-of-regularization-on-loss-' + reg_type + '-' + str(lam) + '.png')
-
-if __name__ == "__main__":
-    print("Running Task 4: Effect of L1 and L2 Regularization on Loss Landscape")
-
-    # Generate dataset
-    x, y = generate_linear_data(30)
-
-    # Values of lambda to visualize
-    lambda_values = [0.01, 0.1, 1.0]
-
-    # Plot for both L1 and L2 regularization
-    for reg_type in ['l1', 'l2']:
-        for lam in lambda_values:
-            plot_contour(x, y, reg_type, lam)
\ No newline at end of file
diff --git a/results/task3-bias-decomposition-l1.png b/results/task3-bias-decomposition-l1.png
index 89968bc..b0e229e 100644
Binary files a/results/task3-bias-decomposition-l1.png and b/results/task3-bias-decomposition-l1.png differ
diff --git a/results/task3-bias-decomposition-l2.png b/results/task3-bias-decomposition-l2.png
index dc3eff0..8c55104 100644
Binary files a/results/task3-bias-decomposition-l2.png and b/results/task3-bias-decomposition-l2.png differ
diff --git a/results/task3-train-validation-errors-l1.png b/results/task3-train-validation-errors-l1.png
index b05e7d9..8f9381d 100644
Binary files a/results/task3-train-validation-errors-l1.png and b/results/task3-train-validation-errors-l1.png differ
diff --git a/results/task3-train-validation-errors-l2.png b/results/task3-train-validation-errors-l2.png
index 119b280..afb0d02 100644
Binary files a/results/task3-train-validation-errors-l2.png and b/results/task3-train-validation-errors-l2.png differ
diff --git a/results/task4-effect-of-regularization-on-loss-l1-0.01.png b/results/task4-effect-of-regularization-on-loss-l1-0.01.png
index 487af0a..26e2d62 100644
Binary files a/results/task4-effect-of-regularization-on-loss-l1-0.01.png and b/results/task4-effect-of-regularization-on-loss-l1-0.01.png differ
diff --git a/results/task4-effect-of-regularization-on-loss-l1-0.1.png b/results/task4-effect-of-regularization-on-loss-l1-0.1.png
index 7481165..095966b 100644
Binary files a/results/task4-effect-of-regularization-on-loss-l1-0.1.png and b/results/task4-effect-of-regularization-on-loss-l1-0.1.png differ
diff --git a/results/task4-effect-of-regularization-on-loss-l1-1.0.png b/results/task4-effect-of-regularization-on-loss-l1-1.0.png
index b891e6e..104bee1 100644
Binary files a/results/task4-effect-of-regularization-on-loss-l1-1.0.png and b/results/task4-effect-of-regularization-on-loss-l1-1.0.png differ
diff --git a/results/task4-effect-of-regularization-on-loss-l2-0.01.png b/results/task4-effect-of-regularization-on-loss-l2-0.01.png
index 21ac1d5..9fbc76e 100644
Binary files a/results/task4-effect-of-regularization-on-loss-l2-0.01.png and b/results/task4-effect-of-regularization-on-loss-l2-0.01.png differ
diff --git a/results/task4-effect-of-regularization-on-loss-l2-0.1.png b/results/task4-effect-of-regularization-on-loss-l2-0.1.png
index 951f90e..f168988 100644
Binary files a/results/task4-effect-of-regularization-on-loss-l2-0.1.png and b/results/task4-effect-of-regularization-on-loss-l2-0.1.png differ
diff --git a/results/task4-effect-of-regularization-on-loss-l2-1.0.png b/results/task4-effect-of-regularization-on-loss-l2-1.0.png
index 1df9703..9fcc59e 100644
Binary files a/results/task4-effect-of-regularization-on-loss-l2-1.0.png and b/results/task4-effect-of-regularization-on-loss-l2-1.0.png differ
diff --git a/linear-regression-w-non-linear-functions.py b/tasks-1-&-2.py
similarity index 100%
rename from linear-regression-w-non-linear-functions.py
rename to tasks-1-&-2.py
diff --git a/regularization-w-cross-validation.py b/tasks-3-&-4.py
similarity index 69%
rename from regularization-w-cross-validation.py
rename to tasks-3-&-4.py
index 8375744..48ab651 100644
--- a/regularization-w-cross-validation.py
+++ b/tasks-3-&-4.py
@@ -137,6 +137,68 @@ def bias_variance_decomp(reg_type, lam_values, num_datasets, N, D):
     plt.grid(True)
     plt.savefig('results/task3-bias-decomposition-' + reg_type + '.png')
+
+# Generating Synthetic Data
+def generate_linear_data(n):
+    x = np.random.uniform(0, 10, n) # sample x uniformly from [0, 10]
+    eps = np.random.normal(0, 1, n) # Gaussian noise, ϵ ~ N(0, 1)
+    y = -3 * x + 8 + 2 * eps # y = −3x + 8 + 2ϵ
+    return x.reshape(-1, 1), y
+
+# Gradient Descent with L1/L2
+def gradient_descent(x, y, lam, reg_type, lr, iters):
+    x_b = np.hstack([np.ones_like(x), x]) # prepend a bias (intercept) column
+    w = np.zeros(x_b.shape[1]) # start all weights at zero
+    path = [w.copy()]
+
+    for i in range(iters):
+        pred = x_b @ w # linear model prediction
+        error = pred - y # residuals
+        grad = 2 * x_b.T @ error / len(y) # gradient of the mean squared error
+
+        if reg_type == 'l2':
+            grad += 2 * lam * w # gradient of the L2 penalty lam * ||w||^2
+        elif reg_type == 'l1':
+            grad += lam * np.sign(w) # subgradient of the L1 penalty lam * ||w||_1
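+            # np.sign(0) == 0, so a weight sitting exactly at zero gets no L1 push (the standard subgradient choice)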
+
+        w -= lr * grad # gradient descent update step
+        path.append(w.copy())
+
+    return w, np.array(path)
+
+# Plot the regularized loss contours with the gradient descent path overlaid
+def plot_contour(x, y, reg_type, lam):
+    x_b = np.hstack([np.ones_like(x), x]) # prepend a bias (intercept) column
+    w0, w1 = np.meshgrid(np.linspace(-10, 10, 100), np.linspace(-10, 10, 100)) # grid over intercept (w0) and slope (w1)
+    loss = np.zeros_like(w0) # loss surface evaluated over the grid
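+    # brute force: evaluate the regularized loss at every grid point to draw the contours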
+
+    for i in range(w0.shape[0]):
+        for j in range(w0.shape[1]):
+            w = np.array([w0[i, j], w1[i, j]])
+            error = y - x_b @ w # residuals at this candidate w
+            mse = np.mean(error ** 2) # mean squared error
+            reg = lam * (np.sum(w ** 2) if reg_type == 'l2' else np.sum(np.abs(w))) # L2 or L1 penalty
+            loss[i, j] = mse + reg # regularized loss at this grid point
+
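+    # run gradient descent with the same lambda so its path descends exactly the surface plotted here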
+    _, path = gradient_descent(x, y, lam, reg_type, 0.01, 500)
+
+    # plotting the figure
+    plt.figure(figsize=(6, 5))
+    plt.contour(w0, w1, loss, levels=50, cmap='viridis')
+    plt.plot(path[:, 0], path[:, 1], 'ro-', markersize=2, label='Gradient Descent Path')
+    plt.title(f"{reg_type.upper()} Regularization (λ={lam})")
+    plt.xlabel("w0 (intercept)")
+    plt.ylabel("w1 (slope)")
+    plt.grid(True)
+    plt.legend()
+    plt.tight_layout()
+    plt.savefig('results/task4-effect-of-regularization-on-loss-' + reg_type + '-' + str(lam) + '.png')
+    plt.close() # close the figure; one is created per (reg_type, lambda) pair
+
+
 if __name__ == "__main__":
     print("Running Task 3: Regularization with Cross-Validation")
@@ -146,4 +208,17 @@ if __name__ == "__main__":
     train_validation_err('l1', lam_values, 50, 20, 45)
     bias_variance_decomp('l2', lam_values, 50, 20, 45)
-    bias_variance_decomp('l1', lam_values, 50, 20, 45)
\ No newline at end of file
+    bias_variance_decomp('l1', lam_values, 50, 20, 45)
+
+    print("Running Task 4: Effect of L1 and L2 Regularization on Loss Landscape")
+
+    # Generate dataset
+    x, y = generate_linear_data(30)
+
+    # Values of lambda to visualize
+    lambda_values = [0.01, 0.1, 1.0]
+
+    # Plot for both L1 and L2 regularization
+    for reg_type in ['l1', 'l2']:
+        for lam in lambda_values:
+            plot_contour(x, y, reg_type, lam)
\ No newline at end of file