Added weight report and more comments.
parent c81789fbd3
commit 6922a8e6cb

4 changed files with 22 additions and 6 deletions
@@ -164,3 +164,7 @@ if __name__ == "__main__":
     preds = model.predict(x_test)
     print("\nFirst 10 predictions:")
     print(preds.head(10))
+
+    # weight report
+    print("\nWeights from the model:")
+    print(model.w)
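The weight report prints model.w as a bare array. If readability matters, the report can pair each weight with its feature name; a minimal sketch, assuming model.w is a 1-D NumPy array ordered like the training columns (the names below are hypothetical, not taken from this repo):

feature_names = ["bias", "x1", "x2"]  # hypothetical; order must match the training matrix
for name, weight in zip(feature_names, model.w.ravel()):
    print(f"{name:>8}: {weight: .4f}")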
@@ -74,7 +74,7 @@ class LogisticRegression:
         z = self.x.dot(self.w)  # linear prediction
         p = self.sigmoid(z)  # probabilities of the model predictions
 
-        gradient = self.x.T.dot(p - self.y) / self.y.size  # gradient calculation formula
+        gradient = self.x.T.dot(p - self.y) / self.y.size  # for logistic regression X^T*(p - y)
 
         self.w -= self.lr * gradient  # gradient multiplied by learning rate is removed from weight
 
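The new comment names the gradient of the mean cross-entropy loss: for p = sigmoid(Xw), dL/dw = X^T (p - y) / n. A minimal self-contained sketch of the same full-batch update, on toy data that is an assumption for illustration only:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

rng = np.random.default_rng(0)
x = rng.normal(size=(100, 3))              # toy design matrix
y = (x[:, 0] + x[:, 1] > 0).astype(float)  # toy binary labels
w = np.zeros(3)
lr = 0.1

for _ in range(500):
    p = sigmoid(x.dot(w))               # model probabilities
    gradient = x.T.dot(p - y) / y.size  # X^T (p - y) / n
    w -= lr * gradient                  # gradient descent step

print(w)  # the first two weights grow positive, matching how y was built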
@@ -190,4 +190,8 @@ if __name__ == "__main__":
     # predict Y values using the trained data
     first_10 = X_test[:10]
     y_hat = model.predict(first_10)
     print("\nFirst 10 predictions:", y_hat.ravel())
+
+    # weight report
+    print("\nWeights from the model:")
+    print(model.w)
@@ -74,7 +74,7 @@ class LinearRegression:
         # makes Y prediction value for X batch value by multiplying X and weight vectors.
 
         error = y_batch - y_pred  # error is difference between Y batch value and Y prediction value
-        grad = -2 * x_batch.T.dot(error) / batch_size
+        grad = -2 * x_batch.T.dot(error) / batch_size  # for linear regression -2*X^T*(error)
         # gradient is calculated by multiplication of error, transposed X batch value and -2 divided by batch size
 
         w_np -= self.lr * grad  # weight is decreased by multiplication of learning rate and gradient
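The annotation -2*X^T*(error) follows from differentiating the mean squared error: for L = mean((y - Xw)^2), dL/dw = -2 X^T (y - Xw) / n, so subtracting lr * grad moves w downhill. A minimal sketch of the same batch update on toy data (all names and values here are illustrative assumptions):

import numpy as np

rng = np.random.default_rng(1)
x_batch = rng.normal(size=(32, 2))  # one toy batch of features
true_w = np.array([2.0, -1.0])
y_batch = x_batch.dot(true_w)       # noiseless targets for the example
batch_size = y_batch.size

w_np = np.zeros(2)
lr = 0.05
for _ in range(200):
    y_pred = x_batch.dot(w_np)                     # prediction
    error = y_batch - y_pred                       # residual
    grad = -2 * x_batch.T.dot(error) / batch_size  # -2 X^T (y - Xw) / n
    w_np -= lr * grad                              # descent step

print(w_np)  # converges toward [2.0, -1.0]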
@@ -169,4 +169,8 @@ if __name__ == "__main__":
     # predict Y values using the trained data
     preds = model.predict(x_test)
     print("\nFirst 10 predictions:")
    print(preds.head(10))
+
+    # weight report
+    print("\nWeights from the model:")
+    print(model.w)
@@ -30,7 +30,7 @@ class LogisticRegression:
         """Cross-entropy loss is used for the cost calculation"""
         eps = 1e-15
         p = np.clip(p, eps, 1 - eps)
-        return -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))
+        return -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))  # mean of -[y*log(p) + (1 - y)*log(1-p)]
 
     def prepare(self, df: pd.DataFrame, target_col: str) -> None:
         """
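The eps clip just above the changed line is what keeps the new comment's formula finite: at p exactly 0 or 1, log(p) or log(1 - p) is -inf, and terms like 0 * log(0) evaluate to nan in NumPy. A standalone sketch of the same loss with a quick check (inputs hypothetical):

import numpy as np

def cross_entropy(y, p, eps=1e-15):
    p = np.clip(p, eps, 1 - eps)  # keep log() finite at p = 0 or p = 1
    return -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))

y = np.array([1.0, 0.0, 1.0])
p = np.array([0.9, 0.1, 1.0])  # unclipped, the last term gives 0 * log(0) = nan
print(cross_entropy(y, p))     # finite thanks to the clip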
@@ -90,7 +90,7 @@ class LogisticRegression:
         z = x_batch.dot(self.w)  # linear prediction
         p = self.sigmoid(z)  # probabilities of the model predictions
 
-        grad = x_batch.T.dot(p - y_batch) / y_batch.size  # gradient calculation formula
+        grad = x_batch.T.dot(p - y_batch) / y_batch.size  # for logistic regression X^T*(p - y)
         self.w -= self.lr * grad  # gradient multiplied by learning rate is removed from weight
 
         # cost is calculated through cross-entropy and added for the current range
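This hunk is the mini-batch counterpart of the full-batch update in the hunk at line 74: the same X^T (p - y) formula, now divided by the batch size and applied per batch. The surrounding loop is not shown in the diff; a sketch of how such updates commonly sit inside an epoch loop (the shuffling and slicing here are assumptions, not taken from this commit):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

rng = np.random.default_rng(2)
X = rng.normal(size=(200, 3))
Y = (X[:, 2] > 0).astype(float)
w, lr, batch = np.zeros(3), 0.1, 32

for epoch in range(20):
    order = rng.permutation(Y.size)  # reshuffle each epoch
    for start in range(0, Y.size, batch):
        idx = order[start:start + batch]
        x_batch, y_batch = X[idx], Y[idx]
        p = sigmoid(x_batch.dot(w))
        grad = x_batch.T.dot(p - y_batch) / y_batch.size  # X^T (p - y) / n_batch
        w -= lr * grad

print(w)  # the third weight dominates, matching how Y was built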
@@ -207,3 +207,7 @@ if __name__ == "__main__":
     first_10 = X_test[:10]
     y_hat = model.predict(first_10)
     print("\nFirst 10 predictions:", y_hat.ravel())
+
+    # weight report
+    print("\nWeights from the model:")
+    print(model.w)