LogisticRegression.py
import numpy as np


class LogisticRegressionModel:
    def __init__(self, learning_rate=0.01, epochs=1000):
        self.learning_rate = learning_rate
        self.epochs = epochs
        self.weights = None
        self.bias = None

    def _sigmoid(self, z):
        # Map raw scores to probabilities in (0, 1)
        return 1 / (1 + np.exp(-z))

    def fit(self, X, y):
        # Initialize weights and bias
        n_samples, n_features = X.shape
        self.weights = np.zeros(n_features)
        self.bias = 0

        # Gradient descent
        for _ in range(self.epochs):
            # Linear prediction followed by the sigmoid activation
            linear_model = np.dot(X, self.weights) + self.bias
            y_predicted = self._sigmoid(linear_model)

            # Compute gradients of the log-loss w.r.t. weights and bias
            dw = (1 / n_samples) * np.dot(X.T, (y_predicted - y))
            db = (1 / n_samples) * np.sum(y_predicted - y)

            # Update weights and bias
            self.weights -= self.learning_rate * dw
            self.bias -= self.learning_rate * db

    def predict(self, X):
        # Predicted probabilities, thresholded at 0.5 to get class labels
        linear_model = np.dot(X, self.weights) + self.bias
        y_predicted = self._sigmoid(linear_model)
        return [1 if i > 0.5 else 0 for i in y_predicted]
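

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original file: the synthetic data,
    # the variable names (X_demo, y_demo), and the hyperparameter values below
    # are illustrative assumptions, not values taken from this repository.
    rng = np.random.default_rng(0)
    X_demo = rng.normal(size=(100, 2))
    # Label is 1 when the sum of the two features is positive, else 0
    y_demo = (X_demo[:, 0] + X_demo[:, 1] > 0).astype(int)

    model = LogisticRegressionModel(learning_rate=0.1, epochs=1000)
    model.fit(X_demo, y_demo)
    predictions = model.predict(X_demo)
    accuracy = np.mean(np.array(predictions) == y_demo)
    print(f"Training accuracy: {accuracy:.2f}")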