Implement Logistic Regression from Scratch

Code

# Feature matrix, shape (n_features, n_samples): transposed so that each
# COLUMN is one sample and Z = W.T @ X works in the training loop below.
# NOTE(review): assumes `df` is a pandas DataFrame with a binary "Outcome"
# column — `df` is defined outside this snippet; confirm against the caller.
X = df.drop("Outcome", axis=1).to_numpy().T
# Binary labels, shape (n_samples,)
Y = df['Outcome'].to_numpy()

# Initialize weights & bias to random values
# W has one entry per predictor column (df minus the "Outcome" column)
W = np.random.rand(df.shape[1] - 1)
b = 0

# An empty list to collect the loss from each epoch
J = []

# suppress warnings
# NOTE(review): this silences ALL warnings process-wide, including the
# overflow / log(0) warnings the training loop can emit — consider scoping
# it with warnings.catch_warnings() instead.
warnings.filterwarnings('ignore')

# custom function
def sigmoid(x):
    """Numerically stable logistic function for a scalar x.

    The naive form exp(x) / (1 + exp(x)) overflows for large positive x
    (exp(x) -> inf, giving inf/inf == nan).  Splitting on the sign of x
    guarantees np.exp is only ever called on a non-positive argument, so
    the result is always a finite value in [0, 1].

    Parameters
    ----------
    x : float
        Logit value (scalars only; arrays are handled via np.vectorize).

    Returns
    -------
    float
        1 / (1 + exp(-x)), computed without overflow.
    """
    if x >= 0:
        # exp(-x) <= 1 here, so no overflow is possible
        return 1.0 / (1.0 + np.exp(-x))
    # x < 0: exp(x) <= 1 here; algebraically identical to the branch above
    ex = np.exp(x)
    return ex / (1.0 + ex)

# define vectorized sigmoid
# define vectorized sigmoid
# NOTE(review): np.vectorize is a convenience wrapper (a Python-level loop,
# not a speed-up), and sigmoid_v is never called in the visible code — the
# training loop computes the sigmoid inline.  Verify it isn't used elsewhere.
sigmoid_v = np.vectorize(sigmoid)
# gradient-descent learning rate (step size)
lr = 0.00001

# Batch gradient descent on the binary cross-entropy loss.
# Shapes: X is (n_features, m), Y is (m,), W is (n_features,), b is a scalar.
# Side effects: updates W and b in place each epoch and appends one loss
# value per epoch to J.
m = len(df)   # number of training samples — loop-invariant, hoisted out
EPS = 1e-15   # keeps log() finite when a prediction saturates at exactly 0 or 1

for epoch in range(5000000):

    # Forward pass: logits, then sigmoid activations (predicted probabilities)
    Z = np.dot(W.T, X) + b
    A = 1 / (1 + np.exp(-Z))

    # Append this epoch's cross-entropy loss; A is clipped ONLY for the log
    # so a saturated prediction produces a large finite loss instead of nan
    A_safe = np.clip(A, EPS, 1 - EPS)
    J.append(np.sum(-(Y * np.log(A_safe) + (1 - Y) * np.log(1 - A_safe))))

    # Partial derivative of the loss w.r.t. the logit Z
    # (equation derived analytically: dL/dZ = A - Y)
    dZ = A - Y

    # Partial derivatives of the loss w.r.t. the weights and bias,
    # averaged over the batch (equations derived analytically)
    dW = (1 / m) * np.dot(X, dZ)
    db = (1 / m) * np.sum(dZ)

    # Update weights and bias by stepping against the gradient
    W = W - lr * dW
    b = b - lr * db
    

Leave a Comment