Calculate the gradient descent formula for logistic regression
The gradient descent update rule is

\theta := \theta - \alpha \frac{\partial J(\theta)}{\partial \theta}

where \theta is the parameter vector, \alpha is the learning rate, and J(\theta) is the cost function.
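For completeness, here is a brief sketch of where the gradient comes from (a standard derivation; h_\theta(x) = \sigma(\theta^T x) denotes the sigmoid hypothesis):

J(\theta) = -\frac{1}{m} \sum_{i=1}^{m} \left[ y^{(i)} \log h_\theta(x^{(i)}) + (1 - y^{(i)}) \log\left(1 - h_\theta(x^{(i)})\right) \right]

Using \sigma'(z) = \sigma(z)\left(1 - \sigma(z)\right) and differentiating term by term,

\frac{\partial J(\theta)}{\partial \theta_j} = \frac{1}{m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right) x_j^{(i)}

or, in vector form, \nabla_\theta J(\theta) = \frac{1}{m} X^T \left( \sigma(X\theta) - y \right), which is exactly what the gradient function below computes.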
import numpy as np

def sigmoid(z):
    # Logistic activation: squashes any real value into (0, 1)
    return 1 / (1 + np.exp(-z))

def probability(theta, x):
    # Predicted probability that each sample belongs to class 1
    return sigmoid(np.dot(x, theta))

def cost_function(theta, x, y):
    # Computes the cross-entropy cost averaged over all m training samples
    m = x.shape[0]
    h = probability(theta, x)
    return -(1 / m) * np.sum(y * np.log(h) + (1 - y) * np.log(1 - h))

def gradient(theta, x, y):
    # Computes the gradient of the cost function at the point theta:
    # (1/m) * x^T (h - y)
    m = x.shape[0]
    return (1 / m) * np.dot(x.T, probability(theta, x) - y)
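To connect the update rule back to the code, here is a minimal gradient descent loop built on the gradient function above; the learning rate alpha, the iteration count, and the tiny dataset are illustrative choices, not part of the original.

def gradient_descent(x, y, theta, alpha=0.1, num_iterations=1000):
    # Repeatedly apply the update rule: theta := theta - alpha * dJ/dtheta
    for _ in range(num_iterations):
        theta = theta - alpha * gradient(theta, x, y)
    return theta

# Example usage on a hypothetical toy dataset
# (the first column of x is the bias term)
x = np.array([[1.0, 0.5], [1.0, 1.5], [1.0, 3.0]])
y = np.array([0.0, 0.0, 1.0])
theta = gradient_descent(x, y, np.zeros(x.shape[1]))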