Skip to content

Commit

Permalink
Gradient descent implemented in Python
Browse files Browse the repository at this point in the history
  • Loading branch information
Kumar-laxmi committed Oct 4, 2023
1 parent 16da770 commit b886551
Show file tree
Hide file tree
Showing 2 changed files with 108 additions and 5 deletions.
102 changes: 102 additions & 0 deletions Python/Optimization/Gradient_Descent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
# Importing Libraries
import numpy as np
import matplotlib.pyplot as plt

def mean_squared_error(y_true, y_predicted):
    """Return the mean squared error between targets and predictions.

    Both arguments are numpy arrays of equal length.
    """
    squared_errors = (y_true - y_predicted) ** 2
    return np.mean(squared_errors)

# Gradient Descent Function
# Here iterations, learning_rate, stopping_threshold
# are hyperparameters that can be tuned
def gradient_descent(x, y, iterations = 1000, learning_rate = 0.0001,
                     stopping_threshold = 1e-6):
    """Fit a simple linear model y = w*x + b by batch gradient descent
    on the mean-squared-error loss.

    Parameters:
        x, y: 1-D numpy arrays of equal length (inputs and targets).
        iterations: maximum number of update steps (hyperparameter).
        learning_rate: step size for each parameter update (hyperparameter).
        stopping_threshold: stop early once the absolute change in cost
            between consecutive iterations is at or below this value.

    Returns:
        (weight, bias): the final estimated parameters.

    Side effects:
        Prints progress every 1000th iteration and shows a matplotlib
        plot of cost versus weight.
    """
    # Initial guesses for the parameters.
    current_weight = 0.1
    current_bias = 0.01
    n = float(len(x))

    costs = []
    weights = []
    previous_cost = None

    # Estimation of optimal parameters
    for i in range(iterations):

        # Making predictions with the current parameters.
        y_predicted = (current_weight * x) + current_bias

        # Calculating the current cost
        current_cost = mean_squared_error(y, y_predicted)

        # Early stopping: compare against the previous cost. `is not None`
        # (rather than truthiness) so a legitimate cost of exactly 0.0
        # does not silently disable the check.
        if previous_cost is not None and abs(previous_cost - current_cost) <= stopping_threshold:
            break

        previous_cost = current_cost

        costs.append(current_cost)
        weights.append(current_weight)

        # Gradients of the MSE loss w.r.t. weight and bias.
        weight_derivative = -(2 / n) * np.sum(x * (y - y_predicted))
        bias_derivative = -(2 / n) * np.sum(y - y_predicted)

        # Gradient step: move parameters against the gradient.
        current_weight = current_weight - (learning_rate * weight_derivative)
        current_bias = current_bias - (learning_rate * bias_derivative)

        # Print the parameters for each 1000th iteration only (the original
        # printed on every iteration despite its comment saying otherwise).
        if (i + 1) % 1000 == 0:
            print(f"Iteration {i+1}: Cost {current_cost}, Weight "
                  f"{current_weight}, Bias {current_bias}")

    # Visualizing the weights and cost for all recorded iterations.
    plt.figure(figsize = (8,6))
    plt.plot(weights, costs)
    plt.scatter(weights, costs, marker='o', color='red')
    plt.title("Cost vs Weights")
    plt.ylabel("Cost")
    plt.xlabel("Weight")
    plt.show()

    return current_weight, current_bias


def main():
    """Run the gradient-descent demo on a small 1-D regression dataset."""
    # Sample data: inputs and corresponding targets.
    features = np.array([32.50234527, 53.42680403, 61.53035803, 47.47563963, 59.81320787,
                         55.14218841, 52.21179669, 39.29956669, 48.10504169, 52.55001444,
                         45.41973014, 54.35163488, 44.1640495 , 58.16847072, 56.72720806,
                         48.95588857, 44.68719623, 60.29732685, 45.61864377, 38.81681754])
    targets = np.array([31.70700585, 68.77759598, 62.5623823 , 71.54663223, 87.23092513,
                        78.21151827, 79.64197305, 59.17148932, 75.3312423 , 71.30087989,
                        55.16567715, 82.47884676, 62.00892325, 75.39287043, 81.43619216,
                        60.72360244, 82.89250373, 97.37989686, 48.84715332, 56.87721319])

    # Fit the linear model with gradient descent.
    estimated_weight, estimated_bias = gradient_descent(features, targets, iterations=2000)
    print(f"Estimated Weight: {estimated_weight}\nEstimated Bias: {estimated_bias}")

    # Predictions from the fitted parameters.
    predictions = estimated_weight * features + estimated_bias

    # Plot the data points and the fitted regression line.
    plt.figure(figsize=(8, 6))
    plt.scatter(features, targets, marker='o', color='red')
    plt.plot([min(features), max(features)],
             [min(predictions), max(predictions)],
             color='blue', markerfacecolor='red',
             markersize=10, linestyle='dashed')
    plt.xlabel("X")
    plt.ylabel("Y")
    plt.show()


if __name__=="__main__":
    main()
11 changes: 6 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -315,11 +315,12 @@
| --- | ----------------------------------------------------------------------------------- | ------------------------------ |
| 1. | Ant Colony Optimization | [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/aco_optimization_py.py) |
| 2. | Genetic Algorithm | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/genetic.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/genetic.py) |
| 3. | Grey Wolf Optimization | [C](https://github.com/Kumar-laxmi/Algorithms/blob/main/C/Optimization/greywolf.c), [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/greywolf.cpp), [Java](https://github.com/Kumar-laxmi/Algorithms/blob/main/Java/Optimization/grey_wolf.java), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/greywolf.py) |
| 4. | Hill Climbing Algorithm | [C](https://github.com/Kumar-laxmi/Algorithms/blob/main/C/Optimization/Hill_climbing.c), [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/Hill_climbing.cpp), [Java](https://github.com/Kumar-laxmi/Algorithms/blob/main/Java/Optimization/Hill_climbing.java), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/Hill_climbing.py) |
| 5. | Particle Swarm Optimization | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/particle_swarm_optimization.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/particle_swarm_optimization.py) |
| 6. | Shuffled Frog Leaping Algorithm | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/shuffled_frog_leaping_optimization.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/shuffled_frog_leaping_optimization.py) |
| 7. | Simulated Annealing | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/simulated_annealing.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/simulated_annealing.py) |
| 3. | Gradient Descent | [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/Gradient_Descent.py) |
| 4. | Grey Wolf Optimization | [C](https://github.com/Kumar-laxmi/Algorithms/blob/main/C/Optimization/greywolf.c), [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/greywolf.cpp), [Java](https://github.com/Kumar-laxmi/Algorithms/blob/main/Java/Optimization/grey_wolf.java), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/greywolf.py) |
| 5. | Hill Climbing Algorithm | [C](https://github.com/Kumar-laxmi/Algorithms/blob/main/C/Optimization/Hill_climbing.c), [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/Hill_climbing.cpp), [Java](https://github.com/Kumar-laxmi/Algorithms/blob/main/Java/Optimization/Hill_climbing.java), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/Hill_climbing.py) |
| 6. | Particle Swarm Optimization | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/particle_swarm_optimization.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/particle_swarm_optimization.py) |
| 7. | Shuffled Frog Leaping Algorithm | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/shuffled_frog_leaping_optimization.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/shuffled_frog_leaping_optimization.py) |
| 8. | Simulated Annealing | [C++](https://github.com/Kumar-laxmi/Algorithms/blob/main/C%2B%2B/Optimization/simulated_annealing.cpp), [Python](https://github.com/Kumar-laxmi/Algorithms/blob/main/Python/Optimization/simulated_annealing.py) |

<h3 align="center"><b>Pattern Matching</b></h3>

Expand Down

0 comments on commit b886551

Please sign in to comment.