How to plot the learning curve (cost vs. number of iterations)?

I want to find a good learning rate by plotting the cost over the number of iterations. Where should I make changes in the code below to achieve this? Below are my cost function and gradient descent function, followed by the main code.

# import required modules
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot')
%matplotlib inline
plt.rcParams['figure.figsize'] = (12, 8)

# Create data set.
X, y = make_regression(n_samples=20, n_features=1, n_informative=1, noise = 10, random_state=3)

 
# Convert  target variable array from 1d to 2d.
y = y.reshape(len(y),1)



def compute_cost(X, y, params):
    """Return the half mean-squared-error cost for linear regression.

    Computes (1 / (2m)) * sum((X @ params - y)^2), where m is the
    number of training samples.
    """
    m = len(y)
    residuals = X @ params - y  # prediction error per sample
    return np.sum(residuals ** 2) / (2 * m)

def gradient_descent(X, y, params, learning_rate, n_iters):
    """Run batch gradient descent for linear regression.

    Parameters
    ----------
    X : ndarray, shape (n_samples, n_features) -- design matrix (with bias column)
    y : ndarray, shape (n_samples, 1) -- targets
    params : ndarray, shape (n_features, 1) -- initial parameters
    learning_rate : float -- step size
    n_iters : int -- number of iterations

    Returns
    -------
    (J_history, params) : cost at each iteration (shape (n_iters, 1))
        and the final parameters.
    """
    n_samples = len(y)
    J_history = np.zeros((n_iters, 1))  # To store cost values over iterations
    for i in range(n_iters):
        # Vectorized gradient step: grad = X.T @ (X @ params - y) / n_samples
        params = params - (learning_rate / n_samples) * X.T @ (X @ params - y)
        # BUG FIX: the original computed the cost but never stored it,
        # so J_history was returned as all zeros and the convergence
        # plot came out flat. Record it here so it can be plotted.
        J_history[i] = compute_cost(X, y, params)

    return (J_history, params)

n_iters = 300
learning_rate = 0.01
# (removed unused `num_history` array from the original)

# Add additional input feature x_0 = 1 (bias column) as the first column of X.
n_samples = len(y)
X = np.hstack((np.ones((n_samples, 1)), X))
n_features = np.size(X, 1)


# zero initialization (you can try different initial values)
params = np.zeros((n_features, 1))

initial_cost = compute_cost(X, y, params)
print("Initial cost is: ", initial_cost, "\n")

(J_history, optimal_params) = gradient_descent(X, y, params, learning_rate, n_iters)

print("Optimal parameters are: \n", optimal_params, "\n")
print("Final cost is: ", J_history[-1])


# Plot the recorded cost against the iteration number; once
# gradient_descent fills J_history, this shows the learning curve.
plt.plot(range(len(J_history)), J_history, 'r')
plt.title("Convergence Graph of Cost Function")
plt.xlabel("Number of Iterations")
plt.ylabel("Cost")
plt.show()

Thanks in advance. "I am writing this line because Stack Overflow does not allow pasting long code without accompanying text."



Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source