Last active
June 28, 2017 18:26
-
-
Save Karlheinzniebuhr/08ed392dd2bc67061f10a142b5c16e18 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
""" | |
Given the starting point of any `x` gradient descent | |
should be able to find the minimum value of x for the | |
cost function `f` defined below. | |
""" | |
import random | |
def f(x):
    """
    Cost function: a simple upward-opening parabola.

    Its minimum value is 5, attained at x = 0.
    """
    return 5 + x ** 2
def df(x):
    """
    Gradient of the cost function `f` at `x` (d/dx of x**2 + 5).
    """
    return x + x
def gradient_descent_update(x, gradx, learning_rate):
    """
    Apply one gradient descent step.

    Moves `x` against the gradient, scaled by the learning rate:
    x_new = x - learning_rate * gradx.

    Returns the updated value of x.
    """
    step = learning_rate * gradx
    return x - step
# Random starting point between 0 and 10,000. Feel free to set x to whatever you like.
x = random.randint(0, 10000)
learning_rate = 0.1
epochs = 100
for i in range(epochs + 1):
    cost = f(x)
    gradx = df(x)
    # Bug fix: the third value is labeled "x", so print x itself,
    # not the gradient (`gradx`) as the original did.
    print("EPOCH {}: Cost = {:.3f}, x = {:.3f}".format(i, cost, x))
    x = gradient_descent_update(x, gradx, learning_rate)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment