import numpy as np
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
# %matplotlib notebook  (Jupyter/IPython magic from the notebook export — not valid in plain Python)
def f(x, y):
    """Paraboloid surface f(x, y) = x^2 + y^2 (works elementwise on arrays)."""
    return x * x + y * y
def gradient_f(x, y):
    """Gradient of f(x, y) = x^2 + y^2.

    Returns:
        np.array([df/dx, df/dy]) = np.array([2*x, 2*y])
    """
    return np.array([2 * x, 2 * y])
# Sample the surface z = f(x, y) on a 50x50 grid over [-10, 10] x [-10, 10].
axis_pts = np.linspace(start=-10, stop=10, num=50)
x, y = np.meshgrid(axis_pts, axis_pts)
z = f(x, y)

# Suppose we stand at point (a,b); c is the surface height there.
a, b = -5, -5
c = f(a, b)

# Draw the wireframe surface and mark the current position with a red dot.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(x, y, z, rstride=3, cstride=3)
ax.plot(a, b, c, "or", markersize=10)
# (notebook cell output, not code): [<mpl_toolkits.mplot3d.art3d.Line3D at 0x2287a486df0>]
# Restart from the initial point and redraw the surface.
a, b = -5, -5
c = f(a, b)

# surface plot with the starting position marked in red
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(x, y, z, rstride=3, cstride=3)
ax.plot(a, b, c, "or", markersize=10)

# Gradient descent: repeatedly step opposite the gradient (steepest descent),
# marking each visited point on the surface in green.
n_steps = 10
learning_rate = 0.1
for _ in range(n_steps):
    step_x, step_y = gradient_f(a, b)
    a = a - learning_rate * step_x
    b = b - learning_rate * step_y
    c = f(a, b)
    ax.plot(a, b, c, "og", markersize=5)