By definition, the gradient is the vector of partial derivatives:
$$ \nabla f(x_1, x_2) = \left( \frac{\partial f}{\partial x_1},\ \frac{\partial f}{\partial x_2} \right) $$
Each partial derivative is defined as a limit (the forward difference):
$$ \frac{\partial f}{\partial x_1} = \lim_{h\rightarrow 0}\frac{f(x_1+h,\,x_2)-f(x_1,\,x_2)}{h} $$
For numerical work, the central difference is preferred, since its error shrinks as $O(h^2)$ rather than $O(h)$:
$$ \frac{\partial f}{\partial x_1} \approx \frac{f(x_1+h,\,x_2)-f(x_1-h,\,x_2)}{2h} $$
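For instance, for the single-variable $f(x) = x^2 + 5$ (the `func_1` defined below), the central difference recovers the true derivative $f'(x) = 2x$ exactly, because the $h^2$ terms cancel:
$$ \frac{f(x+h)-f(x-h)}{2h} = \frac{(x+h)^2 - (x-h)^2}{2h} = \frac{4xh}{2h} = 2x $$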
- Code:

```python
import numpy as np

# Test functions and sample inputs.
func_1 = lambda x: x**2 + 5                 # single-variable: f(x) = x^2 + 5
func_2 = lambda x: x[0]**2 + x[1]**3 + 1    # two-variable: f(x1, x2) = x1^2 + x2^3 + 1
x_lim = np.arange(-5, 5, 0.01)              # x range for plotting
input_val = np.array([2.0, 3.0])            # point at which to evaluate the gradient

class Differentiate:
    def __init__(self):
        self.h = 1e-5       # step size for the central difference
        self.dx = None      # most recently computed derivative / gradient

    def d1_diff(self, f, x):
        """Central-difference derivative of a single-variable f at x."""
        fh1 = f(x + self.h)
        fh2 = f(x - self.h)
        self.dx = (fh1 - fh2) / (2 * self.h)
        return self.dx

    def tangent(self, series, f, x_loc):
        """Return the tangent line of f at x_loc, evaluated over series, for plotting."""
        y_loc = f(x_loc)
        self.d1_diff(f, x_loc)          # was hard-coded to func_1; use the f passed in
        b = y_loc - self.dx * x_loc     # intercept of y = f'(x_loc) * x + b
        y_series = self.dx * series + b
        return y_series

    # For f(x1, x2, x3, ...): central difference along each coordinate.
    def dn_diff(self, f, x):
        grad = np.zeros_like(x)
        for i in range(len(x)):
            temp_val = x[i]
            x[i] = temp_val + self.h
            fxh1 = f(x)                 # f(..., x_i + h, ...)
            x[i] = temp_val - self.h
            fxh2 = f(x)                 # f(..., x_i - h, ...)
            grad[i] = (fxh1 - fxh2) / (2 * self.h)
            x[i] = temp_val             # restore x_i before the next coordinate
        self.dx = grad
        return self.dx

    def gradient_descent(self, f, init_x, lr=0.01, step_num=1000):
        """Repeatedly step x against the numerical gradient of f (updates init_x in place)."""
        x = init_x
        for i in range(step_num):
            self.dn_diff(f, x)
            x -= lr * self.dx
        return x
```
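
A quick usage sketch. The expected values follow from the math above: $f'(2) = 4$ for `func_1`, and $\nabla f(2, 3) = (2 x_1,\ 3 x_2^2) = (4, 27)$ for `func_2`:

```python
diff = Differentiate()

# Derivative of func_1 at x = 2: f'(x) = 2x, so expect ~4.0.
print(diff.d1_diff(func_1, 2.0))          # ~4.0

# Tangent line of func_1 at x = 2 over the plotting range.
tangent_line = diff.tangent(x_lim, func_1, 2.0)

# Gradient of func_2 at (2, 3): expect ~[4. 27.].
print(diff.dn_diff(func_2, input_val))
```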
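
And a minimal gradient-descent run. Note that `func_2` is unbounded below in $x_2$ (the $x_2^3$ term), so descent on it diverges; the quadratic bowl used here is my own test function, not one from the listing above:

```python
# Assumed test function (not in the original): f(x1, x2) = x1^2 + x2^2, minimum at the origin.
bowl = lambda x: x[0]**2 + x[1]**2

diff = Differentiate()
x_min = diff.gradient_descent(bowl, init_x=np.array([2.0, 3.0]), lr=0.1, step_num=200)
print(x_min)    # both coordinates shrink toward ~0
```

Since `gradient_descent` modifies `init_x` in place (`x -= lr * self.dx`), pass a copy if the starting point needs to be reused afterwards.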