
In [2]:

import numdifftools as nd
import numpy as np

In [3]:
def six_hump_camel(x):
    # Six-hump camel function, written out in expanded form.
    return (x[0]**6/3) - 2.1*x[0]**4 + 4*x[1]**4 + 4*x[0]**2 - 4*x[1]**2 + x[0]*x[1]
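
For reference, the gradient of this function is available in closed form, which gives a way to sanity-check the numerical gradients below. A minimal sketch (the helper six_hump_camel_grad is illustrative, not part of the original notebook):

def six_hump_camel_grad(x):
    # df/dx0 = 2*x0**5 - 8.4*x0**3 + 8*x0 + x1
    # df/dx1 = 16*x1**3 - 8*x1 + x0
    return np.array([2*x[0]**5 - 8.4*x[0]**3 + 8*x[0] + x[1],
                     16*x[1]**3 - 8*x[1] + x[0]])

At [-1, 1] this returns [-0.6, 7.0], matching the nd.Gradient estimate below.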

In [4]:
grad1 = nd.Gradient(six_hump_camel)([-1,1])

In [5]:
print(grad1)

[-0.6  7. ]

In [6]:
grad1_2d = grad1[..., np.newaxis]
grad1_2d_T = np.transpose(grad1_2d)
A = grad1_2d_T @ grad1_2d
print(A)

[[49.36]]
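
A is the squared Euclidean norm of the gradient, (-0.6)**2 + 7.0**2 = 49.36. The same number comes out of a plain dot product, without the reshape:

float(grad1 @ grad1)   # 49.36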

In [7]:
from sympy import *
alfa = symbols('alfa')
# Parametrize the line from [-1, 1] along the descent direction -grad1 = [0.6, -7]:
x0 = -1 - alfa*(-0.6)
x1 = 1 - alfa*7
y = [x0, x1]
f = six_hump_camel(y)

# Derivative of f along the line; its roots are the candidate step sizes.
ff = diff(f, alfa)

In [8]:
ff

Out[8]: -397.52*alfa - 112*(1 - 7*alfa)**3 + 1.2*(0.6*alfa - 1)**5 - 5.04*(0.6*alfa - 1)**3 + 58.8

In [9]:
def m(alfa):
    # ff from Out[8] as a plain Python function for fsolve.
    return -397.52*alfa - 112*(1 - 7*alfa)**3 + 1.2*(0.6*alfa - 1)**5 - 5.04*(0.6*alfa - 1)**3 + 58.8

In [10]:
from scipy.optimize import fsolve

In [11]:
fsolve(m,1)

Out[11]: array([0.2381632])

In [12]:
fsolve(m,0)

Out[12]: array([0.03464403])

In [13]:
# Roots of ff = 0 (solved externally): {alfa -> 0.034644}, {alfa -> 0.155721}, {alfa -> 0.238163}
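
The same three stationary points can be recovered without leaving the notebook. A minimal sketch using sympy's numerical polynomial roots (this assumes ff from In [7] is still the symbolic derivative):

Poly(ff, alfa).nroots()   # all five roots of the quintic; the real ones are
                          # approximately 0.034644, 0.155721 and 0.238163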

In [14]:
# Take the smallest positive root, alfa = 0.034644, as the step size:
alfa = 0.034644
x0_1 = -1 - alfa*(-0.6)
x1_1 = 1 - alfa*7

In [15]:
print(x0_1)
print(x1_1)

-0.9792136
0.757492
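
This is the update x1 = x0 - alfa*grad f(x0) written out by coordinate; in vector form the same point is:

np.array([-1.0, 1.0]) - 0.034644 * grad1   # -> array([-0.9792136, 0.757492])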

In [16]:
grad2 = nd.Gradient(six_hump_camel)([x0_1,x1_1])

In [17]:
grad2

Out[17]: array([-0.98982097, -0.08483819])

In [18]:
grad2_2d = grad2[..., np.newaxis]
grad2_2d_T = np.transpose(grad2_2d)
B = grad2_2d_T @ grad2_2d
print(B)

[[0.98694307]]
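
B is the squared gradient norm at the new point; the first step cut the gradient norm from about 7.026 to about 0.993:

print(np.sqrt(A), np.sqrt(B))   # [[7.026...]] [[0.993...]]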

In [19]:
from sympy import *
alfa2 = symbols('alfa2')
x0_2 = (x0_1-alfa2*(-0.98982097))
x1_2 =(x1_1 - alfa2*(-0.08483819))
yy = [x0_2,x1_2]
f1 = six_hump_camel(yy)

fff = diff(f1, alfa2)

In [20]:
fff

Out[20]: 7.94833351239181*alfa2 + 0.589991192782428*(0.111998793386597*alfa2 + 1)**3 + 1.88091815325895*(alfa2 - 0.989283546902426)**5 - 8.06317132270327*(alfa2 - 0.989283546902426)**3 - 7.60137648810952

In [21]:
def mm(alfa):
    # fff from Out[20] as a plain Python function for fsolve.
    return (7.94833351239181*alfa + 0.589991192782428*(0.111998793386597*alfa + 1)**3
            + 1.88091815325895*(alfa - 0.989283546902426)**5
            - 8.06317132270327*(alfa - 0.989283546902426)**3
            - 7.60137648810952)

In [22]:
fsolve(mm,1)

Out[22]: array([0.85627349])

In [23]:
# Roots of fff = 0 (solved externally): {alfa2 -> -0.717592}, {alfa2 -> -0.139158}, {alfa2 -> 0.856273}

In [24]:
# Take the only positive root, alfa2 = 0.856273, as the step size:
alfa2 = 0.856273
x0_2 = -0.9792136 - alfa2*(-0.98982097)
x1_2 = 0.757492 - alfa2*(-0.08483819)

In [25]:
print(x0_2)
print(x1_2)

-0.13165662855519
0.83013665146587

In [26]:
grad3 = nd.Gradient(six_hump_camel)([x0_2,x1_2])

In [27]:
grad3

Out[27]: array([-0.20402614, 2.38036159])

In [28]:
grad3_2d = grad3[..., np.newaxis]
grad3_2d_T = np.transpose(grad3_2d)
C = grad3_2d_T @ grad3_2d
print(C)

[[5.70774794]]

In [29]:
import numpy as np

def gradient_descent(gradient, start, learn_rate, n_iter=1, tolerance=1e-06):
    # Cast start to a float array so the in-place update is well defined.
    vector = np.asarray(start, dtype=float)
    for _ in range(n_iter):
        diff = -learn_rate * gradient(vector)
        # Stop once the step is below the tolerance in every coordinate.
        if np.all(np.abs(diff) <= tolerance):
            break
        vector += diff
    return vector

# One step with the first exact-line-search alfa reproduces (x0_1, x1_1).
gradient_descent(nd.Gradient(six_hump_camel), [-1, 1], 0.03464403)

Out[29]: array([-0.97921358, 0.75749179])
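
With a small fixed learning rate and more iterations, the same routine should settle near a local minimum instead of taking a single step. A minimal sketch (the learning rate 0.01 and the iteration count are illustrative choices, not from the original run):

x_min = gradient_descent(nd.Gradient(six_hump_camel), [-1, 1], learn_rate=0.01, n_iter=1000)
print(x_min, six_hump_camel(x_min))   # expected to approach (-0.0898, 0.7126), f about -1.0316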

In [30]:
six_hump_camel([-1,1])

Out[30]: 1.2333333333333334

In [31]:
six_hump_camel([-0.9792136,0.757492])

Out[31]: 0.47857155283954844

In [32]:
six_hump_camel([-0.13165662855519,0.83013665146587])

Out[32]: -0.8975124517303251
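
Across the two exact-line-search steps the objective falls from 1.2333 to 0.4786 to -0.8975, heading toward the function's known global minimum of about -1.0316, attained at (0.0898, -0.7126) and (-0.0898, 0.7126). As a quick check against the literature value:

six_hump_camel([-0.0898, 0.7126])   # approximately -1.0316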
