
DHIVYA S 22D027

OPTIMIZATION
1] Find the number of experiments to be conducted in the following methods to obtain a value of LN/L0 = 0.001
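Both programs below rest on the textbook reduction-ratio relations for these searches: in the Fibonacci method the final interval satisfies LN/L0 = 1/FN, so N is read off the Fibonacci sequence, while in the golden section method the interval shrinks by a factor of 1/1.618 per experiment, so N follows from a logarithm of the required ratio.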
a) Fibonacci method
def Fibo(n):
    # Fibonacci numbers with Fibo(0) = Fibo(1) = 1
    if n < 0:
        print("Incorrect input")
    elif n == 0 or n == 1:
        return 1
    else:
        return Fibo(n-1) + Fibo(n-2)

def ratio(x):
    # Walk up the Fibonacci sequence until it reaches 1/x;
    # step back one index if the sequence overshoots
    n = 0
    while True:
        if Fibo(n) == 1/x:
            return n
        elif Fibo(n) > 1/x:
            return n - 1
        n = n + 1

x = 0.001
n = ratio(x)
print(f"The number of experiments to achieve a reduction ratio of {x} is: {n}")

OUTPUT
The number of experiments to achieve a reduction ratio of 0.001 is: 15
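As a quick check: with the indexing Fibo(0) = Fibo(1) = 1 used above, Fibo(15) = 987 and Fibo(16) = 1597, so 1597 is the first Fibonacci number to exceed 1/0.001 = 1000, and ratio() returns 16 - 1 = 15.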

b) Golden Section method


import math

def golden_section_search(ratio):
    gamma = 1.618  # golden ratio (approximate)
    # Each experiment shrinks the interval by a factor of 1/gamma
    n = math.floor((math.log(1/ratio) / math.log(gamma)) + 1)
    return n

ratio = 0.001
n = golden_section_search(ratio)
print(f"Number of experiments needed for reduction ratio {ratio}: {n}")

OUTPUT
Number of experiments needed for reduction ratio 0.001: 15
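The arithmetic behind this count: log(1/0.001) = log 1000 ≈ 6.9078 and log 1.618 ≈ 0.4812, so log 1000 / log 1.618 ≈ 14.36; adding 1 and taking the floor gives n = 15.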

2] Find the value of x in the interval (0,1) which minimizes the function f = x(x - 1.5) + 27 to within ±0.05 by (a) the golden section method and (b) the Fibonacci method.
def f(x):
    return x * (x - 1.5) + 27

def golden_section(a, b, tolerance=0.05):
    phi = 1.618  # golden ratio (approximate)
    x1 = b - (b - a) / phi
    x2 = a + (b - a) / phi
    while abs(b - a) > tolerance:
        if f(x1) < f(x2):
            # Minimum lies in [a, x2]: drop the right part
            b = x2
            x2 = x1
            x1 = b - (b - a) / phi
        else:
            # Minimum lies in [x1, b]: drop the left part
            a = x1
            x1 = x2
            x2 = a + (b - a) / phi
    return (a + b) / 2

def fibonacci(a, b, tolerance=0.05):
    # Grow the Fibonacci sequence until it can deliver the required reduction
    fib = [1, 1]
    while (b - a) / tolerance > fib[-1]:
        fib.append(fib[-1] + fib[-2])
    n = len(fib) - 3
    x1 = a + (fib[n] / fib[n+2]) * (b - a)
    x2 = a + (fib[n+1] / fib[n+2]) * (b - a)
    for i in range(n, -1, -1):
        if f(x1) < f(x2):
            b = x2
            x2 = x1
            x1 = a + (fib[i] / fib[i+2]) * (b - a)
        else:
            a = x1
            x1 = x2
            x2 = a + (fib[i+1] / fib[i+2]) * (b - a)
    return (a + b) / 2

a = 0
b = 1
result_g = golden_section(a, b)
print("Golden Section Method:", result_g)
result_f = fibonacci(a, b)
print("Fibonacci Method:", result_f)

OUTPUT
Golden Section Method: 0.7467155548360327
Fibonacci Method: 0.735566021280307
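As a sanity check, the exact minimizer of f(x) = x(x - 1.5) + 27 comes from f'(x) = 2x - 1.5 = 0, i.e. x* = 0.75; both 0.7467 and 0.7356 lie within the required ±0.05 of this point.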

3] Perform two iterations of the given methods to minimize the function f(x1, x2) = 100(x2 - x1^2)^2 + 27 + (1 - x1)^2 from the starting point (-1.2, 1.0).
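(This is the Rosenbrock test function shifted by the constant 27; its minimum lies at (x1, x2) = (1, 1), where f = 27.)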

a) Univariate Method
def f(x1, x2, N):
    return 100 * (x2 - x1**2)**2 + N + (1 - x1)**2

def univariate_minimization(N, x1, x2):
    # Search along the x1 coordinate only, using the partial derivative w.r.t. x1
    df_dx1 = -400 * x1 * (x2 - x1**2) - 2 * (1 - x1)
    alpha = 0.01  # step size
    x1 -= alpha * df_dx1
    f_val = f(x1, x2, N)
    return x1, f_val

N = 27
x1 = -1.2
x2 = 1.0
for i in range(2):
    x1, f_val = univariate_minimization(N, x1, x2)
    print(f"Iteration {i+1}: x1 = {x1:.4f}, f(x1, x2) = {f_val:.4f}")

OUTPUT
Iteration 1: x1 = 0.9560, f(x1, x2) = 27.7426
Iteration 2: x1 = 1.2860, f(x1, x2) = 69.8229
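The first iterate can be verified by hand: df/dx1 at (-1.2, 1.0) is -400(-1.2)(1.0 - 1.44) - 2(1 + 1.2) = -211.2 - 4.4 = -215.6, so x1 = -1.2 - 0.01(-215.6) = 0.956.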

b) Gradient Method

import numpy as np

def f(x1, x2, N):
    return 100 * (x2 - x1**2)**2 + N + (1 - x1)**2

def grad_f(x1, x2):
    df_dx1 = -400 * x1 * (x2 - x1**2) - 2 * (1 - x1)
    df_dx2 = 200 * (x2 - x1**2)
    return np.array([df_dx1, df_dx2])

def gradient_descent(N, x_init, learning_rate, iterations):
    x = np.array(x_init, dtype=float)
    for i in range(iterations):
        # Step against the gradient (steepest descent)
        grad = grad_f(*x)
        x -= learning_rate * grad
    return x, f(*x, N)

N = 27
x_init = [-1.2, 1.0]
learning_rate = 0.01
iterations = 2
x_opt, f_opt = gradient_descent(N, x_init, learning_rate, iterations)
print(f"Optimal solution: x = {x_opt}, f(x) = {f_opt}")

OUTPUT
Optimal solution: x = [0.956 1.88 ], f(x) = 27.190400000000004
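For reference, the first steepest-descent step reproduces the reported point: grad f(-1.2, 1.0) = (-215.6, -88), so x becomes (-1.2, 1.0) - 0.01(-215.6, -88) = (0.956, 1.88); the second step then moves much further, since the gradient at (0.956, 1.88) is still large.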

c) Conjugate Gradient Method

import numpy as np

def f(x1, x2, N):
    return 100 * (x2 - x1**2)**2 + N + (1 - x1)**2

def grad_f(x1, x2):
    df_dx1 = -400 * x1 * (x2 - x1**2) - 2 * (1 - x1)
    df_dx2 = 200 * (x2 - x1**2)
    return np.array([df_dx1, df_dx2])

def conjugate_gradient(N, x_init, iterations):
    x = np.array(x_init, dtype=float)
    g = grad_f(*x)
    p = -g  # initial search direction: steepest descent
    for _ in range(iterations):
        alpha = 0.01  # fixed step size
        x_new = x + alpha * p
        g_new = grad_f(*x_new)
        # Polak-Ribiere coefficient, clipped at zero
        beta = max(0, np.dot(g_new, g_new - g) / np.dot(g, g))
        p = -g_new + beta * p
        x, g = x_new, g_new
    return x, f(*x, N)

N = 27
x_init = [-1.2, 1.0]
x_cg, f_cg = conjugate_gradient(N, x_init, iterations=2)
print("Conjugate Gradient Method:")
print("Optimal solution:", x_cg)
print("Minimum value of f(x1, x2):", f_cg)

OUTPUT
Conjugate Gradient Method:
Optimal solution: [0.956 1.88 ]
Minimum value of f(x1, x2): 27.190400000000004
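Two details worth noting: the beta update max(0, g1·(g1 - g0)/(g0·g0)) is the Polak-Ribiere rule clipped at zero (PR+), and the very first conjugate-gradient step coincides with steepest descent, p = -grad f(-1.2, 1.0) = (215.6, 88), so x + 0.01p = (0.956, 1.88), the point reported above.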
