
Experiment-2

AIM:
To implement the Cuckoo Search (CS) Algorithm.
Introduction:
Cuckoo search is an optimization algorithm developed by Xin-She Yang
and Suash Deb in 2009. It was inspired by the obligate brood parasitism of
some cuckoo species, which lay their eggs in the nests of host birds of
other species. Some host birds engage in direct conflict with the intruding
cuckoos: if a host bird discovers that the eggs are not its own, it will
either throw the alien eggs away or simply abandon its nest and build a
new one elsewhere. Some cuckoo species, such as the New World
brood-parasitic Tapera, have evolved so that the female parasitic cuckoos
are highly specialized in mimicking the color and pattern of the eggs of a
few chosen host species. Cuckoo search idealizes this breeding behavior
and can therefore be applied to various optimization problems.
Cuckoo search (CS) uses the following representation: each egg in a nest
represents a solution, and a cuckoo egg represents a new solution. The aim
is to use the new and potentially better solutions (cuckoos) to replace a
not-so-good solution in the nests. In the simplest form, each nest has one
egg. The algorithm can be extended to more complicated cases in which
each nest has multiple eggs representing a set of solutions.

CS is based on three idealized rules:

1. Each cuckoo lays one egg at a time and dumps its egg in a randomly
chosen nest;
2. The best nests with high-quality eggs will carry over to the next
generation;
3. The number of available host nests is fixed, and the egg laid by a
cuckoo is discovered by the host bird with a probability pa ∈ (0, 1).
Discovery operates on some set of the worst nests, and discovered
solutions are dropped from further calculations.

In addition, Yang and Deb found that the random-walk part of the search
is performed better by Lévy flights than by a simple random walk.
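To make these rules concrete before the full program, one run of CS can be sketched as a small self-contained Python example. This is an illustrative sketch only: the names used here (cuckoo_search, levy, n, pa, alpha, lam, iters) are placeholders and differ from the class-based implementation listed under Code below.

import numpy as np
from math import gamma, sin, pi

def sphere(x):
    return np.sum(x ** 2)

def levy(lam, size):
    # Mantegna's algorithm for Levy-distributed step lengths with exponent lam
    sigma = (gamma(1 + lam) * sin(pi * lam / 2) /
             (gamma((1 + lam) / 2) * lam * 2 ** ((lam - 1) / 2))) ** (1 / lam)
    u = np.random.normal(0, sigma, size)
    v = np.random.normal(0, 1, size)
    return u / np.abs(v) ** (1 / lam)

def cuckoo_search(f=sphere, n=25, dim=5, lo=-5.0, hi=5.0,
                  pa=0.25, alpha=0.01, lam=1.5, iters=500):
    nests = np.random.uniform(lo, hi, (n, dim))          # one egg (solution) per nest
    fit = np.apply_along_axis(f, 1, nests)
    for _ in range(iters):
        # Rule 1: each cuckoo lays an egg via a Levy flight and dumps it in a random nest
        for i in range(n):
            new = np.clip(nests[i] + alpha * levy(lam, dim), lo, hi)
            j = np.random.randint(n)
            if f(new) < fit[j]:                           # minimization: keep the better egg
                nests[j], fit[j] = new, f(new)
        # Rule 3: a fraction pa of the worst nests is discovered and rebuilt at random
        worst = np.argsort(fit)[int(n * (1 - pa)):]
        nests[worst] = np.random.uniform(lo, hi, (len(worst), dim))
        fit[worst] = np.apply_along_axis(f, 1, nests[worst])
    best = int(np.argmin(fit))                            # Rule 2: the best nest carries over
    return nests[best], fit[best]

if __name__ == "__main__":
    pos, val = cuckoo_search()
    print("best fitness:", val)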
Code:
config.py
class Config:
    PopulationSize = 50   # Population Size
    MaxDomain = 500       # variable upper limit
    MinDomain = -500      # variable lower limit
    Lambda = 1.5          # parameter for Levy flight
    Pa = 0.25             # discovery probability
    Step_Size = 0.01      # scaling factor for the Levy step
    Dimension = 10        # the number of dimensions
    Trial = 11            # number of independent trials
    Iteration = 1001      # iterations per trial

    @classmethod
    def get_population_size(cls):
        return cls.PopulationSize

    @classmethod
    def get_Pa(cls):
        return cls.Pa

    @classmethod
    def get_iteration(cls):
        return cls.Iteration

    @classmethod
    def get_trial(cls):
        return cls.Trial

    @classmethod
    def get_dimension(cls):
        return cls.Dimension

    @classmethod
    def get_max_domain(cls):
        return cls.MaxDomain

    @classmethod
    def set_max_domain(cls, _max_domain):
        cls.MaxDomain = _max_domain

    @classmethod
    def get_min_domain(cls):
        return cls.MinDomain

    @classmethod
    def set_min_domain(cls, _min_domain):
        cls.MinDomain = _min_domain

    @classmethod
    def get_lambda(cls):
        return cls.Lambda

    @classmethod
    def set_lambda(cls, _lambda):
        cls.Lambda = _lambda

    @classmethod
    def get_stepsize(cls):
        return cls.Step_Size
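
A brief usage note (illustrative snippet, not part of the assignment code): the getters and setters operate on the class attributes directly, so the search domain can be switched per benchmark, for example:

from config import Config as cf

print(cf.get_population_size())   # 50
cf.set_max_domain(5.12)           # e.g. the usual rastrigin domain [-5.12, 5.12]
cf.set_min_domain(-5.12)
print(cf.get_min_domain(), cf.get_max_domain())   # -5.12 5.12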

function.py
import numpy as np

"""
[Reference]
https://www.sfu.ca/~ssurjano/index.html
"""

def calculation(array, t):  # select whichever benchmark you want here
    fitness = schwefel(array)
    return fitness

"""Benchmark Functions"""
def eggholder(array):
    z = - (array[1] + 47) * np.sin(np.sqrt(abs(array[1] + (array[0] / 2) + 47))) \
        - array[0] * np.sin(np.sqrt(abs(array[0] - (array[1] + 47))))
    return z

def sphere(array):
    fitness = 0
    for i in range(len(array)):
        fitness = fitness + array[i] ** 2
    return fitness

def rastrigin(array):
    sum = 0
    for x in array:
        sum = sum + x ** 2 - 10 * np.cos(2 * np.pi * x)
    fitness = 10.0 * len(array) + sum
    return fitness

def schwefel(array):
    sum = 0
    for x in array:
        sum = sum + x * np.sin(np.sqrt(np.abs(x)))
    fitness = 418.9829 * len(array) - sum
    return fitness

def michalewicz(array):  # for Dimension = 2
    sum = 0
    m = 10
    for (i, x) in enumerate(array, start=1):
        sum = sum + np.sin(x) * np.sin((i * (x ** 2)) / np.pi) ** (2 * m)
    fitness = -sum
    return fitness

if __name__ == '__main__':
    a = np.array([2.20, 1.0])
    print(michalewicz(a))
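
As a quick sanity check (illustrative snippet, not part of the original listing), the benchmarks can be evaluated at their well-known global minima:

import numpy as np
import function as fn

print(fn.sphere(np.zeros(10)))                    # expected: 0.0
print(fn.rastrigin(np.zeros(10)))                 # expected: 0.0
print(fn.schwefel(np.full(10, 420.9687)))         # expected: close to 0
print(fn.eggholder(np.array([512.0, 404.2319])))  # expected: about -959.64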

individual.py
import numpy as np
import math
from config import Config as cf
import function as fn

def levy_flight(Lambda):
    # generate a step from the Levy distribution (Mantegna's algorithm)
    sigma1 = np.power((math.gamma(1 + Lambda) * np.sin((np.pi * Lambda) / 2))
                      / (math.gamma((1 + Lambda) / 2) * Lambda
                         * np.power(2, (Lambda - 1) / 2)), 1 / Lambda)
    sigma2 = 1

    u = np.random.normal(0, sigma1, size=cf.get_dimension())
    v = np.random.normal(0, sigma2, size=cf.get_dimension())
    step = u / np.power(np.fabs(v), 1 / Lambda)

    return step  # np.array, e.g. [ 1.37861233 -1.49481199  1.38124823]

class Individual:
    def __init__(self):
        self.position = np.random.rand(cf.get_dimension()) * \
            (cf.get_max_domain() - cf.get_min_domain()) + cf.get_min_domain()
        self.fitness = fn.calculation(self.position, 0)  # iteration = 0

    def get_position(self):
        return self.position

    def set_position(self, position):
        self.position = position

    def get_fitness(self):
        return self.fitness

    def set_fitness(self, fitness):
        self.fitness = fitness

    def abandon(self):
        # abandon some variables: re-randomize each component with probability Pa
        for i in range(len(self.position)):
            p = np.random.rand()
            if p < cf.get_Pa():
                self.position[i] = np.random.rand() * \
                    (cf.get_max_domain() - cf.get_min_domain()) + cf.get_min_domain()

    def get_cuckoo(self):
        step_size = cf.get_stepsize() * levy_flight(cf.get_lambda())
        # Update position
        self.position = self.position + step_size

        # Simple Boundary Rule
        for i in range(len(self.position)):
            if self.position[i] > cf.get_max_domain():
                self.position[i] = cf.get_max_domain()
            if self.position[i] < cf.get_min_domain():
                self.position[i] = cf.get_min_domain()

    def print_info(self, i):
        print("id:", "{0:3d}".format(i),
              "|| fitness:", str(self.fitness).rjust(14, " "),
              "|| position:", np.round(self.position, decimals=4))

if __name__ == '__main__':
    print(levy_flight(cf.get_lambda()))
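
The heavy-tailed character of the Lévy steps (mostly small moves with occasional very large jumps) can be checked directly. A small illustrative probe, not part of the assignment code:

import numpy as np
from config import Config as cf
import individual as id

steps = np.concatenate([np.abs(id.levy_flight(cf.get_lambda())) for _ in range(1000)])
print("median |step|:", np.median(steps))
print("max    |step|:", np.max(steps))   # typically several orders of magnitude larger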
cs.py
import numpy as np
import individual as id
import function as fn
import sys
import os
import csv
from config import Config as cf

if os.path.exists("results"):
    pass
else:
    os.mkdir("results")

results = open("results" + os.sep + "results.csv", "w")
results_writer = csv.writer(results, lineterminator="\n")

def main():
    for trial in range(cf.get_trial()):
        np.random.seed(trial)

        results_list = []  # fitness list
        cs_list = []

        """Generate Initial Population"""
        for p in range(cf.get_population_size()):
            cs_list.append(id.Individual())

        """Sort List"""
        cs_list = sorted(cs_list, key=lambda ID: ID.get_fitness())

        """Find Initial Best"""
        BestPosition = cs_list[0].get_position().copy()  # copy so later in-place updates cannot alias it
        BestFitness = fn.calculation(cs_list[0].get_position(), 0)

        """↓↓↓Main Loop↓↓↓"""
        for iteration in range(cf.get_iteration()):
            """Generate New Solutions"""
            for i in range(len(cs_list)):
                cs_list[i].get_cuckoo()
                cs_list[i].set_fitness(fn.calculation(cs_list[i].get_position(), iteration))

                """random choice (say j)"""
                j = np.random.randint(low=0, high=cf.get_population_size())
                while j == i:  # random id (say j) ≠ i
                    j = np.random.randint(0, cf.get_population_size())

                # for minimization problems
                if cs_list[i].get_fitness() < cs_list[j].get_fitness():
                    cs_list[j].set_position(cs_list[i].get_position())
                    cs_list[j].set_fitness(cs_list[i].get_fitness())

            """Sort (to Keep Best)"""
            cs_list = sorted(cs_list, key=lambda ID: ID.get_fitness())

            """Abandon Solutions (exclude the best)"""
            for a in range(1, len(cs_list)):
                r = np.random.rand()
                if r < cf.get_Pa():
                    cs_list[a].abandon()
                    cs_list[a].set_fitness(fn.calculation(cs_list[a].get_position(), iteration))

            """Sort to Find the Best"""
            cs_list = sorted(cs_list, key=lambda ID: ID.get_fitness())

            if cs_list[0].get_fitness() < BestFitness:
                BestFitness = cs_list[0].get_fitness()
                BestPosition = cs_list[0].get_position().copy()
            sys.stdout.write("\r Trial:%3d , Iteration:%7d, BestFitness:%.4f"
                             % (trial, iteration, BestFitness))

            results_list.append(str(BestFitness))

        results_writer.writerow(results_list)
        sys.stdout.write("\n")

if __name__ == '__main__':
    main()
    results.close()
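
Since each trial writes one row to results/results.csv (the best fitness recorded at every iteration), the runs can be summarized afterwards with a short script such as the following. This is an illustrative snippet; the file layout is taken from the code above.

import csv
import numpy as np

with open("results/results.csv") as f:
    finals = [float(row[-1]) for row in csv.reader(f) if row]  # last column = final best fitness

print("trials          :", len(finals))
print("mean final best :", np.mean(finals))
print("std. final best :", np.std(finals))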

Output:

Findings and Learnings:


An important advantage of this algorithm is its simplicity. In fact,
compared with other population- or agent-based metaheuristic algorithms
such as particle swarm optimization and harmony search, there is
essentially only a single parameter pa in Cuckoo Search (apart from the
population size n). Therefore, it is very easy to implement.
