Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adds differential evolution algorithm #368

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions algorithms/de_algorithm/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# de-algorithm
Implementation of the evolutionary computation algorithm called differential evolution (DE), which optimizes a problem by iteratively trying to improve a candidate solution with regard to a given measure of quality.
98 changes: 98 additions & 0 deletions algorithms/de_algorithm/de.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import random
import copy

from parameters import (
iterations_number, num_of_individuos, probability_of_recombination,
dimensions
)
from functions import AFunction
from selections import ASelection


class DifferentialEvolution:
    """Differential evolution (DE) minimizer.

    Evolves a population of real-valued vectors through mutation
    (trial-vector creation), binomial crossover and greedy selection.

    Parameters
    ----------
    function : AFunction
        Objective exposing ``calculate_fitness``, ``lower_bound`` and
        ``upper_bound``.
    type_scale_factor : bool
        When True, the scale factor F starts at 0.9 and decays linearly
        towards 0.4 over the run; otherwise F is fixed at 0.5.
    selection : ASelection
        Strategy used to pick the base (target) vector for mutation.
    """

    def __init__(self, function, type_scale_factor, selection):
        self.function = function
        self.floating_scale_factor = type_scale_factor
        self.selection = selection
        # Floating F starts high (0.9) and decays each generation via
        # update_parameters(); otherwise it stays fixed at 0.5.
        self.scale_factor = 0.9 if type_scale_factor else 0.5

    def differential_evolution(self):
        """Run the optimizer and return the best fitness found.

        Bug fix vs. the original: ``best`` now tracks the minimum fitness
        seen over the whole run; previously it was overwritten with every
        individual's fitness, so the method returned whatever fitness the
        last-processed individual had.
        """
        population = self.initializa_population()
        best = float("inf")

        for _ in range(iterations_number):
            next_generation = []
            for individuo in population:
                fitness = self.function.calculate_fitness(individuo)
                trial = self.create_trial_vector(population, individuo)
                offspring = self.create_offspring(individuo, trial)
                offspring_fitness = self.function.calculate_fitness(offspring)
                # Greedy selection: keep whichever of parent/offspring is
                # fitter (lower fitness = better; we are minimizing).
                if offspring_fitness < fitness:
                    next_generation.append(offspring)
                    survivor_fitness = offspring_fitness
                else:
                    next_generation.append(individuo)
                    survivor_fitness = fitness
                if survivor_fitness < best:
                    best = survivor_fitness

            # Decay F once per generation; the step is sized so F sweeps
            # from 0.9 towards 0.4 across the whole run.
            if self.floating_scale_factor:
                self.update_parameters()

            population = next_generation

        print(best)
        return best

    def update_parameters(self):
        """Linearly decay F from 0.9 towards 0.4 over iterations_number steps.

        (Original hardcoded 10000 here; tied to iterations_number so the
        decay stays consistent if the parameter changes.)
        """
        self.scale_factor -= (0.9 - 0.4) / iterations_number

    def create_offspring(self, individuo, experimental_vector):
        """Binomial crossover.

        Each gene is taken from the trial vector with probability
        ``probability_of_recombination``, otherwise from the parent.
        """
        return [
            trial_gene if random.random() < probability_of_recombination
            else parent_gene
            for parent_gene, trial_gene in zip(individuo, experimental_vector)
        ]

    def create_trial_vector(self, population, individuo):
        """Build the mutant vector ``target + F * (r1 - r2)``.

        The target comes from the configured selection strategy; r1 and r2
        are two further distinct random individuals, all different from
        ``individuo``.
        """
        # A shallow copy is enough: we only remove references, never mutate
        # the vectors themselves (the original paid for a deepcopy of the
        # whole population on every call).
        candidates = list(population)
        candidates.remove(individuo)

        destiny = self.selection.selection_target_vector(
            candidates, self.function, individuo
        )
        # Some strategies (rand-to-best style) may return a spliced vector
        # that is not a population member; only remove it when present.
        if destiny in candidates:
            candidates.remove(destiny)

        vect1 = candidates.pop(random.randrange(len(candidates)))
        vect2 = candidates.pop(random.randrange(len(candidates)))

        return [
            d + self.scale_factor * (a - b)
            for d, a, b in zip(destiny, vect1, vect2)
        ]

    def initializa_population(self):
        """Sample num_of_individuos vectors uniformly within the bounds.

        (Method name kept as-is — typo included — for backward
        compatibility with existing callers.)
        """
        lower = self.function.lower_bound
        upper = self.function.upper_bound
        return [
            [lower + random.random() * (upper - lower)
             for _ in range(dimensions)]
            for _ in range(num_of_individuos)
        ]
13 changes: 13 additions & 0 deletions algorithms/de_algorithm/enums.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from enum import Enum


class EnumSelection(Enum):
    """Identifies the strategy used to pick the DE base (target) vector."""

    BEST = 1  # fittest individual in the population
    RAND = 2  # uniformly random individual
    RAND_TO_BEST = 3  # splice of a random individual and the best one
    CURRENT_TO_BEST = 4  # splice of a random individual and the current one


class EnumCrossover(Enum):
    """Identifies the DE crossover scheme."""

    BINOMIAL = 1  # per-gene independent recombination
    EXPONENCIAL = 2  # (name kept as-is for compatibility)
53 changes: 53 additions & 0 deletions algorithms/de_algorithm/functions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import math

from abc import ABCMeta, abstractmethod


class AFunction(metaclass=ABCMeta):
    """Abstract base class for benchmark objective functions.

    Bug fix: the original used the Python-2 ``__metaclass__ = ABCMeta``
    attribute, which is a silent no-op in Python 3, so ``@abstractmethod``
    was never enforced. The Python-3 ``metaclass=`` syntax restores it.
    """

    # Default search-space bounds; concrete functions override these.
    upper_bound = 100
    lower_bound = -100

    @abstractmethod
    def calculate_fitness(self, position):
        """Return the fitness (objective value, lower is better) of *position*."""
        pass


class Sphere(AFunction):
    """Sphere benchmark: f(x) = sum(x_i^2), minimum 0 at the origin."""

    def __init__(self):
        # Bug fix: set bounds on the instance instead of mutating the
        # shared AFunction class attributes — the original made every
        # function instance clobber every other one's bounds.
        self.upper_bound = 100
        self.lower_bound = -100

    def calculate_fitness(self, position_list):
        """Return the sum of squared coordinates."""
        return sum(position ** 2 for position in position_list)


class Rastrigin(AFunction):
    """Rastrigin benchmark: highly multimodal, minimum 0 at the origin."""

    def __init__(self):
        # Bug fix: instance attributes instead of mutating the shared
        # AFunction class attributes (bounds leaked across instances).
        self.upper_bound = 5.12
        self.lower_bound = -5.12

    def calculate_fitness(self, position_list):
        """Return sum(x_i^2 + 10 - 10*cos(2*pi*x_i))."""
        return sum(
            position ** 2 + 10 - 10 * math.cos(2 * math.pi * position)
            for position in position_list
        )


class Rosenbrocks(AFunction):
    """Rosenbrock benchmark: narrow curved valley, minimum 0 at (1, ..., 1)."""

    def __init__(self):
        # Bug fix: instance attributes instead of mutating the shared
        # AFunction class attributes (bounds leaked across instances).
        self.upper_bound = 30
        self.lower_bound = -30

    def calculate_fitness(self, position_list):
        """Return sum over consecutive pairs of 100*(x_i^2 - x_{i+1})^2 + (x_i - 1)^2."""
        solution = 0
        for current, nxt in zip(position_list, position_list[1:]):
            solution += 100 * (current ** 2 - nxt) ** 2 + (current - 1) ** 2
        return solution
11 changes: 11 additions & 0 deletions algorithms/de_algorithm/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from functions import Sphere
from de import DifferentialEvolution
from selections import Rand


def main():
    """Run DE on the Sphere function with a fixed scale factor and random selection."""
    optimizer = DifferentialEvolution(Sphere(), False, Rand())
    optimizer.differential_evolution()


# Bug fix: guard the entry point so importing this module does not
# immediately launch a 10000-iteration optimization run.
if __name__ == "__main__":
    main()
4 changes: 4 additions & 0 deletions algorithms/de_algorithm/parameters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Global DE hyper-parameters shared by de.py and selections.py.
iterations_number = 10000  # number of generations the optimizer runs
num_of_individuos = 30  # population size
probability_of_recombination = 0.6  # per-gene crossover probability (CR)
dimensions = 30  # dimensionality of each candidate solution vector
65 changes: 65 additions & 0 deletions algorithms/de_algorithm/selections.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import random

from abc import ABCMeta, abstractmethod
from enums import EnumSelection
from parameters import dimensions


class ASelection(metaclass=ABCMeta):
    """Abstract base class for target-vector selection strategies.

    Bug fix: the original used the Python-2 ``__metaclass__ = ABCMeta``
    attribute, a silent no-op in Python 3; ``metaclass=`` restores
    abstract-method enforcement.
    """

    def __init__(self, enum_selection):
        # EnumSelection tag identifying the concrete strategy.
        self.enum_selection = enum_selection

    @abstractmethod
    def selection_target_vector(self, population, function, individuo):
        """Return the base (target) vector chosen from *population*."""
        pass


class Best(ASelection):
    """Selects the fittest (lowest-fitness) individual as the target vector."""

    def __init__(self):
        # Bug fix: tag the instance instead of mutating the shared
        # ASelection class attribute (which leaked across all strategies).
        self.enum_selection = EnumSelection.BEST

    def selection_target_vector(self, population, function, individuo):
        """Return the individual with the minimum fitness.

        Bug fix vs. the original: the running best fitness was never
        updated inside the loop, so it returned the *last* individual
        below a hardcoded 100000 threshold rather than the fittest one.
        """
        best_fitness = float("inf")
        best_individuo = []
        for candidate in population:
            fitness = function.calculate_fitness(candidate)
            if fitness < best_fitness:
                best_fitness = fitness
                best_individuo = candidate
        return best_individuo


class Rand(ASelection):
    """Selects a uniformly random individual as the target vector."""

    def __init__(self):
        # Bug fix: tag the instance instead of mutating the shared
        # ASelection class attribute (which leaked across all strategies).
        self.enum_selection = EnumSelection.RAND

    def selection_target_vector(self, population, function, individuo):
        """Return one individual chosen uniformly at random."""
        return random.choice(population)


class RandToBest(ASelection):
    """Target vector spliced from a random individual and the best one."""

    def __init__(self):
        # Bug fix: tag the instance instead of mutating the shared
        # ASelection class attribute (which leaked across all strategies).
        self.enum_selection = EnumSelection.RAND_TO_BEST

    def selection_target_vector(self, population, function, individuo):
        """Return rand[:k] + best[k:] for a random split point k.

        Bug fix vs. the original: the delegated calls omitted the required
        ``function``/``individuo`` arguments and always raised TypeError.
        """
        rand = Rand().selection_target_vector(population, function, individuo)
        best = Best().selection_target_vector(population, function, individuo)
        limiar = random.randint(0, dimensions - 1)
        return rand[0:limiar] + best[limiar:]


class CurrentToBest(ASelection):
    """Target vector spliced from a random individual and the current one."""

    def __init__(self):
        # Bug fix: tag the instance instead of mutating the shared
        # ASelection class attribute (which leaked across all strategies).
        self.enum_selection = EnumSelection.CURRENT_TO_BEST

    def selection_target_vector(self, population, function, individuo):
        """Return rand[:k] + individuo[k:] for a random split point k.

        Bug fix vs. the original: the delegated call omitted the required
        ``function``/``individuo`` arguments and always raised TypeError.
        """
        rand = Rand().selection_target_vector(population, function, individuo)
        limiar = random.randint(0, dimensions - 1)
        return rand[0:limiar] + individuo[limiar:]