Implement Particle Swarm Optimisation for Generating Alternatives #16

Draft: wants to merge 9 commits into base: main
2 changes: 1 addition & 1 deletion .github/workflows/Test.yml
@@ -32,7 +32,7 @@ jobs:
fail-fast: false
matrix:
version:
- "1.6"
- "1.7"
- "1"
os:
- ubuntu-latest
8 changes: 6 additions & 2 deletions Project.toml
@@ -4,12 +4,16 @@ authors = ["Matthijs Arnoldus <[email protected]> and contributors"]
version = "0.1.0"

[deps]
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"

[compat]
+Distances = "0.10"
JuMP = "1"
MathOptInterface = "1"
-Distances = "0.10"
-julia = "1.6"
+Metaheuristics = "3.3"
+julia = "1.7"
5 changes: 5 additions & 0 deletions src/NearOptimalAlternatives.jl
@@ -5,9 +5,14 @@ module NearOptimalAlternatives
using JuMP
using Distances
using MathOptInterface
using Metaheuristics
using DataStructures
using Statistics

include("results.jl")
include("alternative-optimisation.jl")
include("generate-alternatives.jl")
include("alternative-metaheuristics.jl")
include("algorithms/PSOGA/PSOGA.jl")

end
233 changes: 233 additions & 0 deletions src/algorithms/PSOGA/PSOGA.jl
@@ -0,0 +1,233 @@
import Metaheuristics: initialize!, update_state!, final_stage!
import Metaheuristics: AbstractParameters, gen_initial_state, Algorithm, get_position
# genetic operators
import Metaheuristics: SBX_crossover, polynomial_mutation!, create_solution, is_better
import Metaheuristics: reset_to_violated_bounds!
import Metaheuristics: velocity
include("is_better.jl")

"""
Structure holding all parameters for PSOGA (Particle Swarm Optimisation for Generating Alternatives).
"""
mutable struct PSOGA <: AbstractParameters
N::Int # Total population size
N_solutions::Int # Number of solutions sought. This is the same as the number of subpopulations searching for a solution.
C1::Float64 # Cognitive parameter. Used to compute velocity based on own best solution.
C2::Float64 # Social parameter. Used to compute velocity based on best solution in subpopulation.
ω::Float64 # Inertia parameter. Used to compute velocity to ensure not too large changes.
v::Array{Float64} # Array of velocities per individual.
flock::Array # Array of all current positions of each of the individuals.
subBest::Array # Array of best solutions per subpopulation.
maximise_total::Bool # If true, we maximise the sum of distances between a point and all centroids of other subpopulations, else we maximise the minimum distance between a point and the centroids of other subpopulations.
end

"""
PSOGA(;
N = 100,
N_solutions = 1,
C1 = 2.0,
C2 = 2.0,
ω = 0.8,
v = Float64[],
flock = Metaheuristics.xf_indiv[],
subBest = Metaheuristics.xf_indiv[],
maximise_total = true,
information = Information(),
options = Options(),
)

Construct a PSOGA Metaheuristic algorithm.

# Arguments
- `N::Int`: total population size.
- `N_solutions::Int`: number of solutions sought. This is the same as the number of subpopulations searching for a solution.
- `C1::Float64`: cognitive parameter. Used to compute velocity based on own best solution.
- `C2::Float64`: social parameter. Used to compute velocity based on best solution in subpopulation.
- `ω::Float64`: inertia parameter. Used to compute velocity to ensure not too large changes.
- `v::Array{Float64}`: array of velocities per individual.
- `flock::Array`: array of all current positions of each of the individuals.
- `subBest::Array`: array of best solutions per subpopulation.
- `maximise_total::Bool`: if true, we maximise the sum of distances between a point and all centroids of other subpopulations, else we maximise the minimum distance between a point and the centroids of other subpopulations.
"""
function PSOGA(;
N::Int = 100,
N_solutions::Int = 1,
C1::Float64 = 2.0,
C2::Float64 = 2.0,
ω::Float64 = 0.8,
v::Array{Float64} = Float64[],
flock::Array = Metaheuristics.xf_indiv[],
subBest::Array = Metaheuristics.xf_indiv[],
maximise_total::Bool = true,
information = Information(),
options = Options(),
)
parameters =
PSOGA(N, N_solutions, promote(Float64(C1), C2, ω)..., v, flock, subBest, maximise_total)

return Algorithm(parameters, information = information, options = options)
end
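For orientation, a minimal usage sketch of how such an algorithm object would typically be driven through the Metaheuristics.jl `optimize` interface. This example is not part of the PR; the objective, bounds and parameter values are made up for illustration, and it assumes `PSOGA` is in scope (e.g. via the package module).

```julia
using Metaheuristics

# Toy constrained objective (illustrative only): squared distance to the point (1, 1),
# with placeholder inequality/equality constraint vectors so that solutions carry the
# violation information used by is_better_psoga.
f(x) = (sum((x .- 1.0) .^ 2), [0.0], [0.0])

# Box bounds for two decision variables, each in [-5, 5].
bounds = [-5.0 -5.0; 5.0 5.0]

# 60 particles split over 3 subpopulations, i.e. 3 alternatives sought.
algorithm = PSOGA(N = 60, N_solutions = 3, options = Options(iterations = 200))

result = Metaheuristics.optimize(f, bounds, algorithm)
```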

"""
initialize!(
status,
parameters::PSOGA,
problem,
information,
options,
args...;
kwargs...
)

Initialise all parameters used when solving a problem using PSOGA. Called by the main loop of Metaheuristics.
"""
function initialize!(status, parameters::PSOGA, problem, information, options, args...; kwargs...)
# Get problem dimensions.
D = Metaheuristics.getdim(problem)

# Fix parameters if they don't have sizes that work
if options.f_calls_limit == 0
options.f_calls_limit = 10000 * D
options.debug && @warn("f_calls_limit increased to $(options.f_calls_limit)")
end
if options.iterations == 0
options.iterations = div(options.f_calls_limit, parameters.N) + 1
end
if mod(parameters.N, parameters.N_solutions) != 0
# Round the population size up to the next multiple of N_solutions so that all subpopulations have equal size.
parameters.N += parameters.N_solutions - mod(parameters.N, parameters.N_solutions)
options.debug &&
@warn("Population size increased to $(parameters.N) to ensure equal size subpopulations.")
end

# Initialise velocity and population parameters.
parameters.v = zeros(parameters.N, D)
status = gen_initial_state(problem, parameters, information, options, status)

# Initialise parameter for best values per subpopulation and populate this array with bests in initial population.
parameters.subBest = Array{Any}(undef, parameters.N_solutions)
fill!(parameters.subBest, status.population[1])
for (i, sol) in enumerate(status.population)
if Metaheuristics.is_better(
sol,
parameters.subBest[Int(1 + div(i - 1, (parameters.N / parameters.N_solutions)))],
)
parameters.subBest[Int(1 + div(i - 1, (parameters.N / parameters.N_solutions)))] = sol
end
end

# Initialise the flock (current positions of all individuals).
parameters.flock = status.population

return status
end
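Throughout the implementation, the subpopulation an individual belongs to is recovered from its index with the expression `Int(1 + div(i - 1, parameters.N / parameters.N_solutions))`. A hypothetical helper (not part of this PR) spelling out that arithmetic:

```julia
# Hypothetical helper, not in this PR: individuals 1 to N/N_solutions form subpopulation 1,
# the next N/N_solutions form subpopulation 2, and so on.
# For example, with N = 100 and N_solutions = 4, individual 37 maps to subpopulation 2.
subpopulation_index(i, N, N_solutions) = Int(1 + div(i - 1, N / N_solutions))
```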

"""
update_state!(
status,
parameters::PSOGA,
problem,
information,
options,
args...;
kwargs...
)

Perform one iteration of PSOGA. Called by the main loop of Metaheuristics.
"""
function update_state!(
status,
parameters::PSOGA,
problem::Metaheuristics.AbstractProblem,
information::Information,
options::Options,
args...;
kwargs...,
)
# Initialise matrix of positions for the new generation of individuals.
X_new = zeros(parameters.N, Metaheuristics.getdim(problem))

# Update all individuals' position by adding their velocity.
for i = 1:(parameters.N)
# Obtain the best position in the individual's subpopulation, its current position and its all-time best position.
xSPBest =
get_position(parameters.subBest[Int(1 + div(i - 1, (parameters.N / parameters.N_solutions)))])
x = get_position(parameters.flock[i])
xPBest = get_position(status.population[i])
# Generate new velocity.
parameters.v[i, :] = velocity(x, parameters.v[i, :], xPBest, xSPBest, parameters, options.rng)
# Update position and reset to its bounds if it violates any.
x += parameters.v[i, :]
reset_to_violated_bounds!(x, problem.search_space)
X_new[i, :] = x
end

# Compute the centroids of each subpopulation
centroids = ones(parameters.N_solutions, Metaheuristics.getdim(problem))
for i = 1:(parameters.N_solutions)
centroids[i, :] = Statistics.mean(
X_new[
((i - 1) * div(parameters.N, parameters.N_solutions) + 1):(i * div(
parameters.N,
parameters.N_solutions,
)),
:,
],
dims = 1,
)
end

# Update local bests (in population) and bests per subpopulation (subBest).
for (i, sol) in enumerate(Metaheuristics.create_solutions(X_new, problem; ε = options.h_tol))
if is_better_psoga(
sol,
status.population[i],
centroids,
Int(1 + div(i - 1, (parameters.N / parameters.N_solutions))),
parameters.maximise_total,
)
status.population[i] = sol
if is_better_psoga(
sol,
parameters.subBest[Int(1 + div(i - 1, (parameters.N / parameters.N_solutions)))],
centroids,
Int(1 + div(i - 1, (parameters.N / parameters.N_solutions))),
parameters.maximise_total,
)
parameters.subBest[Int(1 + div(i - 1, (parameters.N / parameters.N_solutions)))] = sol
end
end

# Update current generation.
parameters.flock[i] = sol

# Check if stop criteria are met.
Metaheuristics.stop_criteria!(status, parameters, problem, information, options)
status.stop && break
end
end

"""
final_stage!(
status,
parameters::PSOGA,
problem,
information,
options,
args...;
kwargs...
)

Perform concluding operations after solving a problem using PSOGA. Called by the main loop of Metaheuristics.
"""
function final_stage!(
status,
parameters::PSOGA,
problem::Metaheuristics.AbstractProblem,
information::Information,
options::Options,
args...;
kwargs...,
)
# Set end time of algorithm.
status.final_time = time()
end
59 changes: 59 additions & 0 deletions src/algorithms/PSOGA/is_better.jl
@@ -0,0 +1,59 @@
"""
is_better_psoga(
A::T,
B::T,
centroids::Matrix{Float64},
subpop::Int64,
maximise_total::Bool
) where {T <: Metaheuristics.xFgh_solution}

Compare two solutions of the PSOGA algorithm with respect to their distance to the optimal solution and other alternatives.

# Arguments
- `A`: solution in PSOGA to be compared.
- `B`: solution in PSOGA to be compared.
- `centroids::Matrix{Float64}`: matrix of centroids per subpopulation. A centroid is the average point of all solutions in a subpopulation.
- `subpop::Int64`: index of the subpopulation solutions A and B are in. Note that they are always in the same subpopulation, since we only compare within subpopulations or with themselves.
- `maximise_total::Bool`: if true, we maximise the sum of distances between a point and all centroids of other subpopulations, else we maximise the minimum distance between a point and the centroids of other subpopulations.
"""
function is_better_psoga(
A::T,
B::T,
centroids::Matrix{Float64},
subpop::Int64,
maximise_total::Bool,
) where {T <: Metaheuristics.xFgh_solution}
A_vio = A.sum_violations
B_vio = B.sum_violations

# If either A or B violates the constraints, the one with the smaller violation is better.
if A_vio < B_vio
return true
elseif B_vio < A_vio
return false
end

# Initialise the distances for A and B to the negated objective value, which equals the distance between each point and the initial optimal solution.
A_dist = -A.f[1]
B_dist = -B.f[1]

# For each subpopulation compute the distance between both points and the centroid of that subpopulation.
for i in eachindex(centroids[:, 1])
# Skip the subpopulation A and B are in.
if i == subpop
continue
end
# Update distance based on whether we aim for maximising the total distance or the minimum distance.
if maximise_total
A_dist += sum((A.x[j] - centroids[i, j])^2 for j in eachindex(A.x))
B_dist += sum((B.x[j] - centroids[i, j])^2 for j in eachindex(B.x))
else
A_dist = min(A_dist, sum((A.x[j] - centroids[i, j])^2 for j in eachindex(A.x)))
B_dist = min(B_dist, sum((B.x[j] - centroids[i, j])^2 for j in eachindex(B.x)))
end
end

# If total or minimum distance of A is bigger than for B, A is better so return true.
return A_dist > B_dist
end
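For intuition, a small worked example (made-up numbers) of the two aggregation modes selected by `maximise_total`:

```julia
# Illustrative numbers only: distance of a candidate to the initial optimal solution,
# and squared distances to the centroids of the two *other* subpopulations.
dist_to_optimum = 2.5
dist_to_centroids = [4.0, 9.0]

# maximise_total = true: score by the total spread (2.5 + 4.0 + 9.0 = 15.5).
score_total = dist_to_optimum + sum(dist_to_centroids)

# maximise_total = false: score by the worst-case spread (min(2.5, 4.0, 9.0) = 2.5).
score_min = min(dist_to_optimum, minimum(dist_to_centroids))
```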