Commit fda9a634 authored by Fredrik Bagge Carlson

merge dev

parents 9810392d d411efe5
@@ -6,3 +6,17 @@ function PCA(W)
score = score*diagm(latent)
C,score,latent,W0
end
using MLKernels
function kernelPCA(X; α=1.0)
κ = GaussianKernel(α)
K = kernelmatrix(κ,X)
N = size(K)[1]
In = fill(1/N,(N,N))
K = K - In*K - K*In + In*K*In # Double centering: make the data zero-mean in feature space
(D,V) = eig(K)
Kpc = K*V
Kpc,D,V
end
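# Usage sketch (illustrative only, assuming observations are stored as rows of `X`
# and that MLKernels' `kernelmatrix` follows that convention):
#
#   X = randn(100, 3)                 # 100 observations of a 3-dimensional signal
#   Kpc, D, V = kernelPCA(X; α=0.5)   # projected data, eigenvalues and eigenvectors of the centered kernel matrix
#   # The columns of Kpc associated with the largest entries of D are the leading kernel principal components.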
module SystemIdentification
if !isdefined(:DEBUG); DEBUG = false; end
export
Model,LinearModel,NonLinearModel,
Network,
Polynom,PolynomMatrix,
TFdata,
AR,
ARX,
RBFARX,
@@ -12,11 +13,12 @@ FitResult,
IdData,
# Functions
ar,arx,getARregressor,getARXregressor,find_na,
toeplitz, kalman
toeplitz, kalman, kalman_smoother, forward_kalman, PCA, plotmodel
## Fit Methods =================
:LS
:LS_reg
:L1
:LM
## Types =======================
@@ -24,8 +26,8 @@ abstract Model
abstract LinearModel <: Model
abstract NonLinearModel <: Model
abstract Network <: NonLinearModel
typealias Polynom{T<:Real} Union(Array{T,1} , T)
typealias PolynomMatrix{T} Union(Array{Polynom{T},1},Polynom{T}, T)
typealias Polynom{T<:Real} Union{Array{T,1} , T}
typealias PolynomMatrix{T} Union{Array{Polynom{T},1},Polynom{T}, T}
@@ -58,6 +60,11 @@ type ARX <: LinearModel
nb::Polynom{Int}
end
type TFdata <: LinearModel
P
F
end
type RBFARX <: Network
na::Int
n_centers::Int
@@ -99,10 +106,16 @@ sse(x) = sum(x.^2)
fit(y,yh) = 100 * (1-rms(y-yh)./rms(y-mean(y)));
aic(x,d) = log(sse(x)) + 2d/length(x)
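# Illustrative example of the fit statistics above (assumes `rms(x) = sqrt(mean(x.^2))`
# as defined earlier in this file):
#
#   y  = randn(100)
#   yh = y + 0.1randn(100)        # a prediction with small errors
#   fit(y, yh)                    # percentage fit, approaches 100 as yh approaches y
#   aic(y - yh, 4)                # Akaike criterion for a model with 4 parameters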
##
Base.show(io::IO, fit::FitStatistics) = print(io, "Fit RMS:$(fit.RMS), FIT:$(fit.FIT), AIC:$(fit.AIC)")
include("utilities")
include("transfer_functions")
include("idinput")
include("armax.jl")
include("kalman.jl")
include("PCA.jl")
include("kernelPCA.jl")
include("toeplitz.jl")
include("cuckooSearch.jl")
end
@@ -7,19 +7,22 @@ function ar(y::Vector{Float64}, na; λ = 0, normtype=2, verbose=false)
if normtype == 2
if λ == 0
w = A\y_train
method = :LS
else
w = (A'A + λ*eye(size(A,2)))\A'y_train
method = :LS_reg
end
elseif normtype == 1
w = Variable(size(A,2),1)
problem = minimize(sum(abs(A*w-y_train )) + λ*norm(w))
solve!(problem, SCSSolver(verbose=Int(verbose)))
w = w.value[:]
method = :L1
end
prediction = A*w
error = y_train - prediction
model = AR(w,na)
result = FitResult(y_train,prediction,na, λ>0?(:LS_reg) :(:LS),λ)
result = FitResult(y_train,prediction,na, method,λ)
return model, result
end
ar(iddata::IdData, na; λ = 0) = ar(iddata.y, na; λ = λ)
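# Usage sketch (illustrative): fit an AR(2) model to a synthetic signal.
# The returned objects follow the AR and FitResult types defined above.
#
#   y = zeros(1000)
#   for t = 3:1000
#       y[t] = 1.5y[t-1] - 0.7y[t-2] + 0.1randn()   # simulate a stable AR(2) process
#   end
#   model, result = ar(y, 2)              # least-squares fit (:LS)
#   model_reg, _  = ar(y, 2; λ = 0.1)     # ridge-regularized fit (:LS_reg)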
@@ -31,14 +34,17 @@ function arx(y::Vector{Float64}, u::VecOrMat{Float64}, na, nb; λ = 0, normtype=
if normtype == 2
if λ == 0
w = A\y_train
method = :LS
else
w = (A'A + λ*eye(size(A,2)))\A'y_train
method = :LS_reg
end
elseif normtype == 1
w = Variable(size(A,2),1)
problem = minimize(sum(abs(A*w-y_train )) + λ*norm(w))
solve!(problem, SCSSolver(verbose=Int(verbose)))
w = w.value[:]
method = :L1
end
prediction = A*w
error = y_train - prediction
@@ -51,7 +57,7 @@ function arx(y::Vector{Float64}, u::VecOrMat{Float64}, na, nb; λ = 0, normtype=
si += nb[i]
end
model = ARX(w[1:na],b,na,nb)
result = FitResult(y_train,prediction,na, λ>0?(:LS_reg) :(:LS), λ)
result = FitResult(y_train,prediction,na, method, λ)
return model, result
end
arx(iddata::IdData, na, nb; λ = 0) = arx(iddata.y, iddata.u, na, nb; λ = λ)
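# Usage sketch (illustrative): fit an ARX(2,2) model from input-output data.
#
#   u = randn(1000)
#   y = zeros(1000)
#   for t = 3:1000
#       y[t] = 1.5y[t-1] - 0.7y[t-2] + 0.5u[t-1] + 0.2u[t-2] + 0.05randn()
#   end
#   model, result = arx(y, u, 2, 2)   # na = nb = 2
#   # model now holds the estimated a- and b-polynomials (see the ARX type above)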
@@ -87,7 +93,7 @@ getARXregressor(iddata::IdData, na, nb) = getARXregressor(iddata.y,iddata.u, na,
"""Plots the RMSE and AIC for model orders up to `n`. Useful for model selection"""
function find_na(y::Vector{Float64},n::Int)
function find_na(y::Vector,n::Int)
error = zeros(n,2)
for i = 1:n
w,e = ar(y,i)
@@ -96,6 +102,21 @@ function find_na(y::Vector{Float64},n::Int)
print(i,", ")
end
println("Done")
plotsub(error,"-o")
show()
scatter(error, show=true)
end
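# Usage sketch (illustrative), continuing with the signal `y` from the `ar` example above:
#
#   find_na(y, 20)   # scan model orders 1..20 and inspect the RMSE/AIC plot to pick n_a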
"""
plotmodel(y,m::AR)
Plots a signal `y` and the output of the model `m`
"""
function plotmodel(y,m::AR)
na = length(m.a)
y,A = getARregressor(y,na)
yh = A*m.a
error = y-yh
plot(y,c=:black)
plot(yh,c=:b)
plot(error,c=:r, title="Fitresult, AR, n_a: $na, RMSE = $(rms(error)) Fit = $(fit(y,yh))")
end
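# Usage sketch (illustrative), continuing the `ar` example above:
#
#   model, result = ar(y, 2)
#   plotmodel(y, model)   # overlays y, the one-step prediction and the residual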
using Devectorize
"""
`cuckoo_search(f,X0;Lb=-convert(Float64,Inf),Ub=convert(Float64,Inf),n=25,pa=0.25, Tol=1.0e-5, max_iter = 1e5, timeout = Inf)`\n
`n` = Number of nests (candidate solutions)
`pa` = Discovery rate of alien eggs/solutions (default 0.25); tune this to trade exploration against exploitation
Based on the MATLAB implementation accompanying
@inproceedings{yang2009cuckoo,
title={Cuckoo search via L{\'e}vy flights},
author={Yang, Xin-She and Deb, Suash},
booktitle={Nature \& Biologically Inspired Computing, 2009. NaBIC 2009. World Congress on},
pages={210--214},
year={2009},
organization={IEEE}
}
http://www.mathworks.com/matlabcentral/fileexchange/29809-cuckoo-search--cs--algorithm
"""
function cuckoo_search(f,X0;Lb=-convert(Float64,Inf),Ub=convert(Float64,Inf),n=25,pa=0.25, Tol=1.0e-5, max_iter = 1e5, timeout = Inf)
nd=size(X0,1);
X0t = X0'
Lb = Lb'
Ub = Ub'
if !all(isfinite(Lb))
Lb=X0t-0.99999*abs(X0t);
end
if !all(isfinite(Ub))
Ub=X0t+0.99999*abs(X0t);
end
# Random initial solutions
nest = zeros(n,nd)
nest[1,:] = X0
for i=2:n
nest[i,:]=Lb+(Ub-Lb).*rand(size(Lb));
end
# Get the current best
fitness=10^20*ones(n,1);
fmin,bestnest,nest,fitness=get_best_nest(f,nest,nest,fitness);
N_iter=0;
t0 = time()
## Starting iterations
while fmin>Tol && N_iter < max_iter
# Generate new solutions (but keep the current best)
new_nest=get_cuckoos(nest,bestnest,Lb,Ub);
fnew,best,nest,fitness=get_best_nest(f,nest,new_nest,fitness);
# Update the counter
N_iter += n;
if fnew<fmin
fmin=fnew;
bestnest=best;
end
if time()-t0 > timeout
display("Cuckoo search: timeout $(timeout)s reached ($(time()-t0)s)")
break
end
# Discovery and randomization
new_nest=empty_nests(nest,Lb,Ub,pa) ;
# Evaluate this set of solutions
fnew,best,nest,fitness=get_best_nest(f,nest,new_nest,fitness);
# Update the counter again
N_iter += n;
# Find the best objective so far
if fnew<fmin
fmin=fnew;
bestnest=best;
end
if time()-t0 > timeout
display("Cuckoo search: timeout $(timeout)s reached ($(time()-t0)s)")
break
end
end ## End of iterations
## Post-optimization processing
## Display all the nests
println("Total number of iterations=",N_iter);
squeeze(bestnest',2),fmin
end
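## Usage sketch (illustrative): minimize the 2-dimensional Rosenbrock function with box bounds.
## The objective receives a vector and returns a scalar; `Tol` and `timeout` are optional keyword arguments.
#
#   rosen(x) = (1 - x[1])^2 + 100(x[2] - x[1]^2)^2
#   xbest, fmin = cuckoo_search(rosen, [0.5, 0.5]; Lb = -2ones(2), Ub = 2ones(2), Tol = 1e-8, timeout = 60)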
## --------------- All subfunctions are listed below ------------------
## Get cuckoos by random walk
function get_cuckoos(nest,best,Lb,Ub)
# Levy flights
n=size(nest,1);
# Levy exponent and coefficient
# For details, see equation (2.21), Page 16 (chapter 2) of the book
# X. S. Yang, Nature-Inspired Metaheuristic Algorithms, 2nd Edition, Luniver Press, (2010).
beta=3/2;
sigma=(gamma(1+beta)*sin(pi*beta/2)/(gamma((1+beta)/2)*beta*2^((beta-1)/2)))^(1/beta);
for j=1:n
s=nest[j,:];
# This is a simple way of implementing Levy flights
# For standard random walks, use step=1;
## Levy flights by Mantegna’s algorithm
u=randn(size(s))*sigma;
v=randn(size(s));
betai = 1/beta
@devec step=u./abs(v).^betai;
# In the next equation, the difference factor (s-best) means that
# when the solution is the best solution, it remains unchanged.
stepsize=0.01*step.*(s-best);
# Here the factor 0.01 comes from the fact that L/100 should be the typical
# step size of walks/flights, where L is the typical length scale;
# otherwise, Levy flights may become too aggressive/efficient,
# which makes new solutions jump outside of the design domain
# (and thus waste evaluations).
# Now the actual random walks or flights
s=s+stepsize.*randn(size(s));
# Apply simple bounds/limits
nest[j,:]=simplebounds(s,Lb,Ub);
end
nest
end
## Find the current best nest
function get_best_nest(f,nest,newnest,fitness)
# Evaluating all new solutions
for j=1:size(nest,1)
fnew=f(squeeze(newnest[j,:]',2));
if fnew<=fitness[j]
fitness[j]=fnew;
nest[j,:]=newnest[j,:];
end
end
# Find the current best
(fmin,K) = findmin(fitness) ;
best=nest[K,:];
fmin,best,nest,fitness
end
## Replace some nests by constructing new solutions/nests
function empty_nests(nest,Lb,Ub,pa)
# A fraction of worse nests are discovered with a probability pa
n=size(nest,1);
# Discovered or not -- a status vector
K=rand(size(nest)).>pa;
# In the real world, if a cuckoo’s egg is very similar to a host’s eggs, then
# this cuckoo’s egg is less likely to be discovered, thus the fitness should
# be related to the difference in solutions. Therefore, it is a good idea
# to do a random walk in a biased way with some random step sizes.
## New solution by biased/selective random walks
stepsize=rand()*(nest[randperm(n),:]-nest[randperm(n),:]);
new_nest=nest+stepsize.*K;
end
# Application of simple constraints
function simplebounds(s,Lb,Ub)
# Apply the lower bound
ns_tmp=s;
I=ns_tmp.<Lb;
ns_tmp[I]=Lb[I];
# Apply the upper bounds
J=ns_tmp.>Ub;
ns_tmp[J]=Ub[J];
# Update this new move
s=ns_tmp;
end
# ## You can replace the following by your own objective function
# # A d-dimensional sphere function sum_{j=1}^d (u_j-1)^2 with minimum at (1,1,....,1)
# function fobj(u)
#     sum((u-1).^2)
# end
# dims = 10
# cuckoo_search(fobj, zeros(dims); Lb=-10*ones(dims), Ub=10*ones(dims))
using Devectorize
"""
`cuckoo_search(f,X0;Lb=-convert(Float64,Inf),Ub=convert(Float64,Inf),n=25,pa=0.25, Tol=1.0e-5, max_iter = 1e5, timeout = Inf)`\n
`n` = Number of nests (candidate solutions)
`pa` = Discovery rate of alien eggs/solutions (default 0.25); tune this to trade exploration against exploitation
Based on the MATLAB implementation accompanying
@inproceedings{yang2009cuckoo,
title={Cuckoo search via L{\'e}vy flights},
author={Yang, Xin-She and Deb, Suash},
booktitle={Nature \& Biologically Inspired Computing, 2009. NaBIC 2009. World Congress on},
pages={210--214},
year={2009},
organization={IEEE}
}
http://www.mathworks.com/matlabcentral/fileexchange/29809-cuckoo-search--cs--algorithm
"""
function cuckoo_search(f,X0;Lb=-convert(Float64,Inf),Ub=convert(Float64,Inf),n=25,pa=0.25, Tol=1.0e-5, max_iter = 1e5, timeout = Inf)
nd=size(X0,1);
X0t = X0'
Lb = Lb'
Ub = Ub'
if !all(isfinite(Lb))
Lb=X0t-0.99999*abs(X0t);
end
if !all(isfinite(Ub))
Ub=X0t+0.99999*abs(X0t);
end
# Random initial solutions
nest = zeros(n,nd)
nest[1,:] = X0
for i=2:n
nest[i,:]=Lb+(Ub-Lb).*rand(size(Lb));
end
# Get the current best
fitness=10^20*ones(n,1);
fmin,bestnest,nest,fitness=get_best_nest(f,nest,nest,fitness);
N_iter=0;
t0 = time()
## Starting iterations
while fmin>Tol && N_iter < max_iter
# Generate new solutions (but keep the current best)
new_nest=get_cuckoos(nest,bestnest,Lb,Ub);
fnew,best,nest,fitness=get_best_nest(f,nest,new_nest,fitness);
# Update the counter
N_iter += n;
if fnew<fmin
fmin=fnew;
bestnest=best;
end
if time()-t0 > timeout
display("Cuckoo search: timeout $(timeout)s reached ($(time()-t0)s)")
break
end
# Discovery and randomization
new_nest=empty_nests(nest,Lb,Ub,pa) ;
# Evaluate this set of solutions
fnew,best,nest,fitness=get_best_nest(f,nest,new_nest,fitness);
# Update the counter again
N_iter += n;
# Find the best objective so far
if fnew<fmin
fmin=fnew;
bestnest=best;
end
if time()-t0 > timeout
display("Cuckoo search: timeout $(timeout)s reached ($(time()-t0)s)")
break
end
end ## End of iterations
## Post-optimization processing
## Display all the nests
println("Total number of iterations=",N_iter);
println("f(bestnest) = $(fmin)")
squeeze(bestnest',2),fmin
end
## --------------- All subfunctions are listed below ------------------
## Get cuckoos by random walk
function get_cuckoos(nest,best,Lb,Ub)
# Levy flights
n=size(nest,1);
# Levy exponent and coefficient
# For details, see equation (2.21), Page 16 (chapter 2) of the book
# X. S. Yang, Nature-Inspired Metaheuristic Algorithms, 2nd Edition, Luniver Press, (2010).
beta=3/2;
sigma=(gamma(1+beta)*sin(pi*beta/2)/(gamma((1+beta)/2)*beta*2^((beta-1)/2)))^(1/beta);
for j=1:n
s=nest[j,:];
# This is a simple way of implementing Levy flights
# For standard random walks, use step=1;
## Levy flights by Mantegna’s algorithm
u=randn(size(s))*sigma;
v=randn(size(s));
betai = 1/beta
@devec step=u./abs(v).^betai;
# In the next equation, the difference factor (s-best) means that
# when the solution is the best solution, it remains unchanged.
stepsize=0.01*step.*(s-best);
# Here the factor 0.01 comes from the fact that L/100 should be the typical
# step size of walks/flights, where L is the typical length scale;
# otherwise, Levy flights may become too aggressive/efficient,
# which makes new solutions jump outside of the design domain
# (and thus waste evaluations).
# Now the actual random walks or flights
s=s+stepsize.*randn(size(s));
# Apply simple bounds/limits
nest[j,:]=simplebounds(s,Lb,Ub);
end
nest
end
## Find the current best nest
function get_best_nest(f,nest,newnest,fitness)
# Evaluating all new solutions
for j=1:size(nest,1)
fnew=f(squeeze(newnest[j,:]',2));
if fnew<=fitness[j]
fitness[j]=fnew;
nest[j,:]=newnest[j,:];
end
end
# Find the current best
(fmin,K) = findmin(fitness) ;
best=nest[K,:];
fmin,best,nest,fitness
end
## Replace some nests by constructing new solutions/nests
function empty_nests(nest,Lb,Ub,pa)
# A fraction of worse nests are discovered with a probability pa
n=size(nest,1);
# Discovered or not -- a status vector
K=rand(size(nest)).>pa;
# In the real world, if a cuckoo’s egg is very similar to a host’s eggs, then
# this cuckoo’s egg is less likely to be discovered, thus the fitness should
# be related to the difference in solutions. Therefore, it is a good idea
# to do a random walk in a biased way with some random step sizes.
## New solution by biased/selective random walks
stepsize=rand()*(nest[randperm(n),:]-nest[randperm(n),:]);
new_nest=nest+stepsize.*K;
for j = 1:size(nest,1)
new_nest[j,:]=simplebounds(new_nest[j,:],Lb,Ub);
end
return new_nest
end
# Application of simple constraints
function simplebounds(s,Lb,Ub)
# Apply the lower bound
I = s.<Lb;
s[I] = Lb[I];
# Apply the upper bounds
J = s.>Ub;
s[J] = Ub[J];
return s
end
using Devectorize
"""
`cuckoo_search(f,X0;Lb=-convert(Float64,Inf),Ub=convert(Float64,Inf),n=25,pa=0.25, Tol=1.0e-5, max_iter = 1e5, timeout = Inf)`\n
`n` = Number of nests (candidate solutions)
`pa` = Discovery rate of alien eggs/solutions (default 0.25); tune this to trade exploration against exploitation
Based on the MATLAB implementation accompanying
@inproceedings{yang2009cuckoo,
title={Cuckoo search via L{\'e}vy flights},
author={Yang, Xin-She and Deb, Suash},
booktitle={Nature \& Biologically Inspired Computing, 2009. NaBIC 2009. World Congress on},
pages={210--214},
year={2009},
organization={IEEE}
}
http://www.mathworks.com/matlabcentral/fileexchange/29809-cuckoo-search--cs--algorithm
"""
function cuckoo_search(f,X0, Lb,Ub;n=25,pa=0.25, Tol=1.0e-5, max_iter = 1e3, timeout = Inf)
X00 = deepcopy(X0)
nd=size(X0,1);
X0t = X0'
Lb = Lb'
Ub = Ub'
if !all(isfinite(Lb))
Lb=X0t-0.99999*abs(X0t);
end
if !all(isfinite(Ub))
Ub=X0t+0.99999*abs(X0t);
end
# Random initial solutions
nest = zeros(n,nd)
nest[1,:] = X0t
for i=2:n
nest[i,:]=Lb+(Ub-Lb).*rand(size(Lb));
DEBUG && @assert !any(nest[i,:] .> Ub)
DEBUG && @assert !any(nest[i,:] .< Lb)
end
# Get the current best
fitness=10^20*ones(n,1);
fmin,bestnest,nest,fitness=get_best_nest(f,nest,nest,fitness);
DEBUG && println("f(X0) = $(f(X00)), f(bestnest) = $(fmin)")
DEBUG && @assert X00 == X0
N_iter=0;
t0 = time()
## Starting iterations
while fmin>Tol && N_iter < max_iter
# Generate new solutions (but keep the current best)
new_nest=get_cuckoos(nest,bestnest,Lb,Ub);
fnew,best,nest,fitness=get_best_nest(f,nest,new_nest,fitness);
# Update the counter
N_iter += n;
if fnew<fmin
fmin=fnew;
bestnest=best;
end
if time()-t0 > timeout
display("Cuckoo search: timeout $(timeout)s reached ($(time()-t0)s)")
break
end
# Discovery and randomization
new_nest=empty_nests(nest,Lb,Ub,pa) ;
# Evaluate this set of solutions
fnew,best,nest,fitness=get_best_nest(f,nest,new_nest,fitness);
# Update the counter again
N_iter += n;
# Find the best objective so far
if fnew<fmin
fmin=fnew;
bestnest=best;
end
if time()-t0 > timeout
display("Cuckoo search: timeout $(timeout)s reached ($(time()-t0)s)")
break
end
end ## End of iterations
## Post-optimization processing
## Display all the nests
println("Total number of iterations=",N_iter);
println("f(bestnest) = $(fmin)")
squeeze(bestnest',2),fmin
end
## --------------- All subfunctions are listed below ------------------
## Get cuckoos by random walk
function get_cuckoos(nest,best,Lb,Ub)
# Levy flights
n=size(nest,1);
# Levy exponent and coefficient
# For details, see equation (2.21), Page 16 (chapter 2) of the book
# X. S. Yang, Nature-Inspired Metaheuristic Algorithms, 2nd Edition, Luniver Press, (2010).
beta=3/2;
sigma=(gamma(1+beta)*sin(pi*beta/2)/(gamma((1+beta)/2)*beta*2^((beta-1)/2)))^(1/beta);
for j=1:n
s=nest[j,:];
# This is a simple way of implementing Levy flights
# For standard random walks, use step=1;
## Levy flights by Mantegna’s algorithm