From 608645fdefec7f43cf9c594f26d14459cba9339d Mon Sep 17 00:00:00 2001 From: Fredrik Bagge Carlsson <cont-frb@ulund.org> Date: Mon, 7 Sep 2015 09:50:19 +0200 Subject: [PATCH] add dev rbfmodels --- .gitignore | 8 + src/rbfmodels/getPsiVec.jl | 44 ++++ src/rbfmodels/getPsiVecNd.jl | 97 +++++++++ src/rbfmodels/testJacobian.jl | 20 ++ src/rbfmodels/testRBF.jl | 27 +++ src/rbfmodels/trainRBF.jl | 179 +++++++++++++++++ src/rbfmodels/trainRBF_ARX.jl | 364 ++++++++++++++++++++++++++++++++++ 7 files changed, 739 insertions(+) create mode 100644 .gitignore create mode 100644 src/rbfmodels/getPsiVec.jl create mode 100644 src/rbfmodels/getPsiVecNd.jl create mode 100644 src/rbfmodels/testJacobian.jl create mode 100644 src/rbfmodels/testRBF.jl create mode 100644 src/rbfmodels/trainRBF.jl create mode 100644 src/rbfmodels/trainRBF_ARX.jl diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d3ea8a8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +*.mat +*.pdf +*.log +*.aux +*.gz +*.tex +*.m~ +*.m diff --git a/src/rbfmodels/getPsiVec.jl b/src/rbfmodels/getPsiVec.jl new file mode 100644 index 0000000..29790da --- /dev/null +++ b/src/rbfmodels/getPsiVec.jl @@ -0,0 +1,44 @@ +function getΨVec!(Ψ,σvec,phi,centers,w=0;period = 0, normalized=false) + + if period > 0 + psi(Phi,c,σ) = exp((cos((Phi-c)*period/(2*pi))-1)/σ); + else + function psi!(ψ,j,Phi,c,σ) + @simd for i = 1:size(ψ,1) + ψ[i,j] = exp(-(c-Phi[i])^2*σ)[1] + end + end + end + + if w == 0 + w = zeros(size(Ψ,2)) + end + σvec = σvec[:] + centers = centers[:] + Nsignals = 1 + (Nbasis, ) = size(centers); + ∇ = zeros(size(Ψ,1),size(centers,1)*2) + if normalized + for i = 1:Nbasis + psi!(Ψ,i,phi,centers[i],σvec[i]); + @devec Ψ[:,i] = Ψ[:,i]./sum(Ψ[:,i]) + ∇[:,i] = -Ψ[:,i].*2.*(centers[i] - phi).*σvec[i] + ∇[:,i+Nbasis] = -Ψ[:,i].*(centers[i] - phi).^2 + end + else + + for i = 1:Nbasis + psi!(Ψ,i, phi,centers[i],σvec[i]); + ∇[:,i] = -Ψ[:,i].*2.*(centers[i] - phi).*σvec[i] + ∇[:,i+Nbasis] = -Ψ[:,i].*(centers[i] - phi).^2 + end + + end + + + return ∇ + # if normalized + # Ψ = Ψ./repmat(sum(Ψ,2),1,size(Ψ,2)) + # end + +end diff --git a/src/rbfmodels/getPsiVecNd.jl b/src/rbfmodels/getPsiVecNd.jl new file mode 100644 index 0000000..fca0674 --- /dev/null +++ b/src/rbfmodels/getPsiVecNd.jl @@ -0,0 +1,97 @@ +function getΨVecNd!(Ψ, phi , params, w=0; period = 0, normalized=true) + + Nbasis = size(Ψ,2) + Nsignals = size(phi,2) + + if period > 0 + psi(Phi,c,σ) = exp((cos((c-Phi)*period/(2*pi))-1)/σ); + else + if Nsignals >= 1 + function psi(Phi,c,σ) + # Denna jävla funktionen allokerar en massa minne! + ret = exp(- sum(((c-Phi).^2).*σ) ) + end + else + function psi(Phi,c,σ) + # Denna jävla funktionen allokerar en massa minne! 
+ ret = exp(- ((Phi-c)^2*(1/σ))[1,1] ) + end + end + end + + if w == 0 + w = zeros(Nbasis) + end + + + # figure(); pp= imagesc(Centers); display(pp); + # figure(); pp= imagesc(Σ); display(pp); + + Σoffset = convert(Int64,size(params,1)/2); + N = size(phi,1) + ∇ = zeros(N,size(params,1)) + if normalized + s = 1e-10*ones(size(phi,1)) + i1 = 1 + for i = 1:Nbasis + for j = 1:size(phi,1) + Ψ[j,i] = psi(phi[j,:]',params[i1:(i1+Nsignals-1)],params[(Σoffset+i1):(Σoffset+i1+Nsignals-1)]) + s[j] += Ψ[j,i] + end + i1 += Nsignals + end + for j = 1:size(phi,1) + @devec Ψ[j,:] = Ψ[j,:]./s[j] + end + i1 = 1 + for i = 1:Nbasis + for j = 1:N + c = params[i1:(i1+Nsignals-1)] + s = params[(Σoffset+i1):(Σoffset+i1+Nsignals-1)] + x = phi[j,:]' + ∇[j,i1:(i1+Nsignals-1)] = (Ψ[j,i]*w[i]).*(2*(x-c).*s) + ∇[j,(Σoffset+i1):(Σoffset+i1+Nsignals-1)] = -(Ψ[j,i]*w[i]).*((c-x).^2 ) + end + i1 += Nsignals + end + + else + i1 = 1 + + + for i = 1:Nbasis + for j = 1:N + c = params[i1:(i1+Nsignals-1)] + s = params[(Σoffset+i1):(Σoffset+i1+Nsignals-1)] + x = phi[j,:]' + Ψ[j,i] = psi(x,c,s)[1] + ∇[j,i1:(i1+Nsignals-1)] = (Ψ[j,i]*w[i]).*(2*(x-c).*s) + ∇[j,(Σoffset+i1):(Σoffset+i1+Nsignals-1)] = -(Ψ[j,i]*w[i]).*((c-x).^2 ) + + end + i1 += Nsignals + end + + end + return ∇ +end + + +function getCenters(centers,σvec) + + (nbasis,Nsignals) = size(centers) + Nbasis::Int64 = nbasis^Nsignals + Centers = zeros(Nsignals, Nbasis) + Σ = zeros(Nsignals, Nbasis) + v = Nbasis + h = 1 + for i = 1:Nsignals + v = convert(Int64,v / nbasis) + Centers[i,:] = vec(repmat(centers[:,i]',v,h))' + Σ[i,:] = vec(repmat(σvec[:,i]',v,h))' + h *= nbasis + end + + [vec(Centers), vec(Σ)] + +end diff --git a/src/rbfmodels/testJacobian.jl b/src/rbfmodels/testJacobian.jl new file mode 100644 index 0000000..1a39602 --- /dev/null +++ b/src/rbfmodels/testJacobian.jl @@ -0,0 +1,20 @@ +N = 100 +m = 2 + +w = randn(m,1) +A = randn(N,m) +b = A*w + 0.1randn(N) +plot([b A*w]) + +f(w) = A*w-b +g(w) = A + +X0 = zeros(2) +@time res = levenberg_marquardt(f, g, X0, + maxIter = 200, + tolG = 1e-7, + tolX = 1e-9, + show_trace=true) +@show w +@show wh = res.minimum +we = norm(w-wh) diff --git a/src/rbfmodels/testRBF.jl b/src/rbfmodels/testRBF.jl new file mode 100644 index 0000000..1920231 --- /dev/null +++ b/src/rbfmodels/testRBF.jl @@ -0,0 +1,27 @@ +Pkg.pin("Winston", v"0.11.0") +using Winston +using Optim +using Debug +N = 150; +k = [1:N]; + +x = linspace(-2,2,N) +y = linspace(-pi,pi,N) +z = x.^2 + sign(y) + 0.1randn(N) + + + +Nbasis = 3; +σ = 1./[2 2]; +# dbstop in learnRBF at 16 + +(w,params, Psi) = trainRBF(z, Nbasis, σ, [x y], nonlin=true, liniters=50, ND=true, λ=1e-2) + + +rms(e) = sqrt(mean(e.^2)) +mae(e) = mean(abs(e)) + +@show rms(z) +@show rms(z-Psi*w) + + diff --git a/src/rbfmodels/trainRBF.jl b/src/rbfmodels/trainRBF.jl new file mode 100644 index 0000000..02a2260 --- /dev/null +++ b/src/rbfmodels/trainRBF.jl @@ -0,0 +1,179 @@ +using Optim +using Devectorize +import ASCIIPlots.scatterplot +function trainRBF(b, Nbasis, σ, ϕtrain; nonlin=false, liniters=20,nonliniters=5, λ=1e-8, ND=false, normalized=false) + limits = [minimum(ϕtrain,1)' maximum(ϕtrain,1)'] + Nseries = size(ϕtrain,2); + NBASIS::Int64 = Nbasis^Nseries + if Nseries == 1 + ND = false; + end + Ψ = Array{Float64,2} + if ND + Ψ = zeros(size(ϕtrain,1),NBASIS) + else + Ψ = zeros(size(ϕtrain,1),Nbasis); + end + assert(length(σ) == Nseries) + σvec = zeros(Nbasis,Nseries) + centers = zeros(Nbasis,Nseries) + for s = 1:Nseries + l = limits[s,2]-limits[s,1]; + d = l/Nbasis; + lc = [(d/2):d:l]+limits[s,1] + centers[:,s] 
= lc; + σvec[:,s] = σ[s].*ones(Nbasis); + end + + # params = Array{Float64,1} + + if ND + params = getCenters(centers,σvec) + getΨVecNd!(Ψ, ϕtrain , params, normalized=normalized) + else + getΨVec!(Ψ,σvec,ϕtrain,centers, normalized=normalized) + end + + figure(); pp= imagesc(Ψ); display(pp); + Nparams = size(Ψ,2) + # @show size(Ψ) + # @show size(b) + + w = ((Ψ'*Ψ + λ*eye(Nparams))\Ψ'*b); + + + + + if nonlin + if ND + function costfunc(Ψ,w,ϕtrain,b , params) + getΨVecNd!(Ψ, ϕtrain , params, w, normalized=normalized) + f = Ψ*w - b + # @devec c = sum(f.^2); + # c + end + cost(X) = costfunc(Ψ,w,ϕtrain,b,X); + function g(xparams) + return getΨVecNd!(Ψ, ϕtrain , xparams, w, normalized=normalized) + #figure();plot(storage); ylabel("gradient"); display(pp) + end + else + function costfunc(Ψ,w,ϕtrain,centers,σvec,b) + getΨVec!(Ψ,σvec,ϕtrain,centers,w, normalized=normalized) + f = Ψ*w - b + end + cost(X) = costfunc(Ψ,w,ϕtrain,reshape(X[1:(Nbasis*Nseries)],Nbasis,Nseries),reshape(X[(Nbasis*Nseries)+1:end],Nbasis,Nseries),b); + function g(xparams) + return getΨVec!(Ψ,σvec,ϕtrain,centers,w, normalized=normalized) + #figure();plot(storage); ylabel("gradient"); display(pp) + end + end + + + # function cost∇cost(X,storage) + # h = 1e-10 + # x = X + h*im + # y = cost(x) + # storage[:] = imag(y)./h + # return real(y) + # end + + # ∇ = DifferentiableFunction(cost,∇cost, cost∇cost) + cold = 1e20 + fvec = zeros(liniters+1) + fvec[1] = ND ? sum(cost(params).^2) : sum(cost([centers[:],σvec[:]]).^2) + finalIter = 1 + useCuckoo = false + for i = 1:liniters + X0 = ND ? params : [centers[:],σvec[:]] + # test = copy(X0) + # show(∇cost(X0,test)) + try + # test = X0 + # @show ∇cost(X0,test) + if true#!useCuckoo #i % 2 == 1 + # @time res = optimize(cost, X0, + # method = :l_bfgs, + # iterations = iters, + # grtol = 1e-5, + # xtol = 1e-8, + # ftol = 1e-8) + @time res = Optim.levenberg_marquardt(cost,g, X0, + maxIter = nonliniters, + tolG = 1e-5, + tolX = 1e-8, + show_trace=false) + X = res.minimum + fvec[i+1] = res.f_minimum + else + display("Using cuckoo search to escape local minimum") + @time (bestnest,fmin) = cuckoo_search(x -> sum(cost(x).^2),X0;Lb=-inf(Float64),Ub=inf(Float64),n=30,pa=0.25, Tol=1.0e-5, max_iter = 10*nonliniters+100) + X = bestnest + fvec[i+1] = fmin + end + if ND + params = X + getΨVecNd!(Ψ, ϕtrain , params,b, w, normalized=normalized) + else + centers = reshape(X[1:(Nbasis*Nseries)],Nbasis,Nseries) + σvec = reshape(X[(Nbasis*Nseries)+1:end],Nbasis,Nseries) + getΨVec!(Ψ,σvec,ϕtrain,centers,w, normalized=normalized) + end + w = ((Ψ'*Ψ + λ*eye(Nparams))\Ψ'*b) + display("Non-linear optimization, iteration $i") + # iters *= convert(Int64,round(20^(1/liniters))) + f = Ψ*w - b + c = fvec[i+1] + if abs(cold-c) < 1e-10 + if useCuckoo + finalIter += 1 + display("No significant change in function value") + break + end + useCuckoo = true + elseif useCuckoo + useCuckoo = false + end + cold = c + catch ex + display("Optimization failed, using current best point") + display(ex) + if ND + getΨVecNd!(Ψ, ϕtrain , params, w, normalized=normalized) + else + getΨVec!(Ψ,σvec,ϕtrain,centers,w, normalized=normalized) + end + w = ((Ψ'*Ψ + λ*eye(Nparams))\Ψ'*b) + break + end + + finalIter += 1 + try + display(scatterplot(fvec[1:finalIter],sym='*')) + catch + display("Plot failed") + end + end + figure();pp = plot(fvec[1:finalIter],"o"); xlabel("Iteration"); title("Best function value"); display(pp) + diffFvec = diff(fvec[1:finalIter]) + figure();pp = plot(diffFvec[1:2:end],"o");hold(true);plot(diffFvec[2:2:end],"og"); 
xlabel("Iteration"); title("Best function value decrease"); display(pp) + end + f = Ψ*w; + figure();pp = plot(ϕtrain[:,1],".b");hold(true) + plot(ϕtrain,f[:,1],"--b"); xlabel("ϕ"); ylabel("output"); display(pp) + + if size(b,2) > 1 + hold(true) + plot(ϕtrain[:,2],"g.") + plot(ϕtrain,f[:,2],"g--") + display(pp) + figure(); pp= plot(f[:,1],f[:,2],"--"); hold(true) + plot(b[:,1][:,2]); xlabel("output dim 1");ylabel("output dim 2") + display(pp) + end + if ND + return w,params, Ψ + else + return w,σ, centers, Ψ + end +end diff --git a/src/rbfmodels/trainRBF_ARX.jl b/src/rbfmodels/trainRBF_ARX.jl new file mode 100644 index 0000000..bba0cb4 --- /dev/null +++ b/src/rbfmodels/trainRBF_ARX.jl @@ -0,0 +1,364 @@ +using Devectorize +using Clustering +using Debug + + +type RbfNonlinearParameters + x::Vector{Float64} + n_state::Integer + n_centers::Integer +end + +type RbfLinearParameters + x::Vector{Float64} +end + +type RbfParameters + n::RbfNonlinearParameters + l::RbfLinearParameters +end + + + + +# for op = (:+, :*, :\, :/) +# @eval ($op)(a::RbfNonlinearParameters,b) = ($op)(a.x,b) +# @eval ($op)(b,a::RbfNonlinearParameters) = ($op)(b,a.x) +# end + +Base.display(p::RbfNonlinearParameters) = println("RbfNonlinearParameters: Parameters = $(length(p.x)), n_state(x) = $(p.n_state), n_centers(x) = $(p.n_centers)") +Base.start(p::RbfNonlinearParameters) = 1 +Base.done(p::RbfNonlinearParameters, state) = state > p.n_centers +Base.next(p::RbfNonlinearParameters, state) = (p.x[1+(state-1)*2*p.n_state:state*2p.n_state])::VecOrMat, state + 1 +"""Train your RBF-ARX network.`trainRBF_ARX(y, na, nc; state = :A, liniters=3,nonliniters=50, normalized=false, initialcenters="equidistant", inputpca=false, outputnet = true, cuckoosearch = false)`\n +The number of centers is equal to `nc` if Kmeans is used to get initial centers, otherwise the number of centers is `nc^n_state`\n +`n_state` is equal to the state dimension, possibly reduced to `inputpca` if so desired.\n +The number of nonlinear parameters is `n_centers × n_state`\n +The number of linear parameters is `outputnet ? n_state × (n_centers+1) × (na)+1) : (na)×(n_centers+1)+1)`""" +function trainRBF_ARX(y, A, state, nc; liniters=3,nonliniters=50, normalized=false, initialcenters="equidistant", inputpca=false, outputnet = true, cuckoosearch = false) + n_points = length(y) + na = isa(A,Matrix) ? size(A,2) : 1 + + function getcentersKmeans(state, nc) + iters = 21 + errorvec = zeros(iters) + params = Array(Float64,(nc*2*n_state,iters)) + methods = [:rand;:kmpp] + for iter = 1:iters + clusterresult = kmeans(state', nc; maxiter=200, display=:none, init=iter<iters ? 
methods[iter%2+1] : :kmcen) + for i = 1:nc + si = 1+(i-1)n_state*2 + params[si:si+n_state-1,iter] = clusterresult.centers[:,i] + C = cov(state[clusterresult.assignments .== i,:]) + params[si+n_state:si+2n_state-1,iter] = diag(inv(C)) + end + errorvec[iter] = rms(predictionerror(params[:,iter])) + end + println("Std in errors among initial centers: ", round(std(errorvec),3)) + ind = indmin(errorvec) + return RbfNonlinearParameters(params[:,ind],n_state, nc) + end + + + function saturatePrecision(x,n_state) + for i = 1:2n_state:length(x) + range = i+n_state:i+2n_state-1 + x[range] = abs(x[range]) + end + return x + end + + function plotcenters(Z) + X = zeros(Z.n_centers,2) + for (i,Zi) in enumerate(Z) + X[i,:] = Zi[1:2]' + end + newplot(X[:,1],X[:,2],"o"); title("Centers") + end + + function getΨ(Znl) + RBF(x, Znl::VecOrMat,n_state::Integer) = exp(-(((x-Znl[1:n_state]).^2.*Znl[n_state+1:end])[1])) + if normalized + rowsum = ones(n_points) + for (j,Zi) in enumerate(Znl) + for i = n_state+1:2n_state + Zi[i] = Zi[i] <= 0 ? 1.0 : Zi[i] # Reset to 1 if precision became negative + end + for i = 1:n_points + statei = squeeze(state[i,:]',2) + a = RBF(statei, Zi, n_state) + Ψ[i,j] = a + rowsum[i] += a + end + end + for i = 1:n_points + if rowsum[i] <= 1e-10 + continue + end + @devec Ψ[i,:] ./= rowsum[i] + end + else # Not normalized + for (j,Zi) in enumerate(Znl) + for i = n_state+1:2n_state + Zi[i] = Zi[i] <= 0 ? 1.0 : Zi[i] # Reset to 1 if precision became negative + end + + for i = 1:n_points + statei = squeeze(state[i,:]',2) + # statei = slice(state,i,:) + Ψ[i,j] = RBF(statei, Zi, n_state) + if DEBUG && !isfinite(Ψ[i,j]) + @show i,j,statei, Zi, n_state, Ψ[i,j] + @show (statei-Zi[1:n_state]).^2 + @show Zi[n_state+1:end] + # @show exp(-(((statei-Zi[1:n_state]).^2.*Zi[n_state+1:end])[1])) + error("Stopping") + end + end + end + end + if DEBUG && sum(!isfinite(Ψ)) > 0 + @show sum(!isfinite(Ψ)) + end + return Ψ + end + + function fitlinear(V) + try + assert(isa(V,Matrix)) + assert(isa(y,Vector)) + DEBUG && assert(!any(!isfinite(V))) + DEBUG && assert(!any(!isfinite(y))) + return V\y + catch ex + @show reshape(Znl.x,2n_state,n_centers) + display(ex) + error("Linear fitting failed") + end + end + + function jacobian(Znl, Ψ, w) + J = Array(Float64,(n_points,length(Znl.x))) + ii = 1 + for (k,Zi) in enumerate(Znl) + μ = Zi[1:n_state] # slice? 
+ γ = Zi[n_state+1:end] + i1 = ii-1 + for l = 1:n_points + Ψw = 1.0 + if outputnet + for i = 1:na + for j = 1:n_state + ind = j + n_state*(k-1) + n_state*(n_centers+1)*(i-1) + Ψw += V[l,ind]*w[ind] + end + end + else + for i = 1:na + ind = k + (n_centers+1)*(i-1) + Ψw += V[l,ind]*w[ind] + end + end + for p = 1:n_state + x_μ = state[l,p]-μ[p] + J[l,i1+p] = 2*Ψw*x_μ*γ[p] + J[l,i1+n_state+p] = (-Ψw)*x_μ^2 + end + end + ii += 2n_state + end + return J + end + + function getLinearRegressor(Ψ) + if outputnet + ii = 1 + for i = 1:na + for k = 1:n_centers+1 + for j = 1:n_state + for l = 1:n_points + V[l,ii] = Ψ[l,k]*A[l,i]*state[l,j] + end + ii = ii+1 + end + end + end + else + ii = 1 + for i = 1:na + for k = 1:n_centers+1 + for l = 1:n_points + V[l,ii] = Ψ[l,k]*A[l,i] + end + ii = ii+1 + end + end + end + if DEBUG && sum(!isfinite(V)) > 0 + @show sum(!isfinite(V)) + end + return V + end + + function predictionerror(z) + znl = RbfNonlinearParameters(z,n_state,n_centers) + getΨ(znl); + getLinearRegressor(Ψ); + zl = fitlinear(V); + prediction = V*zl + error = prediction-y + return error + end + + # Get initial centers ================================ + Znl::RbfNonlinearParameters + if isa(inputpca, Int) + if inputpca > size(state,2) + warn("inputpca must be <= n_state") + inputpca = size(state,2) + end + state-= repmat(mean(state,1),n_points,1) + state./= repmat(var(state,1),n_points,1) + C,score,latent,W0 = PCA(state,true) + state = score[:,1:inputpca] + end + n_state = size(state,2) + + if lowercase(initialcenters) == "equidistant" + initialcenters = :equidistant + n_centers = nc^n_state + else + initialcenters = :kmeans + n_centers = nc + end + + + Ψ = Array(Float64,(n_points,n_centers+1)) + Ψ[:,end] = 1.0 + V = outputnet ? V = Array(Float64,(n_points, n_state* (n_centers+1)* (na)+1)) : V = Array(Float64,(n_points, (na)*(n_centers+1)+1)) + V[:,end] = 1.0 + if initialcenters == :equidistant + Znl = getcentersEq(state,nc); debug("gotcentersEq") + else + Znl = getcentersKmeans(state,nc); debug("gotcentersKmeans") + end + @ddshow Znl + getΨ(Znl); debug("Got Ψ") + @ddshow sum(!isfinite(Ψ)) + getLinearRegressor(Ψ); debug("Got linear regressor V") + @ddshow size(V) + @ddshow sum(!isfinite(V)) + Zl = fitlinear(V); debug("fitlinear") + @ddshow sum(!isfinite(Zl)) + prediction = V*Zl + error = y - prediction + errors = zeros(liniters+1) + + # ============= Main loop ================================================ + debug("Calculating initial error") + errors[1] = sse(predictionerror(Znl.x)) + println("Training RBF_ARX Centers: $(Znl.n_centers), Nonlinear parameters: $(length(Znl.x)), Linear parameters: $(length(Zl))") + for i = 1:liniters + function g(z) + znl = RbfNonlinearParameters(z,n_state,n_centers) + w = fitlinear(V) + jacobian(znl,Ψ, w) + end + f(z) = predictionerror(z) + + X0 = Znl.x + + if i%2 == 1 || !cuckoosearch + @time res = levenberg_marquardt(f, g, X0, + maxIter = nonliniters, + tolG = 1e-7, + tolX = 1e-10, + show_trace=true, + timeout = 60) + Znl = RbfNonlinearParameters(saturatePrecision(res.minimum,n_state),n_state,n_centers) + errors[i+1] = res.f_minimum + # show(res.trace) + else + display("Using cuckoo search to escape local minimum") + @time (bestnest,fmin) = cuckoo_search(x -> sum(f(x).^2),X0; + n=30, + pa=0.25, + Tol=1.0e-5, + max_iter = i < liniters-1 ? 
80 : 200, + timeout = 120) + debug("cuckoo_search done") + Znl = RbfNonlinearParameters(bestnest,n_state,n_centers) + errors[i+1] = fmin + end + if abs(errors[i+1]-errors[i]) < 1e-10 + display("No significant change in function value") + break + end + getΨ(Znl) + getLinearRegressor(Ψ) + fitlinear(V) + # Znl.x = res.minimum + end + + # Test =============================================== + getΨ(Znl) + getLinearRegressor(Ψ) + Zl = fitlinear(V); debug("fitlinear") + prediction = V*Zl + error = y - prediction + if PYPLOT || WINSTON + newplot(y,"k"); + plot(prediction,"b"); + plot(error,"r");title("Fitresult, RBF-ARX, n_a: $na, n_c: $(Znl.n_centers), Nonlin params: $(length(Znl.x)), Lin params: $(length(Zl)) RMSE = $(rms(error)) Fit = $(fit(y,prediction))") + plotcenters(Znl) + newplot(errors,"o"); title("Errors"); + end + + # Exit =============================================== + println("tainRBF_ARX done. Centers: $(Znl.n_centers), Nonlinear parameters: $(length(Znl.x)), Linear parameters: $(length(Zl)), RMSE: $(rms(error))") + + + +end + + + +function getcentersEq(state::VecOrMat, nc::Integer) + n_points = size(state,1) + n_state = typeof(state) <: Matrix ? size(state,2) : 1 + state_extrema = [minimum(state,1)' maximum(state,1)'] + statewidths = state_extrema[:,2] - state_extrema[:,1] + Δc = statewidths/nc + Z = zeros(nc, 2*n_state) # 2*n_state to fit center coordinates and scaling parameters + # Fill initial centers + for i = 1:n_state + Z[:,i] = collect((state_extrema[i,1]+ Δc[i]/2):Δc[i]:state_extrema[i,2]) + end + # add bandwidth parameters γ, give all centers the same bandwidth with Δc as a (hopefully) good initial guess + # display(Z) + Z[:,n_state+1:end] = 1*repmat(4./(Δc.^2)',nc,1) # Spread the initial guess to all centers + assert(all(Z[:,n_state+1:end].> 0)) + debug("Z done") + n_centers::Int64 = nc^n_state # new number of centers wich considers gridding of 1D centers + ZZ1 = zeros(n_state, n_centers) + ZZ2 = zeros(n_state, n_centers) + # Here comes the magic. Spread each one dimensional center onto a grid in n_state dimensions + v = n_centers + h = 1 + ii = 1 + for i = 1:n_state # For each iteration, v decreases and h increases by a factor of nc. If v and h are used in repmat(⋅,v,h) which is then vecotrized, the desired grid will be created. It's a bit tricky, but makes sense after a lot of thinking + v = convert(Int64, v / nc) + ZZ1[i,:] = vec(repmat(Z[:,i]',v,h))' + ZZ2[i,:] = vec(repmat(Z[:,i+n_state]',v,h))' + h *= nc + ii += 1 + end + debug("ZZ done") + RbfNonlinearParameters(vec([ZZ1; ZZ2]), n_state, n_centers) + #error("Bias parameter!") +end + + + + + -- GitLab
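
The core of trainRBF.jl is a Gaussian RBF expansion whose linear weights are obtained by the ridge-regularized solve w = (Ψ'Ψ + λI)\Ψ'b; each column of Ψ is a unit ψ(x) = exp(-(x − c)²·σ) with σ a precision, as in getPsiVec.jl, and trainRBF alternates this solve with Levenberg-Marquardt updates of the centers and precisions. Below is a minimal self-contained sketch of that linear step, not part of the patch, written in current Julia syntax rather than the Julia 0.3-era syntax the patch targets; names and data are illustrative only.

using LinearAlgebra

# One Gaussian unit per column of Ψ: ψ(x) = exp(-(x - c)^2 * σ), with σ a precision
function rbf_design(x::AbstractVector, centers::AbstractVector, σ::AbstractVector)
    Ψ = zeros(length(x), length(centers))
    for j in eachindex(centers), i in eachindex(x)
        Ψ[i, j] = exp(-(x[i] - centers[j])^2 * σ[j])
    end
    return Ψ
end

# Ridge-regularized weight fit, the same normal-equation solve used in trainRBF.jl
fit_weights(Ψ, b; λ = 1e-8) = (Ψ' * Ψ + λ*I) \ (Ψ' * b)

# Synthetic example
x       = collect(range(-2, 2, length = 150))
b       = x.^2 .+ 0.1 .* randn(150)
centers = collect(range(-2, 2, length = 5))
Ψ       = rbf_design(x, centers, fill(1.0, 5))
w       = fit_weights(Ψ, b)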
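
getCenters (getPsiVecNd.jl) and getcentersEq (trainRBF_ARX.jl) both use a repmat/vec construction to spread nc one-dimensional centers per state dimension onto a full grid of nc^n_state centers. The sketch below builds the same set of grid points with an explicit Cartesian product, in current Julia syntax; the ordering of the points may differ from the repmat version, and the function name is illustrative only.

# Every combination of the per-dimension centers: nc^n_state grid points
function grid_centers(centers1d::AbstractMatrix)      # nc × n_state, one column per state dimension
    n_state = size(centers1d, 2)
    cols    = [centers1d[:, i] for i in 1:n_state]
    pts     = vec(collect(Iterators.product(cols...)))
    return [p[i] for p in pts, i in 1:n_state]        # n_centers × n_state matrix
end

# Three centers per dimension in two dimensions gives 3^2 = 9 grid points
grid_centers([-1.0 0.0; 0.0 1.0; 1.0 2.0])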
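
The regressor V assembled in getLinearRegressor (for outputnet = false) corresponds to an ARX predictor whose coefficients are themselves RBF networks in the scheduling state: ŷ_t = Σ_i a_i(s_t)·A[t,i] + bias, where each a_i(s) is a weighted sum of the Gaussian units plus a constant unit, which matches the linear parameter count na·(n_centers+1)+1 quoted in the docstring. A hypothetical standalone sketch of that predictor structure follows (current Julia syntax, without the normalized-Ψ option; function and argument names are illustrative only).

# Gaussian unit with per-dimension precision γ
ψ(s, c, γ) = exp(-sum(γ .* (s .- c).^2))

# Hypothetical predictor: A is n_points × na (lagged outputs), S is n_points × n_state
# (scheduling state), centers and precisions are n_state × n_centers, and w has
# na*(n_centers + 1) + 1 entries laid out as in getLinearRegressor (outputnet = false).
function rbfarx_predict(A, S, centers, precisions, w)
    n_points, na = size(A)
    n_centers    = size(centers, 2)
    ŷ = fill(w[end], n_points)                              # global bias (last column of V)
    for t in 1:n_points, i in 1:na, k in 1:(n_centers + 1)
        ϕ = k <= n_centers ? ψ(S[t, :], centers[:, k], precisions[:, k]) : 1.0  # constant unit
        ŷ[t] += w[k + (n_centers + 1)*(i - 1)] * ϕ * A[t, i]
    end
    return ŷ
end

In the outputnet = true case the same construction is repeated once per state dimension, giving the n_state·(n_centers+1)·na + 1 linear parameters that V is sized for in trainRBF_ARX.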