diff --git a/src/rbfmodels/levenberg_marquardt.jl b/src/rbfmodels/levenberg_marquardt.jl
index 3f30a3f5a380d3a4c24b23e443aa3312021874b4..c78b1e253cbd6e9beaf235fc08a67452dd53fd6d 100644
--- a/src/rbfmodels/levenberg_marquardt.jl
+++ b/src/rbfmodels/levenberg_marquardt.jl
@@ -46,7 +46,7 @@ function levenberg_marquardt(f::Function, g::Function, x0; tolX=1e-8, tolG=1e-12
     # Maintain a trace of the system.
     tr = Optim.OptimizationTrace()
     if show_trace
-        d = {"lambda" => lambda}
+        d = Dict("lambda" => lambda)
         os = Optim.OptimizationState(iterCt, rms(fcur), NaN, d)
 
         push!(tr, os)
@@ -101,7 +101,7 @@ function levenberg_marquardt(f::Function, g::Function, x0; tolX=1e-8, tolG=1e-12
         # show state
         if show_trace && (iterCt < 20 || iterCt % 10 == 0)
             gradnorm = norm(J'*fcur, Inf)
-            d = {"lambda" => lambda}
+            d = Dict("lambda" => lambda)
             os = Optim.OptimizationState(iterCt, rms(fcur), gradnorm, d)
             push!(tr, os)
             println("Iter:$iterCt, f:$(round(os.value,5)), ||g||:$(round(gradnorm,5)), λ:$(round(lambda,5))")
diff --git a/src/rbfmodels/trainRBF_ARX.jl b/src/rbfmodels/trainRBF_ARX.jl
index 1605083fbec2017a2084b67f9fd2c25193e902b2..f1eda12f4b043647b07b1b5af4002cbfabecbbc6 100644
--- a/src/rbfmodels/trainRBF_ARX.jl
+++ b/src/rbfmodels/trainRBF_ARX.jl
@@ -11,9 +11,6 @@ type RbfNonlinearParameters
 end
 
 
-
-
-
 # for op = (:+, :*, :\, :/)
 #     @eval ($op)(a::RbfNonlinearParameters,b) = ($op)(a.x,b)
 #     @eval ($op)(b,a::RbfNonlinearParameters) = ($op)(b,a.x)
@@ -44,17 +41,7 @@ function trainRBF_ARX(y, A, state, nc; liniters=3,nonliniters=50, normalized=fal
 
     # Get initial centers ================================
     Znl::RbfNonlinearParameters
-    if isa(inputpca, Int)
-        if inputpca > size(state,2)
-            warn("inputpca must be <= n_state")
-            inputpca = size(state,2)
-        end
-        state-= repmat(mean(state,1),n_points,1)
-        state./= repmat(var(state,1),n_points,1)
-        C,score,latent,W0 = PCA(state,true)
-        state = score[:,1:inputpca]
-    end
-    n_state = size(state,2)
+    state, n_state, TT = inputtransform(state, inputpca)
 
     if lowercase(initialcenters) == "equidistant"
         initialcenters = :equidistant
@@ -73,7 +60,8 @@ function trainRBF_ARX(y, A, state, nc; liniters=3,nonliniters=50, normalized=fal
     else
         Znl = getcentersKmeans(state, nc, predictionerror, n_state); debug("gotcentersKmeans")
     end
-    @ddshow Znl
+    @show Znl
+    @show size(state)
     getΨ(Ψ, Znl, state, n_points, n_state, normalized); debug("Got Ψ")
     @ddshow sum(!isfinite(Ψ))
     getLinearRegressor(V,Ψ,A,state,outputnet,na,n_state,n_centers,n_points); debug("Got linear regressor V")
@@ -85,6 +73,7 @@ function trainRBF_ARX(y, A, state, nc; liniters=3,nonliniters=50, normalized=fal
     error = y - prediction
     errors = zeros(liniters+1)
     Lb,Ub = getbounds(Znl, state, n_state, n_centers)
+#     Timing: 5 Hz takes 5:09 min, 10 Hz 7:10 min and 40 Hz 16
 
     # ============= Main loop  ================================================
     debug("Calculating initial error")
@@ -93,7 +82,7 @@ function trainRBF_ARX(y, A, state, nc; liniters=3,nonliniters=50, normalized=fal
     function g(z)
         znl = RbfNonlinearParameters(z,n_state,n_centers)
         w = fitlinear(V,y)
-        return outputnet ? jacobian_outputnet(znl,Ψ, w, V) : jacobian_no_outputnet(znl,Ψ, w, V)
+        return outputnet ? jacobian_outputnet(znl,Ψ, w, V, state) : jacobian_no_outputnet(znl,Ψ, w, V, state)
     end
     f(z) = predictionerror(z)
     X0 = deepcopy(Znl.x)
@@ -162,12 +151,161 @@ function trainRBF_ARX(y, A, state, nc; liniters=3,nonliniters=50, normalized=fal
 
     # Exit ===============================================
     println("tainRBF_ARX done. Centers: $(Znl.n_centers), Nonlinear parameters: $(length(Znl.x)), Linear parameters: $(length(Zl)), RMSE: $(rms(error))")
+end
+
+
+
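+# trainRBF(y, state, nc; ...)
+# Fit a Gaussian RBF model Ψ(state)*w ≈ y with nc centers (nc^n_state centers when they are
+# placed on an equidistant grid). The nonlinear parameters (centers and bandwidths) are tuned
+# with Levenberg-Marquardt, optionally interleaved with cuckoo search, while the linear
+# weights w are refit by least squares at every step. Returns the final prediction Ψ*w.
+# Hypothetical usage: yhat = trainRBF(y, state, 4; liniters=3, cuckoosearch=true)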
+function trainRBF(y, state, nc; liniters=3,nonliniters=50, normalized=false, initialcenters="equidistant", inputpca=false, cuckoosearch = false, cuckooiter=100)
+    n_points = length(y)
+    function predictionerror(z)
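+        # Residual for the nonlinear parameters z (separable least squares):
+        # rebuild Ψ for z, refit the linear weights by least squares and return Ψ*zl - y.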
+        znl = RbfNonlinearParameters(z,n_state,n_centers)
+        getΨ(Ψ, znl, state, n_points, n_state, normalized)
+        zl = fitlinear(Ψ,y);
+        prediction = Ψ*zl
+        error = prediction-y
+        return error
+    end
+
+    # Get initial centers ================================
+    Znl::RbfNonlinearParameters
+
+    state, n_state, TT = inputtransform(state, inputpca)
+    #     n_state = size(state,2)
+
+    if lowercase(initialcenters) == "equidistant"
+        initialcenters = :equidistant
+        n_centers = nc^n_state
+    else
+        initialcenters = :kmeans
+        n_centers = nc
+    end
+    Ψ = Array(Float64,(n_points,n_centers+1))
+    Ψ[:,end] = 1.0
+    if initialcenters == :equidistant
+        Znl = getcentersEq(state,nc); debug("gotcentersEq")
+    else
+        Znl = getcentersKmeans(state, nc, predictionerror, n_state); debug("gotcentersKmeans")
+    end
+    #     Znl.x += 0.1*randn(size(Znl.x))
+    @ddshow Znl
+    getΨ(Ψ, Znl, state, n_points, n_state, normalized); debug("Got Ψ")
+#     matshow(deepcopy(Ψ));axis("tight"); colorbar()
+#     plotparams(Znl)
+
+
+    @ddshow sum(!isfinite(Ψ))
+    w = fitlinear(Ψ,y); debug("fitlinear")
+    newplot(w,"o"); title("Linear parameters")
+    @ddshow sum(!isfinite(w))
+    prediction = Ψ*w
+    error = y - prediction
+#     newplot([y prediction error]);title("inbetween")
+    errors = zeros(liniters+1)
+    Lb,Ub = getbounds(Znl, state, n_state, n_centers)
+
+    # ============= Main loop  ================================================
+    debug("Calculating initial error")
+    errors[1] = rms(predictionerror(Znl.x))
+    println("Training RBF_ARX Centers: $(Znl.n_centers), Nonlinear parameters: $(length(Znl.x)), Linear parameters: $(length(w))")
+    function g(z)
+        znl = RbfNonlinearParameters(z,n_state,n_centers)
+        w = fitlinear(Ψ,y)
+        return jacobian(znl,Ψ, w, state)
+    end
+    f(z) = predictionerror(z)
+    X0 = deepcopy(Znl.x)
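+    # Alternate between local Levenberg-Marquardt refinement (odd iterations) and, when
+    # cuckoosearch=true, a global cuckoo search pass (even iterations) to escape local minima.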
+    for i = 1:liniters
+        if i%2 == 1 || !cuckoosearch
+            @time res = levenberg_marquardt(f, g, X0,
+                                            maxIter = nonliniters,
+                                            tolG = 1e-7,
+                                            tolX = 1e-10,
+                                            show_trace=true,
+                                            timeout = 60,
+                                            n_state = n_state)
+            X0 = deepcopy(res.minimum)
+            DEBUG && assert(X0 == res.minimum)
+            DEBUG && @show ff1 = rms(f(X0))
+            if DEBUG
+                _Ψ = deepcopy(Ψ)
+            end
+            DEBUG && @show ff2 = rms(f(res.minimum))
 
+            if DEBUG
+                @assert ff1 == ff2
+                @show res.minimum == X0
+                @show _Ψ == Ψ
+            end
+            assert(X0 == res.minimum)
+            #             Znl = RbfNonlinearParameters(saturatePrecision(copy(res.minimum),n_state),n_state,n_centers)
+            Znl = RbfNonlinearParameters(deepcopy(res.minimum),n_state,n_centers)
+            errors[i+1] = res.f_minimum
+            # show(res.trace)
+        else
+            display("Using cuckoo search to escape local minimum")
+            @time (bestnest,fmin) = cuckoo_search(x -> rms(f(x)),X0, Lb, Ub;
+                                                  n=50,
+                                                  pa=0.25,
+                                                  Tol=1.0e-5,
+                                                  max_iter = i < liniters-1 ? cuckooiter : 2cuckooiter,
+                                                  timeout = 120)
+            debug("cuckoo_search done")
+            X0 = deepcopy(bestnest)
+            @ddshow rms(f(X0))
+            Znl = RbfNonlinearParameters(deepcopy(bestnest),n_state,n_centers)
+            errors[i+1] = fmin
+        end
+        if abs(errors[i+1]-errors[i]) < 1e-10
+            display("No significant change in function value")
+            break
+        end
+    end
+
+    # Test ===============================================
+    getΨ(Ψ, Znl, state, n_points, n_state, normalized)
+    w = fitlinear(Ψ,y); debug("fitlinear")
+    prediction = Ψ*w
+    error = y - prediction
+    if PYPLOT || WINSTON
+        newplot(y,"k");
+        plot(prediction,"b");
+        plot(error,"r");title("Fitresult, RBF, n_c: $(Znl.n_centers), Nonlin params: $(length(Znl.x)), Lin params: $(length(w)), RMSE = $(rms(error)), Fit = $(fit(y,prediction))")
+        plotcenters(Znl)
+        newplot(errors,"o"); title("Errors");
+    end
 
+    # Exit ===============================================
+    println("tainRBF done. Centers: $(Znl.n_centers), Nonlinear parameters: $(length(Znl.x)), Linear parameters: $(length(w)), RMSE: $(rms(error))")
+    return prediction
+end
 
+type InputTransform
+    mean
+    variance
+    C
 end
 
+function inputtransform(state, inputpca)
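+    # Standardize every column of state (subtract the mean, divide by the std). If inputpca
+    # is an Int, additionally project onto the first inputpca principal components. Returns
+    # the transformed state, its new dimension and an InputTransform recording the
+    # transformation, presumably so it can be reapplied to new data.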
+    n_points = size(state,1)
+    m = mean(state,1)
+    v = std(state,1)
+    state-= repmat(m,n_points,1)
+    state./= repmat(v,n_points,1)
+    if isa(inputpca, Int)
+        if inputpca > size(state,2)
+            warn("inputpca must be <= n_state")
+            inputpca = size(state,2)
+        end
+        C,score,latent,W0 = PCA(state,true)
+        state = score[:,1:inputpca]
+    else
+        C = 1
+    end
+    n_state = size(state,2)
+    #     newplot(state); title("State after transformation")
+    return state, n_state, InputTransform(m,v,C)
 
+end
 
 function getcentersEq(state::VecOrMat, nc::Integer)
     n_points = size(state,1)
@@ -182,7 +320,7 @@ function getcentersEq(state::VecOrMat, nc::Integer)
     end
     # add bandwidth parameters γ, give all centers the same bandwidth with Δc as a (hopefully) good initial guess
     #     display(Z)
-    Z[:,n_state+1:end] = 1*repmat(4./(Δc.^2)',nc,1) # Spread the initial guess to all centers
+    Z[:,n_state+1:end] = ones(nc,n_state) # Initialize all bandwidth parameters γ to 1
     assert(all(Z[:,n_state+1:end].> 0))
     debug("Z done")
     n_centers::Int64 = nc^n_state # new number of centers wich considers gridding of 1D centers
@@ -204,6 +342,36 @@ function getcentersEq(state::VecOrMat, nc::Integer)
     #error("Bias parameter!")
 end
 
+function getcentersKmeans(state, nc::Int, f::Function, n_state::Int)
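+    # Run kmeans several times (alternating :rand and :kmpp seeding, :kmcen on the last run),
+    # set each center's precisions from the diagonal of the inverse cluster covariance, and
+    # keep the candidate parameter vector with the lowest RMS prediction error.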
+    iters = 21
+    errorvec = zeros(iters)
+    params = Array(Float64,(nc*2*n_state,iters))
+    methods = [:rand;:kmpp]
+    for iter = 1:iters
+        clusterresult = kmeans(state', nc; maxiter=200, display=:none, init=iter<iters ? methods[iter%2+1] : :kmcen)
+        for i = 1:nc
+            si = 1+(i-1)n_state*2
+            params[si:si+n_state-1,iter] = clusterresult.centers[:,i]
+            C = cov(state[clusterresult.assignments .== i,:])
+            C = cond(C) > 1e4 ? eye(C) : C
+            params[si+n_state:si+2n_state-1,iter] = diag(inv(C))
+            any(diag(inv(C)) .< 0) && show(C)
+        end
+        errorvec[iter] = rms(f(params[:,iter]))
+    end
+    println("Std in errors among initial centers: ", round(std(errorvec),6))
+    ind = indmin(errorvec)
+    return RbfNonlinearParameters(params[:,ind],n_state, nc)
+end
+
+function plotcenters(Z)
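+    # Scatter plot of the first two coordinates of every center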
+    X = zeros(Z.n_centers,2)
+    for (i,Zi) in enumerate(Z)
+        X[i,:] = Zi[1:2]'
+    end
+    newplot(X[:,1],X[:,2],"o"); title("Centers")
+end
+
 
 
 
@@ -216,7 +384,7 @@ function getΨ(Ψ, Znl, state, n_points, n_state, normalized::Bool)
 end
 
 function getΨnormalized(Ψ, Znl, state, n_points, n_state)
-    RBF(x, Znl::VecOrMat,n_state::Integer) = exp(-(((x-Znl[1:n_state]).^2.*Znl[n_state+1:end])[1]))
+    RBF(x, Znl::VecOrMat,n_state::Integer) = exp(-(sum((x-Znl[1:n_state]).^2.*Znl[n_state+1:end])))
     rowsum = ones(n_points)
     for (j,Zin) in enumerate(Znl)
         Zi = deepcopy(Zin)
@@ -240,45 +408,52 @@ function getΨnormalized(Ψ, Znl, state, n_points, n_state)
 end
 
 function getΨnonnormalized(Ψ, Znl, state, n_points, n_state)
-    RBF(x, Znl::VecOrMat,n_state::Integer) = exp(-(((x-Znl[1:n_state]).^2.*Znl[n_state+1:end])[1]))
-    for (j,Zin) in enumerate(Znl)
-        Zi = deepcopy(Zin)
+    #     RBF(x, Z,n_state) = exp(-(x-Z[1:n_state]).^2)[1]
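+    # Gaussian RBF with center μ = Z[1:n_state] and precisions γ = Z[n_state+1:end]:
+    # Ψ[i,j] = exp(-Σ_p γ_p (state[i,p] - μ_p)^2)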
+    RBF(x, Z,n_state) = exp(-(sum(((x-Z[1:n_state]).^2).*Z[n_state+1:end])))
+    for (j,Zi) in enumerate(Znl)
         for i = 1:n_points
-            statei = squeeze(state[i,:]',2)
+            statei = state[i,:]'
             #                     statei = slice(state,i,:)
             Ψ[i,j] = RBF(statei, Zi, n_state)
             if DEBUG && !isfinite(Ψ[i,j])
                 @show i,j,statei, Zi, n_state, Ψ[i,j]
                 @show (statei-Zi[1:n_state]).^2
                 @show Zi[n_state+1:end]
-                #                     @show exp(-(((statei-Zi[1:n_state]).^2.*Zi[n_state+1:end])[1]))
                 error("Stopping")
             end
         end
     end
+
     return Ψ
 end
 
-
 function getLinearRegressor(V,Ψ,A,state,outputnet,na,n_state,n_centers,n_points)
-    if outputnet
-        ii = 1
-        for i = 1:na
-            for k = 1:n_centers+1
-                for j = 1:n_state
-                    for l = 1:n_points
-                        V[l,ii] = Ψ[l,k]*A[l,i]*state[l,j]
-                    end
-                    ii = ii+1
-                end
+    outputnet ? getLinearRegressor_outputnet(V,Ψ,A,state,na,n_state,n_centers,n_points) : getLinearRegressor_no_outputnet(V,Ψ,A,state,na,n_state,n_centers,n_points)
+end
+
+function getLinearRegressor_no_outputnet(V,Ψ,A,state,na,n_state,n_centers,n_points)
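+    # One column of V per (AR lag i, basis function k): V[l,ii] = Ψ[l,k]*A[l,i], i.e. every
+    # ARX coefficient is expanded in the RBF basis (the column k = n_centers+1 is the constant).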
+    ii = 1
+    for i = 1:na
+        for k = 1:n_centers+1
+            for l = 1:n_points
+                V[l,ii] = Ψ[l,k]*A[l,i]
             end
+            ii = ii+1
         end
-    else
-        ii = 1
-        for i = 1:na
-            for k = 1:n_centers+1
+    end
+    if DEBUG && sum(!isfinite(V)) > 0
+        @show sum(!isfinite(V))
+    end
+    return V
+end
+
+function getLinearRegressor_outputnet(V,Ψ,A,state,na,n_state,n_centers,n_points)
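+    # As above, but each (lag, basis) column is additionally multiplied elementwise by a
+    # state component: V[l,ii] = Ψ[l,k]*A[l,i]*state[l,j]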
+    ii = 1
+    for i = 1:na
+        for k = 1:n_centers+1
+            for j = 1:n_state
                 for l = 1:n_points
-                    V[l,ii] = Ψ[l,k]*A[l,i]
+                    V[l,ii] = Ψ[l,k]*A[l,i]*state[l,j]
                 end
                 ii = ii+1
             end
@@ -291,13 +466,7 @@ function getLinearRegressor(V,Ψ,A,state,outputnet,na,n_state,n_centers,n_points
 end
 
 
-function plotcenters(Z)
-    X = zeros(Z.n_centers,2)
-    for (i,Zi) in enumerate(Z)
-        X[i,:] = Zi[1:2]'
-    end
-    newplot(X[:,1],X[:,2],"o"); title("Centers")
-end
+
 
 function getbounds(Znl, state, n_state, n_centers)
     Lb = zeros(Znl.x)
@@ -314,29 +483,10 @@ function getbounds(Znl, state, n_state, n_centers)
 end
 
 
-function getcentersKmeans(state, nc::Int, f::Function, n_state::Int)
-    iters = 21
-    errorvec = zeros(iters)
-    params = Array(Float64,(nc*2*n_state,iters))
-    methods = [:rand;:kmpp]
-    for iter = 1:iters
-        clusterresult = kmeans(state', nc; maxiter=200, display=:none, init=iter<iters ? methods[iter%2+1] : :kmcen)
-        for i = 1:nc
-            si = 1+(i-1)n_state*2
-            params[si:si+n_state-1,iter] = clusterresult.centers[:,i]
-            C = cov(state[clusterresult.assignments .== i,:])
-            params[si+n_state:si+2n_state-1,iter] = diag(inv(C))
-            @assert !any(diag(inv(C)) .< 0)
-        end
-        errorvec[iter] = rms(f(params[:,iter]))
-    end
-    println("Std in errors among initial centers: ", round(std(errorvec),3))
-    ind = indmin(errorvec)
-    return RbfNonlinearParameters(params[:,ind],n_state, nc)
-end
 
 
-function jacobian_outputnet(Znl, Ψ, w, V)
+
+function jacobian_outputnet(Znl, Ψ, w, V, state)
     n_points = size(Ψ,1)
     n_state = Znl.n_state
     n_centers = Znl.n_centers
@@ -365,7 +515,7 @@ function jacobian_outputnet(Znl, Ψ, w, V)
     return J
 end
 
-function jacobian_no_outputnet(Znl, Ψ, w,v)
+function jacobian_no_outputnet(Znl, Ψ, w,v, state)
     n_points = size(Ψ,1)
     n_state = Znl.n_state
     n_centers = Znl.n_centers
@@ -392,6 +542,29 @@ function jacobian_no_outputnet(Znl, Ψ, w,v)
     return J
 end
 
+function jacobian(Znl, Ψ, w, state)
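+    # Analytic Jacobian of the residual Ψ*w - y w.r.t. the nonlinear parameters. With
+    # Ψ[l,k] = exp(-Σ_p γ_p (x_p - μ_p)^2), the partial derivatives are
+    #   ∂/∂μ_p:  2 w_k Ψ[l,k] γ_p (x_p - μ_p)
+    #   ∂/∂γ_p: -w_k Ψ[l,k] (x_p - μ_p)^2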
+    n_points = size(Ψ,1)
+    n_state = Znl.n_state
+    n_centers = Znl.n_centers
+    J = Array(Float64,(n_points,length(Znl.x)))
+    ii = 1
+    for (k,Zi) in enumerate(Znl)
+        μ = Zi[1:n_state] # slice?
+        γ = Zi[n_state+1:end]
+        i1 = ii-1
+        for l = 1:n_points
+            Ψw = Ψ[l,k]*w[k]
+            for p = 1:n_state
+                x_μ = state[l,p]-μ[p]
+                J[l,i1+p] = 2*Ψw*x_μ*γ[p]
+                J[l,i1+n_state+p] = (-Ψw)*x_μ^2
+            end
+        end
+        ii += 2n_state
+    end
+    return J
+end
+
 function saturatePrecision(x,n_state)
     for i = 1:2n_state:length(x)
         range = i+n_state:i+2n_state-1
@@ -412,3 +585,12 @@ function fitlinear(V,y)
         error("Linear fitting failed")
     end
 end
+
+function Base.show(io::IO, p::RbfNonlinearParameters)
+    println(io, round(reshape(p.x,2p.n_state,p.n_centers),4))
+end
+
+function plotparams(p::RbfNonlinearParameters)
+    newplot(reshape(p.x,2p.n_state,p.n_centers)',"o")
+    title("Nonlinear parameters")
+end