Commit ae8c62a7 authored by Axel Ahrens's avatar Axel Ahrens

Merge branch 'master' of gitlab.control.lth.se:letter2martin/turtlerob1

parents 1f081f48 4752a980
% Assuming that you already have a 'client' handle, and that the 'binauloc'
% component is running on the turtlebot, load this component.
binauloc = client.load('binauloc')
% Connect the input port of binauloc to the output port of bass, so that
% binauloc receives the raw audio data from bass. Note that the data flows
% from bass on the Raspberry Pi to binauloc on the turtlebot; the audio
% data does not pass through Matlab here.
binauloc.connect_port('Audio', 'bass/Audio')
% Set up the parameters of the algorithm. For more information about the
% role of each parameter, call binauloc.Setup('-h'). For further details,
% cf. Portello et al., IROS 2013 & IROS 2014.
binauloc.Setup( ...
'/home/turtlebot/genom3/activeloc/data/setup', ... % just a file to write parameters in (please do not change)
24414, ... % the sample rate
5000, ... % the maximum frequency
1024, ... % the number of samples per frame (here a frame is a block of audio)
512, ... % the overlap between two frames
'::localization::HANNING', ... % the window function
10, ... % contributes to the smoothing of the pseudo-likelihood
'/home/turtlebot/genom3/activeloc/data/projector', ... % another file to write data in (please do not change)
'/home/turtlebot/genom3/activeloc/data/noise') % yet another file to write data in (please do not change)
% Learn the noise characteristics. Make sure the environment is silent
% before calling this service!
binauloc.LearnNoise(2) % 2 seconds
% Use the inter-aural transfer function to compute a projector on the space
% spanned by the steering vector [1; inter-aural transfer function]
binauloc.PretabProj('/home/turtlebot/genom3/activeloc/steerVecs/steerVecS72')
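% For intuition (illustration only, not a step of the procedure): the
% projector onto the space spanned by the steering vector d = [1; H(f)],
% with H the inter-aural transfer function at frequency f, is the
% rank-one matrix d*d'/(d'*d). PretabProj computes this from the steering
% vector file; the sketch below only illustrates the formula, using a
% placeholder H.
H = exp(1i*2*pi*rand(129,1));   % placeholder inter-aural transfer function, one value per bin
P = zeros(2,2,numel(H));
for k = 1:numel(H)
    d = [1; H(k)];              % steering vector for bin k
    P(:,:,k) = (d*d')/(d'*d);   % orthogonal projector onto span{d}
end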
% Start the localization (runs indefinitely, don't forget the '-a' option)
rLocalizeSources = binauloc.LocalizeSources('-a', 1, 1, 4)
% The azimuth detection is now running; you can retrieve the current
% estimated azimuth with p = binauloc.Azimuths(). The value is in radians,
% increasing towards the right.
% The localization can be stopped with binauloc.StopLocalization()
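% Illustration only (not a required step): poll the current estimate once
% per second for ten seconds, then stop the localization. The returned
% value is in radians, increasing towards the right; rad2deg converts it
% to degrees if needed.
for k = 1:10
    p = binauloc.Azimuths();   % current azimuth estimate
    disp(p)                    % inspect the returned value / structure
    pause(1)
end
binauloc.StopLocalization()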
function r = g_density(y,xp,xr)
% Measurement log-density: log-likelihood of the measured azimuth y for
% each particle, given the robot position xr and the particle positions
% xp (2-by-N). Returns an N-by-1 vector; y must be in degrees, matching atan2d.
sigma_v = 1;                      % measurement noise std
N = size(xp,2);                   % number of particles
v = xp - repmat(xr,1,N);          % vectors from the robot to each particle
az = atan2d(v(2,:),v(1,:)).';     % particle azimuths in degrees, as a column
r = -0.5/sigma_v^2 * (y-az).^2;   % Gaussian noise model; try Laplacian noise for robustness
end
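% The comment above suggests a Laplacian noise model for robustness; a
% minimal sketch of that variant (the scale b is an assumed tuning
% parameter), mirroring g_density:
function r = g_density_laplace(y,xp,xr)
% Laplacian measurement log-density: heavier tails than the Gaussian, so
% occasional outlier measurements pull the particle weights around less.
b = 1;                            % scale parameter (assumed; tune as needed)
N = size(xp,2);                   % number of particles
v = xp - repmat(xr,1,N);          % vectors from the robot to each particle
az = atan2d(v(2,:),v(1,:)).';     % particle azimuths in degrees, as a column
r = -abs(y-az)/b;                 % log-likelihood up to an additive constant
end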
function [xp, w, expw] = pf(xr, y, xp, w, expw, g_density, f)
% Particle filter for sound source tracking.
%   xr        robot position
%   y         measured azimuth
%   xp        particles (2-by-N source positions)
%   w, expw   log-weights and linear weights from the previous step
%   g_density measurement log-density handle, g_density(y,xp,xr), returning N-by-1
%   f         motion model handle
N = size(xp,2);
% Resample when the effective sample size drops below N/2
if(1/sum(expw.^2) < N/2)
    % Systematic resampling of the particle indices
    bins = [0 cumsum(expw')];
    bins = bins./bins(end);
    [~, j] = histc(rand(1)/N:1/N:1, bins);
    xp = xp(:,j);
    w = log(1/N)*ones(N,1);   % reset to uniform log-weights
end
% Time update: propagate the particles through the motion model
xp = f(xp);
% Measurement update: add the log-likelihood to the log-weights
w = w + g_density(y,xp,xr);
% Normalize the weights via log-sum-exp (for numerical stability)
offset = max(w);
normConstant = log(sum(exp(w-offset)))+offset;
w = w - normConstant;         % now sum(exp(w)) = 1
expw = exp(w);
end
@@ -45,18 +45,15 @@ mObj = manager(dObj,'localization',par_sub);
%% Particle filter parameters
sigma_w = 1;     % State (process) noise std
sigma_v = 1;     % Measurement noise std
sigma_w0 = 10;   % Std of the initial particle spread
fs = 24414;      % Sample rate
chunks = 2048;   % Samples per processed chunk
dt = chunks/fs;  % Time between two updates
Npart = 500;     % Number of particles
xp = sigma_w0*randn(2,Npart);        % Initial particles: 2-D source positions
f = @(x) x + sigma_w*randn(size(x)); % Random-walk motion model
% g_density = @(y,x) -0.5/sigma_v * (y-x(1,:)).^2; % old azimuth-state density; superseded by the g_density function above
%% Initialization
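% A minimal sketch (an assumption, not part of the original file) of how
% the pieces above could be exercised with a synthetic measurement stream:
% the source is assumed fixed at xs, the robot at xr, and the measured
% azimuth is the true azimuth plus Gaussian noise (in degrees, to match
% g_density).
xr = [0; 0];                         % assumed robot position
xs = [2; 1];                         % assumed source position
w    = log(1/Npart)*ones(Npart,1);   % uniform initial log-weights
expw = exp(w);                       % corresponding linear weights
for k = 1:50
    y = atan2d(xs(2)-xr(2), xs(1)-xr(1)) + sigma_v*randn;   % noisy azimuth measurement
    [xp, w, expw] = pf(xr, y, xp, w, expw, @g_density, f);  % one filter update
    xhat = xp*expw;                  % weighted particle mean = position estimate
end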