Analysis

f_analysis

Code

function rst = f_analysis(stg,analysis,mmf,analysis_options)

if contains(analysis,analysis_options(1))
    disp("Starting " + analysis_options(1))
    rst.diag = f_diagnostics(stg,mmf);
    disp(analysis_options(1) + " completed successfully")
end

if contains(analysis,analysis_options(2))
    disp("Starting " + analysis_options(2))
    rst.opt = f_opt(stg,mmf);
    disp(analysis_options(2) + " completed successfully")
end

if contains(analysis,analysis_options(3))
    disp("Starting " + analysis_options(3))
    rst.gsa = f_gsa(stg,mmf);
    disp(analysis_options(3) + " completed successfully")
end
end

Calls the appropriate analysis function(s) depending on the analyses chosen in the settings file. The currently supported analyses are Diagnostics, Optimization, and Global Sensitivity Analysis, described in the sections below.
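
As a rough usage sketch (the option strings and variable values below are illustrative assumptions, not taken from the toolbox), f_analysis dispatches on the analysis string and the list of options:

% Hypothetical dispatch example; the option strings are assumed
analysis_options = ["Diagnostics","Optimization","Global sensitivity analysis"];
analysis = "Diagnostics";                            % run only the first analysis
rst = f_analysis(stg,analysis,mmf,analysis_options);
% rst.diag now holds the diagnostics results; rst.opt and rst.gsa are only
% created when their analyses are requested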

Diagnostics

f_diagnostics

Code

function rst = f_diagnostics(stg,mmf)

% Run the model and obtain scores for fitness Multi Core
if stg.optmc
    disp("Running the model and obtaining Scores (Multicore)")

    pa = stg.pa;
    % Iterate over the parameter arrays to be tested
    parfor n = stg.pat
        [~,rst(n),~] = f_sim_score(pa(n,:),stg,mmf);
    end

    % Run the model and obtain scores for fitness single Core
else
    disp("Running the model and obtaining Scores (Singlecore)")

    % Iterate over the parameter arrays to be tested
    for n = stg.pat
        [~,rst(n),~] = f_sim_score(stg.pa(n,:),stg,mmf);
    end
end
end

Used to understand the effects of different parameter sets on model behaviour, or to compare different parameter sets.
It loads the user-defined configurations, performs all the required simulations, and calculates the scores of the error functions per experimental output, per experiment, and in total (check results).
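
A minimal sketch of the settings fields that f_diagnostics relies on, assuming three parameters and two candidate parameter sets (the field values here are illustrative, not toolbox defaults):

stg.optmc = false;             % single-core run (true switches to parfor)
stg.pa = [0.1  2.0 -1.5;       % each row is one parameter set to score
          0.3  1.7 -0.9];
stg.pat = 1:size(stg.pa,1);    % indices of the rows to simulate
rst = f_diagnostics(stg,mmf);  % rst(n) holds the scores for row n of stg.pa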

Optimization

f_opt

Code

function rst = f_opt(stg,mmf)
% Call function to run fmincon optimization algorithm if chosen in settings

if stg.fmincon
    rst(1) = f_opt_fmincon(stg,mmf);
end

% Call function to run simulated annealing optimization algorithm if chosen
% in settings
if stg.sa
    rst(2) = f_opt_sa(stg,mmf);
end

% Call function to run pattern search optimization algorithm if chosen in
% settings
if stg.psearch
    rst(3) = f_opt_psearch(stg,mmf);
end

% Call function to run genetic algorithm optimization if chosen in settings
if stg.ga
    rst(4) = f_opt_ga(stg,mmf);
end

% Call function to run Particle swarm optimization algorithm if chosen in
% settings
if stg.pswarm
    rst(5) = f_opt_pswarm(stg,mmf);
end

% Call function to run Surrogate optimization algorithm if chosen in
% settings
if stg.sopt
    rst(6) = f_opt_sopt(stg,mmf);
end
end

Calls the optimizer or optimizers that have been chosen in the settings file.
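
A hedged illustration of how the optimizer flags in the settings select which entries of rst get filled (the flag names come from the code above; the values are just an example):

stg.fmincon = true;     % rst(1): fmincon
stg.sa      = false;    % rst(2): simulated annealing
stg.psearch = false;    % rst(3): pattern search
stg.ga      = true;     % rst(4): genetic algorithm
stg.pswarm  = false;    % rst(5): particle swarm
stg.sopt    = false;    % rst(6): surrogate optimization
rst = f_opt(stg,mmf);   % here only rst(1) and rst(4) hold optimizer results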

f_opt_start

Code

function [spoint,spop] = f_opt_start(stg)

% Set the random seed for reproducibility
rng(stg.rseed);

% Optimization Start method 1
if stg.osm == 1

    % Get a random starting point or group of starting points, if using
    % multistart, inside the bounds
    spoint = lhsdesign(stg.msts,stg.parnum).*(stg.ub-stg.lb)+stg.lb;

    % Get a group of random starting points inside the bounds
    spop = lhsdesign(stg.popsize,stg.parnum).*(stg.ub-stg.lb)+stg.lb;

    % Optimization Start method 2
elseif stg.osm == 2

    % Get a random starting point or group of starting points, if using
    % multistart, near the best point
    spoint = stg.bestpa - stg.dbpa +...
        (stg.dbpa*2*lhsdesign(stg.msts,stg.parnum));

    % Get a group of random starting points near the best point
    spop = stg.bestpa - stg.dbpa +...
        (stg.dbpa*2*lhsdesign(stg.popsize,stg.parnum));
end
end

Creates the starting parameter set or sets for the optimizations, depending on whether single start or multistart is selected in the settings file.
It supports two different random distributions for the starting points.
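
A small sketch of calling f_opt_start with start method 1, assuming two parameters with log-space bounds (all field values are illustrative):

stg.rseed   = 1;                  % random seed for reproducibility
stg.osm     = 1;                  % start method 1: sample anywhere in the bounds
stg.msts    = 5;                  % number of multistart points
stg.popsize = 20;                 % population size for population-based optimizers
stg.parnum  = 2;                  % number of parameters
stg.lb      = [-3 -3];            % lower bounds (log space)
stg.ub      = [ 1  1];            % upper bounds (log space)
[spoint,spop] = f_opt_start(stg); % spoint is 5x2, spop is 20x2
% With stg.osm == 2, the points would instead be drawn within stg.dbpa of
% the current best parameter set stg.bestpa.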

f_opt_fmincon/sa/psearch/ga/pswarm/sopt

Code

f_opt_fmincon

function rst = f_opt_fmincon(stg,mmf)

% Set the random seed for reproducibility
rng(stg.rseed);

% Set the starting point for the optimization
[startpoint,~] = f_opt_start(stg);

% Get the optimization options from settings
options = stg.fm_options;
options.UseParallel = stg.optmc;

% Display console messages if chosen in settings
if stg.optcsl
    options.Display = 'iter-detailed';
end

% Display plots if chosen in settings
if stg.optplots
    options.PlotFcn = ...
        {@optimplotx,@optimplotfunccount,@optimplotfval,...
        @optimplotstepsize,@optimplotfirstorderopt};
end

% Optimize the model with multiple starting points if chosen in settings
if stg.mst
    parfor n = 1:stg.msts
        disp(string(n))
        [x(n,:),fval(n),exitflag(n),output(n)] =...
            fmincon(@(x)f_sim_score(x,stg,mmf),startpoint(n,:),...
            [],[],[],[],stg.lb,stg.ub,[],options);
    end

    % Optimize the model
else
    [x(1,:),fval(1),exitflag(1),output(1)] =...
        fmincon(@(x)f_sim_score(x,stg,mmf),startpoint(1,:),...
        [],[],[],[],stg.lb,stg.ub,[],options);
end

% Save results
rst.name = 'fmincon';
rst.x = x;
rst.fval = fval;
rst.exitflag = exitflag;
rst.output = output;
end

f_opt_sa

function rst = f_opt_sa(stg,mmf)

% Set the random seed for reproducibility
rng(stg.rseed);

% Set the starting point for the optimization
[startpoint,~] = f_opt_start(stg);

% Get the optimization options from settings
options = stg.sa_options;
options.MaxTime = stg.optt;
options.InitialTemperature = ones(1,stg.parnum)*2;

% Display console messages if chosen in settings
if stg.optcsl
    options.Display = 'iter';
end

% Display plots if chosen in settings
if stg.optplots
    options.PlotFcn =...
        {@saplotbestf,@saplottemperature,@saplotf,@saplotstopping,@saplotx};
end

% Optimize the model with multiple starting points if chosen in settings
if stg.mst
    parfor n = 1:stg.msts
        disp(string(n))
        [x(n,:),fval(n),exitflag(n),output(n)] =...
            simulannealbnd(@(x)f_sim_score(x,stg,mmf),startpoint(n,:),...
            stg.lb,stg.ub,options);
    end

    % Optimize the model
else
    [x(1,:),fval(1),exitflag(1),output(1)] =...
        simulannealbnd(@(x)f_sim_score(x,stg,mmf),startpoint(1,:),...
        stg.lb,stg.ub,options);
end

% Save results
rst.name = 'Simulated annealing';
rst.x = x;
rst.fval = fval;
rst.exitflag = exitflag;
rst.output = output;
end

f_opt_psearch

function rst = f_opt_psearch(stg,mmf)

% Set the random seed for reproducibility
rng(stg.rseed);

% Set the starting point for the optimization
[startpoint,~] = f_opt_start(stg);

% Get the optimization options from settings
options = stg.psearch_options;
options.MaxTime = stg.optt;
options.UseParallel = stg.optmc;

% Display console messages if chosen in settings
if stg.optcsl
    options.Display = 'iter';
end

% Display plots if chosen in settings
if stg.optplots
    options.PlotFcn = ...
        {@psplotbestf,@psplotfuncount,@psplotmeshsize,@psplotbestx};
end

% Optimize the model with multiple starting points if chosen in settings
if stg.mst
    parfor n = 1:stg.msts
        disp(string(n))
        [x(n,:),fval(n),exitflag(n),output(n)] =...
            patternsearch(@(x)f_sim_score(x,stg,mmf),startpoint(n,:),[],[], ...
            [],[],stg.lb,stg.ub,[],options);
    end

    % Optimize the model
else
    [x(1,:),fval(1),exitflag(1),output(1)] =...
        patternsearch(@(x)f_sim_score(x,stg,mmf),startpoint(1,:),[],[], ...
        [],[],stg.lb,stg.ub,[],options);
end

% Save results
rst.name = 'Pattern search';
rst.x = x;
rst.fval = fval;
rst.exitflag = exitflag;
rst.output = output;
end

f_opt_ga

function rst = f_opt_ga(stg,mmf)

% Set the random seed for reproducibility
rng(stg.rseed);

% Get the optimization options from settings
options = stg.ga_options;
options.MaxTime = stg.optt;
options.UseParallel = stg.optmc;
options.PopulationSize = stg.popsize;

% Display console messages if chosen in settings
if stg.optcsl
    options.Display = 'iter';
end

% Display plots if chosen in settings
if stg.optplots
    options.PlotFcn = {@gaplotbestf,...
        @gaplotexpectation,@gaplotrange,@gaplotscorediversity,...
        @gaplotstopping,@gaplotscores,@gaplotdistance,...
        @gaplotselection};
end

% Set the starting population for the optimization
[~,startpop] = f_opt_start(stg);
options.InitialPopulationMatrix = startpop;

% Optimize the model with multiple starting points if chosen in settings
if stg.mst
    parfor n = 1:stg.msts
        disp(string(n))
        [x(n,:),fval(n)] = ga(@(x)f_sim_score(x,stg,mmf),stg.parnum,...
            [],[],[],[],stg.lb,stg.ub,[],options);
    end

    % Optimize the model
else
    [x(1,:),fval(1)] = ga(@(x)f_sim_score(x,stg,mmf),stg.parnum,...
        [],[],[],[],stg.lb,stg.ub,[],options);
end

% Save results
rst.name = 'Genetic algorithm';
rst.x = x;
rst.fval = fval;
rst.exitflag = [];
rst.output = [];
end

f_opt_pswarm

function rst = f_opt_pswarm(stg,mmf)

% Set the random seed for reproducibility
rng(stg.rseed);

% Get the optimization options from settings
options = stg.pswarm_options;
options.MaxTime = stg.optt;
options.UseParallel = stg.optmc;
options.SwarmSize = stg.popsize;

% Display console messages if chosen in settings
if stg.optcsl
    options.Display = 'iter';
end

% Display plots if chosen in settings
if stg.optplots
    options.PlotFcn = @pswplotbestf;
end

% Set the starting population for the optimization
[~,startpop] = f_opt_start(stg);
options.InitialSwarmMatrix = startpop;

% Optimize the model with multiple starting points if chosen in settings
if stg.mst
    parfor n = 1:stg.msts
        disp(string(n))
        [x(n,:),fval(n),exitflag(n),output(n)] =...
            particleswarm(@(x)f_sim_score(x,stg,mmf),...
            stg.parnum,stg.lb,stg.ub,options);
    end

    % Optimize the model
else
    [x(1,:),fval(1),exitflag(1),output(1)] =...
        particleswarm(@(x)f_sim_score(x,stg,mmf),...
        stg.parnum,stg.lb,stg.ub,options);
end

% Save results
rst.name = 'Particle swarm';
rst.x = x;
rst.fval = fval;
rst.exitflag = exitflag;
rst.output = output;
end

f_opt_sopt

function rst = f_opt_sopt(stg,mmf)

% Set the random seed for reproducibility
rng(stg.rseed);

% Get the optimization options from settings
options = stg.sopt_options;
options.MaxTime = stg.optt;
options.UseParallel = stg.optmc;

% Display console messages if chosen in settings
if stg.optcsl
    options.Display = 'iter';
end

% Display plots if chosen in settings
if stg.optplots
    options.PlotFcn = @surrogateoptplot;
end

% Set the starting population for the optimization
[~,startpop] = f_opt_start(stg);
options.InitialPoints = startpop;

% Optimize the model with multiple starting points if chosen in settings
if stg.mst
    parfor n = 1:stg.msts
        disp(string(n))
        [x(n,:),fval(n),exitflag(n),output(n)] =...
            surrogateopt(@(x)f_sim_score(x,stg,mmf),stg.lb,stg.ub,options);
    end

    % Optimize the model
else
    [x(1,:),fval(1),exitflag(1),output(1)] =...
        surrogateopt(@(x)f_sim_score(x,stg,mmf),stg.lb,stg.ub,options);
end

% Save results
rst.name = 'Surrogate optimization';
rst.x = x;
rst.fval = fval;
rst.exitflag = exitflag;
rst.output = output;
end

These functions call built-in MATLAB® functions that perform parameter optimization. For further information on how these optimizers work, please refer to the MATLAB® documentation. Optimizers used: fmincon, simulannealbnd (simulated annealing), patternsearch (pattern search), ga (genetic algorithm), particleswarm (particle swarm), and surrogateopt (surrogate optimization).

Global Sensitivity Analysis

f_gsa

Code

function rst = f_gsa(stg,mmf)

rst = f_make_par_samples(stg);

rst = f_make_output_sample(rst,stg,mmf);

rst = f_calc_sensitivities(rst,stg);
end

Calls the global sensitivity analysis functions in the correct order.

f_make_par_samples

Code

function rst= f_make_par_samples(stg)
% Code inspired by Geir Halnes et al. 2009 paper. (Halnes, Geir, et al. J.
% comp. neuroscience 27.3 (2009): 471.)

% MAKE SAMPLE MATRICES
M1 = zeros(stg.sansamples, stg.parnum); % Pre-allocate memory for data
M2 = zeros(stg.sansamples, stg.parnum);
N = zeros(stg.sansamples, stg.parnum, stg.parnum);
rng(stg.rseed)

% Create a distribution for each parameter according to settings (note that
% the sampling is performed in log space, as the parameters and their
% bounds are in log space)
for i=1:stg.parnum
    % Uniform distribution truncated at the parameter bounds
    if stg.sasamplemode == 0
        M1(:,i) = stg.lb(i) +...
            (stg.ub(i)-stg.lb(i)).*rand(1,stg.sansamples);
        M2(:,i) = stg.lb(i) +...
            (stg.ub(i)-stg.lb(i)).*rand(1,stg.sansamples);
        % Normal distribution with mu as the best value for a parameter and
        % sigma as stg.sasamplesigma, truncated at the parameter bounds
    elseif stg.sasamplemode == 1
        pd(i) = makedist('Normal','mu',stg.bestpa(i),...
            'sigma',stg.sasamplesigma);
        t(i) = truncate(pd(i),stg.lb(i),stg.ub(i));
        r{i} = random(t(i),stg.sansamples,1);
        r2{i} = random(t(i),stg.sansamples,1);
        M1(:,i) = r{i};
        M2(:,i) = r2{i};
        % Same as 1 without truncation
    elseif stg.sasamplemode == 2
        pd(i) = makedist('Normal','mu',stg.bestpa(i),...
            'sigma',stg.sasamplesigma);
        r{i} = random(pd(i),stg.sansamples,1);
        r2{i} = random(pd(i),stg.sansamples,1);
        M1(:,i) = r{i};
        M2(:,i) = r2{i};
        % Normal distribution centered at the mean of the parameter bounds
        % and sigma as stg.sasamplesigma, truncated at the parameter bounds
    elseif stg.sasamplemode == 3
        pd(i) = makedist('Normal','mu',...
            stg.lb(i) + (stg.ub(i)-stg.lb(i))/2,'sigma',stg.sasamplesigma);
        t(i) = truncate(pd(i),stg.lb(i),stg.ub(i));
        r{i} = random(t(i),stg.sansamples,1);
        r2{i} = random(t(i),stg.sansamples,1);
        M1(:,i) = r{i};
        M2(:,i) = r2{i};
        % Same as 3 without truncation.
    elseif stg.sasamplemode == 4
        pd(i) = makedist('Normal','mu',...
            stg.lb(i) + (stg.ub(i)-stg.lb(i))/2,'sigma',stg.sasamplesigma);
        r{i} = random(pd(i),stg.sansamples,1);
        r2{i} = random(pd(i),stg.sansamples,1);
        M1(:,i) = r{i};
        M2(:,i) = r2{i};
    end
end

for i=1:stg.parnum
    % Replace the i:th column in M2 by the i:th column from M1 to obtain Ni
    N(:,:,i) = M2;
    N(:,i,i) = M1(:,i);
end

rst.M1=M1;
rst.M2=M2;
rst.N=N;
end

Creates samples of parameter sets, drawn from the parameter distributions specified in the settings, that are used to perform the global sensitivity analysis.

Code inspired by Geir Halnes et al. 2009 paper.
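
As a toy illustration of the sampling design (not toolbox code): each matrix N(:,:,i) is a copy of M2 whose i-th column is replaced by the i-th column of M1, which is what the Sobol/Saltelli estimators in f_calc_sensitivities rely on.

M1 = rand(4,3); M2 = rand(4,3);   % 4 samples of 3 parameters each
N = zeros(4,3,3);
for i = 1:3
    N(:,:,i) = M2;                % start from M2 ...
    N(:,i,i) = M1(:,i);           % ... and take column i from M1
end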

f_make_output_sample

Code

function rst = f_make_output_sample(rst,stg,mmf)
% Code inspired by Geir Halnes et al. 2009 paper. (Halnes, Geir, et al. J.
% comp. neuroscience 27.3 (2009): 471.)

nSamples = stg.sansamples;
nPars = stg.parnum;
parameter_array = zeros(nSamples,nPars);
progress = 1;
time_begin = datetime;
D = parallel.pool.DataQueue;

afterEach(D, @progress_track);

for i=1:nSamples
    parameter_array(i,:) = rst.M1(i,:);
end

parfor i=1:nSamples
    [~,~,RM1(i)] = f_sim_score(parameter_array(i,:),stg,mmf);
    send(D, "GSA M1 ");
end
disp("GSA M1 Runtime: " + string(datetime - time_begin) +...
                "  All " + nSamples + " samples executed")

for i=1:nSamples
    fM1.sd(i,:) = [RM1(i).sd{:}];
    fM1.se(i,:) = RM1(i).se(:);
    fM1.st(i,:) = RM1(i).st;
    fM1.xfinal(i,:) = [RM1(i).xfinal{:}];
end

rst.fM1 = fM1;
clear RM1 fM1 % free the raw simulation results after copying them into rst

for i=1:nSamples
    parameter_array(i,:)= rst.M2(i,:);
end

progress = 1;
parfor i=1:nSamples
    [~,~,RM2(i)] = f_sim_score(parameter_array(i,:),stg,mmf);
    send(D, "GSA M2 ");
end
disp("GSA M2 Runtime: " + string(datetime - time_begin) +...
                "  All " + nSamples + " samples executed")

for i=1:nSamples
    fM2.sd(i,:) = [RM2(i).sd{:}];
    fM2.se(i,:) = RM2(i).se(:);
    fM2.st(i,:) = RM2(i).st;
    fM2.xfinal(i,:) = [RM2(i).xfinal{:}];
end

rst.fM2 = fM2;
clear RM2 fM2 % free the raw simulation results after copying them into rst

for i=1:nSamples
    for j=1:nPars
        parameter_array(i,:,j)= rst.N(i,:,j);
    end
end

progress = 1;
parfor i=1:nSamples
    for j=1:nPars
        [~,~,RN{i,j}] = f_sim_score(parameter_array(i,:,j),stg,mmf);
    end
    send(D, "GSA N  ");
end
disp("GSA N  Runtime: " + string(datetime - time_begin) +...
                "  All " + nSamples + " samples executed")

for i=1:nSamples
    for j=1:nPars
        fN.sd(i,:,j) = [RN{i,j}.sd{:}];
        fN.se(i,:,j) = RN{i,j}.se(:);
        fN.st(i,:,j) = RN{i,j}.st;
        fN.xfinal(i,:,j) = [RN{i,j}.xfinal{:}];
    end
end

rst.fN = fN;
clear RN fN % free the raw simulation results after copying them into rst

    function progress_track(name)
        progress = progress + 1;
        if mod(progress,ceil(nSamples/10)) == 0 && progress ~= nSamples
            disp(name + "Runtime: " + string(datetime - time_begin) +...
                "  Samples:" + progress + "/" + nSamples)
        end
    end
end

For each parameter set in the matrices M1, M2, and N, it runs the function f_sim_score, generating the output matrices fM1, fM2, and fN respectively.

Code inspired by Geir Halnes et al. 2009 paper.
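
For orientation, a hedged illustration of the shapes produced (assuming, for example, 100 samples, 4 scored outputs, and 3 parameters; the real sizes come from the settings and the model):

nSamples = 100; nOut = 4; nPars = 3;
fM1.sd = zeros(nSamples,nOut);        % scores for the M1 parameter sets
fM2.sd = zeros(nSamples,nOut);        % scores for the M2 parameter sets
fN.sd  = zeros(nSamples,nOut,nPars);  % scores for each resampled matrix N(:,:,i)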

f_calc_sensitivities

Code

function rst = f_calc_sensitivities(rst,stg)

rst = remove_sim_error(rst,stg);

[rst.SiQ,rst.SiTQ,rst.Si,rst.SiT] = bootstrap(rst,stg);
end

function [SiQ,SiTQ,Si,SiT]=bootstrap(rst,stg)
% calculates confidence intervals.
fM1 = rst.fM1;
fM2 = rst.fM2;
fN = rst.fN;

scores_names_list = ["sd","se","st","xfinal"];

[Si,SiT] = bootstrap_h(fM1,fM2,fN,stg,scores_names_list);

if (isempty(stg.gsabootstrapsize))
    stg.gsabootstrapsize=ceil(sqrt(size(fM1.sd)));
end%if

fM1q = cell(stg.gsabootstrapsize,1);
fM2q = cell(stg.gsabootstrapsize,1);
fNq = cell(stg.gsabootstrapsize,1);

parfor j=1:stg.gsabootstrapsize
    [SiQh{j},SiTQh{j}] = bootstrap_hq(fM1,fM2,fN,stg,scores_names_list,j);
end%parfor

for n = 1:size(scores_names_list,2)
    for j=1:stg.gsabootstrapsize
        eval("SiQ." + scores_names_list(n) + "(j,:,:) = SiQh{j}." + scores_names_list(n) + ";")
        eval("SiTQ." + scores_names_list(n) + "(j,:,:) = SiTQh{j}." + scores_names_list(n) + ";")
    end
end
end%function

function [Si,SiT] = bootstrap_h(fM1,fM2,fN,stg,scores_names)
for n = 1:size(scores_names,2)
    eval("fM1h=fM1." + scores_names(n) + ";");
    eval("fM2h=fM2." + scores_names(n) + ";");
    eval("fNh=fN." + scores_names(n) + ";");

    [Sih,SiTh] = calcSobolSaltelli(fM1h,fM2h,fNh,stg);

    eval("Si." + scores_names(n) + "=Sih;");
    eval("SiT." + scores_names(n) + "=SiTh;");
end
end

function [Si,SiT] = bootstrap_hq(fM1,fM2,fN,stg,scores_names,j)
for n = 1:size(scores_names,2)
    eval("fM1h=fM1." + scores_names(n) + ";");
    eval("fM2h=fM2." + scores_names(n) + ";");
    eval("fNh=fN." + scores_names(n) + ";");

    rng(j*stg.rseed)
    I=ceil(rand(size(fM1h,1),1)*size(fM1h,1));
    fM1q = fM1h(I,:);
    fM2q = fM2h(I,:);
    fNq = fNh(I,:,:);

    [Sih,SiTh] = calcSobolSaltelli(fM1q,fM2q,fNq,stg);
    eval("Si." + scores_names(n) + "=Sih;");
    eval("SiT." + scores_names(n) + "=SiTh;");
end
end

function [Si,SiT] = calcSobolSaltelli(fM1,fM2,fN,stg)
%Code inspired by Geir Halnes et al. 2009 paper. (Halnes, Geir, et al. J. comp. neuroscience 27.3 (2009): 471.)

[Nsamples,Nvars,Npars]=size(fN);

if(stg.sasubmean) % Makes the model more stable
    fM1 = fM1 - mean(fM1,1);
    fM2 = fM2 - mean(fM2,1);
    for i=1:Npars
        fN(:,:,i) =  fN(:,:,i) - mean(fN(:,:,i),1);
    end
end

EY2 = mean(fM1.*fM2); % Valid definition (see Halnes et. al. Appendix)
VY = sum(fM1.^2)/(Nsamples-1) - EY2;
VYT = sum(fM2.^2)/(Nsamples-1) - EY2;

Si = zeros(Nvars,Npars);
SiT= zeros(Nvars,Npars);

for i=1:Npars
    Si(:,i) = (sum(fM1.*fN(:,:,i))/(Nsamples-1) - EY2)./VY;
    SiT(:,i) = 1 - (sum(fM2.*fN(:,:,i))/(Nsamples-1) - EY2)./VYT;
end
end

function rst = remove_sim_error(rst,stg)
error=[];
error_helper=[];

for n = 1:size(rst.fM1.sd,1)
    if max(rst.fM1.sd(n,:)) == stg.errorscore
        error = [error,n];
    end
    if max(rst.fM2.sd(n,:)) == stg.errorscore
        error = [error,n];
    end
    for m = 1:stg.parnum
        if max(rst.fN.sd(n,:,m)) == stg.errorscore
            error_helper = 1;
        end
    end
    if error_helper == 1
        error = [error,n];
    end
    error_helper = 0;
end

error = unique(error);

if ~isempty(error)
    for n = size(error,2):-1:1
        rst.fM1.sd(error(n),:) = [];
        rst.fM2.sd(error(n),:) = [];
        rst.fN.sd(error(n),:,:) = [];

        rst.fM1.se(error(n),:) = [];
        rst.fM2.se(error(n),:) = [];
        rst.fN.se(error(n),:,:) = [];

        rst.fM1.st(error(n),:) = [];
        rst.fM2.st(error(n),:) = [];
        rst.fN.st(error(n),:,:) = [];

        rst.fM1.xfinal(error(n),:) = [];
        rst.fM2.xfinal(error(n),:) = [];
        rst.fN.xfinal(error(n),:,:) = [];
    end
end
end

Takes the matrices fM1, fM2, and fN and calculates sensitivity indices. It calculates indices based on the following outputs of the f_sim_score function: sd, se, st, and xfinal.

Code modified from the Geir Halnes et al. 2009 paper.
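
For reference, the estimators implemented in calcSobolSaltelli follow the Saltelli formulation used by Halnes et al. (2009). Writing $f(M_1)_k$ for a scored output of sample $k$ of $M_1$ (and similarly for $M_2$ and $N_i$), with $N_s$ samples:

$$E[Y]^2 \approx \frac{1}{N_s}\sum_{k=1}^{N_s} f(M_1)_k\, f(M_2)_k, \qquad V(Y) \approx \frac{1}{N_s-1}\sum_{k=1}^{N_s} f(M_1)_k^2 - E[Y]^2$$

$$S_i \approx \frac{\frac{1}{N_s-1}\sum_{k} f(M_1)_k\, f(N_i)_k - E[Y]^2}{V(Y)}, \qquad S_{Ti} \approx 1 - \frac{\frac{1}{N_s-1}\sum_{k} f(M_2)_k\, f(N_i)_k - E[Y]^2}{V(Y_T)}$$

where $V(Y_T)$ is computed in the same way as $V(Y)$ but from $f(M_2)$.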