diff --git a/functions/EET_indices.m b/functions/EET_indices.m new file mode 100644 index 0000000..c5ef8ae --- /dev/null +++ b/functions/EET_indices.m @@ -0,0 +1,256 @@ +function [ mi, sigma, EE, mi_sd, sigma_sd, mi_lb, sigma_lb, mi_ub, sigma_ub, mi_all, sigma_all ] = EET_indices(r,xmin,xmax,X,Y,design_type,varargin) +% +% Compute the sensitivity indices according to the Elementary Effects Test +% (Saltelli, 2008) or 'method of Morris' (Morris, 1991). +% These are: the mean (mi) of the EEs associated to input 'i', +% which measures the input influence; and the standard deviation (sigma) +% of the EEs, which measures its level of interactions with other inputs. +% For the mean EE, we use the version suggested by Campolongo et al. +% (2007),where absolute values of the EEs are used (this is to avoid that +% EEs with opposite sign would cancel each other out). +% +% Basic usage: +% [mi, sigma, EE] = EET_indices(r,xmin,xmax,X,Y,design_type) +% +% Input: +% r = number of sampling point - scalar +% xmin = lower bounds of input ranges - vector (1,M) +% xmax = upper bounds of input ranges - vector (1,M) +% X = matrix of sampling datapoints where EE must be computed +% - matrix (r*(M+1),M) +% Y = associated output values - vector (r*(M+1),1) +% design_type = design type (string) +% [options: 'radial','trajectory'] +% Output: +% mi = mean of the elementary effects - vector (1,M) +% sigma = standard deviation of the elementary effects - vector (1,M) +% EE = matrix of elementary effects - matrix (r,M) +% +% +% Advanced usage: +% +% [mi, sigma, EE] = EET_indices(r,xmin,xmax,X,Y,design_type,Nboot) +% [mi, sigma, EE] = EET_indices(r,xmin,xmax,X,Y,design_type,Nboot,alfa) +% +% Optional input: +% Nboot = number of resamples used for boostrapping (default:0) +% alfa = significance level for the confidence intervals estimated +% by bootstrapping (default: 0.05) +% In this case, the output 'mi' and 'sigma' are the mean and standard +% deviation of the EEs averaged over Nboot 
resamples. +% +% Advanced usage/2: +% +% [mi, sigma, EE, mi_sd, sigma_sd, mi_lb, sigma_lb, mi_ub, sigma_ub] = ... +% EET_indices(r,xmin,xmax,X,Y,design_type,Nboot) +% +% Optional output: +% mi_sd = standard deviation of 'mi' across Nboot resamples +% sigma_sd = standard deviation of 'sigma' across Nboot resamples +% mi_lb = lower bound of 'mi' (at level alfa) across Nboot resamples +% sigma_lb = lower bound of 'sigma' across Nboot resamples +% mi_ub = upper bound of 'mi' (at level alfa) across Nboot resamples +% sigma_ub = upper bound of 'sigma' across Nboot resamples +% - all the above are +% vector (1,M) if Nboot>1 +% (empty vector otherwise) +% Or: +% +% [mi, sigma, EE, mi_sd, sigma_sd, mi_lb, sigma_lb, mi_ub, sigma_ub, ... +% mi_all, sigma_all ] = EET_indices(r,xmin,xmax,X,Y,design_type,Nboot) +% +% Optional output: +% mi_all = Nboot estimates of 'mi' - matrix (Nboot,M) +% sigma_all = Nboot estimates of 'sigma' - matrix (Nboot,M) +% +% NOTE: If the vector Y includes any NaN values, the function will +% identify them and exclude them from further computation. A Warning message +% about the number of discarded NaN elements (and hence the actual number +% of samples used for estimating mi and sigma) will be displayed. +% +% REFERENCES: +% +% Morris, M.D. (1991), Factorial sampling plans for preliminary +% computational experiments, Technometrics, 33(2). +% +% Saltelli, A., et al. (2008), Global Sensitivity Analysis, The Primer, +% Wiley. +% +% Campolongo, F., Cariboni, J., Saltelli, A. (2007), An effective +% screening design for sensitivity analysis of large models. Environ. Model. +% Softw. 22 (10), 1509-1518. + +% This function is part of the SAFE Toolbox by F. Pianosi, F. Sarrazin +% and T. Wagener at Bristol University (2015). +% SAFE is provided without any warranty and for non-commercial use only. +% For more details, see the Licence file included in the root directory +% of this distribution. 
+% For any comment and feedback, or to discuss a Licence agreement for +% commercial use, please contact: francesca.pianosi@bristol.ac.uk +% For details on how to cite SAFE in your publication, please see: +% bristol.ac.uk/cabot/resources/safe-toolbox/ + +%%%%%%%%%%%%%% +% Check inputs +%%%%%%%%%%%%%% + +if ~isscalar(r); error('''r'' must be a scalar'); end +if r<=0; error('''r'' must be positive' ); end +if abs(r-round(r)); error('''r'' must be integer'); end +[N,M] = size(xmin) ; +[n,m] = size(xmax) ; +if N~=1 ;error('''xmin'' must be a row vector'); end +if n~=1 ;error('''xmax'' must be a row vector'); end +if M~=m ;error('''xmin'' and ''xmax'' must be the same size'); end +Dr = xmax - xmin ; +if any(Dr<=0) + error('all components of ''xmax'' must be higher than the corresponding ones in ''xmin''') +end +[n,m] = size(X) ; +if n~=r*(M+1) ;error('''X'' must have r*(M+1) rows'); end +if m~=M ;error('''X'' must have M columns'); end +[n,m] = size(Y) ; +if n~=r*(M+1) ;error('''Y'' must have r*(M+1) rows'); end +if m~=1 ;error('''Y'' must be a column vector'); end +if ~ischar(design_type); error('''design_type'' must be a string'); end +if all(isnan(Y)); error('all data in ''Y'' are NaN'); end + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Recover and check optional inputs +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +if nargin<7 + Nboot=1; +else + Nboot=varargin{1}; + if ~isscalar(Nboot); error('''Nboot'' must be scalar'); end + if Nboot<0; error('''Nboot'' must be nonnegative (if 0, bootstrapping is not used)' ); end + if abs(Nboot-round(Nboot)); error('''Nboot'' must be an integer'); end +end +if nargin<8 + alfa=0.05; +else + alfa=varargin{2}; + if ~isscalar(alfa); error('''alfa'' must be scalar'); end + if any([alfa<0,alfa>1]); error('''alfa'' must be in [0,1]' ); end +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Compute Elementary Effects +%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +EE = nan(r,M) ; % matrix of elementary effects +k = 1 ; +ki = 1 ; +for i=1:r + for j=1:M + if 
strcmp(design_type,'radial') % radial design: EE is the difference + % between output at one point in the i-th block and output at + % the 1st point in the block + EE(i,j) = ( Y(k+1) - Y(ki) ) / ( X(k+1,j)-X(ki,j) )*Dr(j) ; + elseif strcmp(design_type,'trajectory') % trajectory design: EE is the difference + % between output at one point in the i-th block and output at + % the previous point in the block (the "block" is indeed a + % trajectory in the input space composed of points that + % differ in one component at the time) + idx = find( abs( X(k+1,:)-X(k,:) ) > 0 ); % if using 'morris' + % sampling, the points in the block may not + % be in the proper order, i.e. each point in the block differs + % from the previous/next one by one component but we don't know + % which one; this is here computed and saved in 'idx' + if isempty(idx) ; error('X(%d,:) and X(%d,:) are equal',[k,k+1]); end + if length(idx)>1 ; error('X(%d,:) and X(%d,:) differ in more than one component',[k,k+1]); end + EE(i,idx) = ( Y(k+1) - Y(k) ) / ( X(k+1,idx)-X(k,idx) )*Dr(idx) ; + else + error('''design_type'' must be one among {''radial'',''trajectory''}') + end + k=k+1 ; + end + k=k+1; + ki=k ; +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Compute Mean and Standard deviation +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + +if Nboot>1 + + bootsize=r; + B = floor((rand(bootsize,Nboot)*r+1)); + + mi_all = nan(Nboot,M) ; + sigma_all= nan(Nboot,M) ; + idx_EE = nan(Nboot,M) ; + for n=1:Nboot + [mi_all(n,:),sigma_all(n,:),idx_EE(n,:)]=compute_indices(EE(B(:,n),:)); + end + + mi = mean(mi_all) ; + mi_sd = std(mi_all) ; + mi_lb = sort(mi_all) ; mi_lb = mi_lb(max(1,round(Nboot*alfa/2)),:) ; + mi_ub = sort(mi_all) ; mi_ub = mi_ub (round(Nboot*(1-alfa/2)),:) ; + + sigma = mean(sigma_all) ; + sigma_sd = std(sigma_all) ; + sigma_lb = sort(sigma_all) ; sigma_lb = sigma_lb(max(1,round(Nboot*alfa/2)),:) ; + sigma_ub = sort(sigma_all) ; sigma_ub = sigma_ub (round(Nboot*(1-alfa/2)),:) ; + + % Print to screen a 
warning message if any NAN was found in Y + if sum(isnan(Y)) + fprintf('\n WARNING:') + fprintf('\n%d NaNs were found in Y',sum(isnan(Y))) + fprintf('\nAverage number of samples that could be used to evaluate mean ') + fprintf('and standard deviation of elementary effects is:') + fprintf('\nX%d: %1.0f',[1:M;mean(idx_EE)]); + fprintf('\n') + end + % Print to screen the sensitivity indices +% fprintf('\n\t mean(EE) std(EE)\n'); +% fprintf('X%d:\t %2.3f\t %2.3f\n',[ 1:M; mi; sigma ]); +else + + [mi,sigma,idx_EE]=compute_indices(EE); + + mi_sd = [] ; + sigma_sd = [] ; + mi_lb = [] ; + sigma_lb = [] ; + mi_ub = [] ; + sigma_ub = [] ; + mi_all = [] ; + sigma_all= [] ; + + % Print to screen a warning message if any NAN was found in Y + if sum(isnan(Y)) + fprintf('\n WARNING:') + fprintf('\n%d NaNs were found in Y',sum(isnan(Y))) + fprintf('\nThe number of samples that could be used to evaluate mean ') + fprintf('and standard deviation of elementary effects is:') + fprintf('\nX%d: %1.0f',[1:M;idx_EE]); + fprintf('\n') + end + +% fprintf('\n\t mean(EE) std(EE)\n'); +% fprintf('X%d:\t %2.3f\t %2.3f\n',[ 1:M; mi; sigma ]); +end + +%%%% built-in function + +function [mi,sigma,idx_EE ] = compute_indices(EE) +% EE matrix (r,M) +[~,M]=size(EE); + +idx_EE= nan(1,M); +mi = nan(1,M); +sigma = nan(1,M); + +for j=1:M + nan_EE=isnan(EE(:,j));% find any NaN in EE + idx_EE(j)=sum(~nan_EE);% total number of NaNs in EE + mi(j) = mean(abs(EE(~nan_EE,j)));% mean absolute value of EE (excluding NaNs) + sigma(j)= std(EE(~nan_EE,j));% std of EE (excluding NaNs) +end + diff --git a/functions/FPD_SA_Off.m b/functions/FPD_SA_Off.m index a7f1285..dd77776 100644 --- a/functions/FPD_SA_Off.m +++ b/functions/FPD_SA_Off.m @@ -1,120 +1,120 @@ -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% Fast Partial Differential-based Sensitivity Analysis (Matlab) -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% Copyright (C) 2022 -classdef FPD_SA_Off - 
properties - Name = 'Off-line Broad Learning System'; - end - - %% Functions and algorithm - methods (Static = true) - function model = SA(model,SelTrainA,NumEech4SA,sigfun,ThetaSel) - %% parameter initial - NumFeaOri = model.NumPerWin * model.NumWindow; - NumEnhOri = model.NumEnhance; - EnhMatSelOri = SelTrainA(:,NumFeaOri + 1:NumFeaOri+NumEnhOri); - NumClass = length(model.Beta(1,:)); - if model.Step == 0 - AddFeaPDD = model.Beta(1:NumFeaOri,:); - AddEnhPDD = model.Beta(NumFeaOri + 1:NumFeaOri+NumEnhOri,:); - AddEnhSel = EnhMatSelOri; - AddRelSel = []; - AddFeaEnhWei = model.FeaEnhWei(1:NumFeaOri,:); - OriFeaAddEnhWei = []; - AddFeaRelWei = []; - AddRelPDD = []; - else - NumAddFea = model.NumAddFea; - NumAddRel = model.NumAddRel; - NumAddEnh = model.NumAddEnh; - AddFeaPDD = model.Beta(end - (NumAddFea+NumAddEnh+NumAddRel) + 1:end - (NumAddEnh+NumAddRel),:); - AddEnhSel = SelTrainA(:,end-NumAddEnh+1:end); - AddRelSel = SelTrainA(:,end-NumAddRel-NumAddEnh+1:end-NumAddEnh); - AddFeaEnhWei = model.AllFeaAddEnhWei{model.Step}(end-NumAddFea:end-1,:); - OriFeaAddEnhWei = model.AllFeaAddEnhWei{model.Step}(1:end-NumAddFea-1,:); - AddFeaRelWei = model.AddFeaRelWei{model.Step}(1:NumAddFea,:); - AddRelPDD = model.Beta(end - NumAddRel - NumAddEnh+1:end -NumAddEnh,:); - AddEnhPDD = model.Beta(end-NumAddEnh+1:end,:); - end - - %% select important nodes - SelectNeruonSet = []; - for z = 1:NumClass - % calculate partial differential - if strcmp(sigfun,'logsig') - DiagAddEnhAll = AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - try - DiagAddRelAll = AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - catch - DiagAddRelAll = []; - end - else - DiagAddEnhAll = 1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - try - DiagAddRelAll = 1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - catch - 
DiagAddRelAll = []; - end - end - DiagAddEnh = sqrt(sum(DiagAddEnhAll.^2,1)./NumEech4SA); - try - DiagAddRel = sqrt(sum(DiagAddRelAll.^2,1)./NumEech4SA); - catch - DiagAddRel = []; - end - if model.Step == 0 - AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; - model.FeaPD{z} = AddFeaPDD + AddFeaEnhPDID; - model.AllPD{z} = [model.FeaPD{z};AddEnhPDD]; - else - OriFeaPD = model.FeaPD{z} + OriFeaAddEnhWei * diag(DiagAddEnh) * AddEnhPDD; - AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; - AddFeaRelPDID = AddFeaRelWei * diag(DiagAddRel) * AddRelPDD; - AddFeaPD = AddFeaPDD + AddFeaEnhPDID + AddFeaRelPDID; - model.FeaPD{z} = [OriFeaPD;AddFeaPD]; - for k = 1:model.Step-1 - model.AllPD{z}(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+NumAddFea,:)... - = model.FeaPD{z}(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); - end - model.AllPD{z} = [model.AllPD{z} ; AddFeaPD ; AddRelPDD ; AddEnhPDD]; - end - % import nodes to z-th class - [~,Max_index] = max(model.AllPD{z},[],2); - Max_index_{z}=find(Max_index==z); - AllPD_z = model.AllPD{z}(:,z) ; - -% %% method of interseciton line - [row_descend,index_temp] = sort(AllPD_z,"descend"); - row_descend_line = sort(linspace(min(row_descend),max(row_descend),length(model.Beta(:,1))),'descend'); - [~,ban_index] = min(abs(row_descend_line(2:end-1) - row_descend(2:end-1)')); - - %% method of tangent line -% [row_descend,index_temp] = sort(abs(AllPD_z),"descend"); -% abs_index = 1:length(row_descend); -% [~,ban_index] = min(sqrt(row_descend'.^2+abs_index.^2)); - - - if row_descend(ban_index)>0 - else - [~,ban_index] = min(row_descend(row_descend>0)); - end - Selected_row = index_temp(1:ban_index); - SelectNeurons = intersect(Selected_row,Max_index_{z}); - - % selec - if length(SelectNeurons) < ceil(length(row_descend)*ThetaSel) - SelectNeruonSet = SelectNeruonSet; - else - SelectNeruonSet = union(SelectNeruonSet,SelectNeurons); - end - end 
- model.BanNodes = setdiff(index_temp,SelectNeruonSet); - end - end %methodq -end %class - - - - - +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Fast Partial Differential-based Sensitivity Analysis (Matlab) +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Copyright (C) 2022 +classdef FPD_SA_Off + properties + Name = 'Off-line Broad Learning System'; + end + + %% Functions and algorithm + methods (Static = true) + function model = SA(model,SelTrainA,NumEech4SA,sigfun,ThetaSel) + %% parameter initial + NumFeaOri = model.NumPerWin * model.NumWindow; + NumEnhOri = model.NumEnhance; + EnhMatSelOri = SelTrainA(:,NumFeaOri + 1:NumFeaOri+NumEnhOri); + NumClass = length(model.Beta(1,:)); + if model.Step == 0 + AddFeaPDD = model.Beta(1:NumFeaOri,:); + AddEnhPDD = model.Beta(NumFeaOri + 1:NumFeaOri+NumEnhOri,:); + AddEnhSel = EnhMatSelOri; + AddRelSel = []; + AddFeaEnhWei = model.FeaEnhWei(1:NumFeaOri,:); + OriFeaAddEnhWei = []; + AddFeaRelWei = []; + AddRelPDD = []; + else + NumAddFea = model.NumAddFea; + NumAddRel = model.NumAddRel; + NumAddEnh = model.NumAddEnh; + AddFeaPDD = model.Beta(end - (NumAddFea+NumAddEnh+NumAddRel) + 1:end - (NumAddEnh+NumAddRel),:); + AddEnhSel = SelTrainA(:,end-NumAddEnh+1:end); + AddRelSel = SelTrainA(:,end-NumAddRel-NumAddEnh+1:end-NumAddEnh); + AddFeaEnhWei = model.AllFeaAddEnhWei{model.Step}(end-NumAddFea:end-1,:); + OriFeaAddEnhWei = model.AllFeaAddEnhWei{model.Step}(1:end-NumAddFea-1,:); + AddFeaRelWei = model.AddFeaRelWei{model.Step}(1:NumAddFea,:); + AddRelPDD = model.Beta(end - NumAddRel - NumAddEnh+1:end -NumAddEnh,:); + AddEnhPDD = model.Beta(end-NumAddEnh+1:end,:); + end + + %% select important nodes + SelectNeruonSet = []; + for z = 1:NumClass + % calculate partial differential + if strcmp(sigfun,'logsig') + DiagAddEnhAll = AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + try + DiagAddRelAll = 
AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + catch + DiagAddRelAll = []; + end + else + DiagAddEnhAll = 1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + try + DiagAddRelAll = 1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + catch + DiagAddRelAll = []; + end + end + DiagAddEnh = sqrt(sum(DiagAddEnhAll.^2,1)./NumEech4SA); + try + DiagAddRel = sqrt(sum(DiagAddRelAll.^2,1)./NumEech4SA); + catch + DiagAddRel = []; + end + if model.Step == 0 + AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; + model.FeaPD{z} = AddFeaPDD + AddFeaEnhPDID; + model.AllPD{z} = [model.FeaPD{z};AddEnhPDD]; + else + OriFeaPD = model.FeaPD{z} + OriFeaAddEnhWei * diag(DiagAddEnh) * AddEnhPDD; + AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; + AddFeaRelPDID = AddFeaRelWei * diag(DiagAddRel) * AddRelPDD; + AddFeaPD = AddFeaPDD + AddFeaEnhPDID + AddFeaRelPDID; + model.FeaPD{z} = [OriFeaPD;AddFeaPD]; + for k = 1:model.Step-1 + model.AllPD{z}(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+NumAddFea,:)... 
+ = model.FeaPD{z}(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); + end + model.AllPD{z} = [model.AllPD{z} ; AddFeaPD ; AddRelPDD ; AddEnhPDD]; + end + % import nodes to z-th class + [~,Max_index] = max(model.AllPD{z},[],2); + Max_index_{z}=find(Max_index==z); + AllPD_z = model.AllPD{z}(:,z) ; + +% %% method of interseciton line + [row_descend,index_temp] = sort(AllPD_z,"descend"); + row_descend_line = sort(linspace(min(row_descend),max(row_descend),length(model.Beta(:,1))),'descend'); + [~,ban_index] = min(abs(row_descend_line(2:end-1) - row_descend(2:end-1)')); + + %% method of tangent line +% [row_descend,index_temp] = sort(abs(AllPD_z),"descend"); +% abs_index = 1:length(row_descend); +% [~,ban_index] = min(sqrt(row_descend'.^2+abs_index.^2)); + + + if row_descend(ban_index)>0 + else + [~,ban_index] = min(row_descend(row_descend>0)); + end + Selected_row = index_temp(1:ban_index); + SelectNeurons = intersect(Selected_row,Max_index_{z}); + + % selec + if length(SelectNeurons) < ceil(length(row_descend)*ThetaSel) + SelectNeruonSet = SelectNeruonSet; + else + SelectNeruonSet = union(SelectNeruonSet,SelectNeurons); + end + end + model.BanNodes = setdiff(index_temp,SelectNeruonSet); + end + end %methodq +end %class + + + + + diff --git a/functions/FPD_SA_Online.m b/functions/FPD_SA_Online.m index a1cfbe8..cc04c86 100644 --- a/functions/FPD_SA_Online.m +++ b/functions/FPD_SA_Online.m @@ -1,166 +1,175 @@ -classdef FPD_SA_Online - properties - Name = 'Off-line Broad Learning System'; - - end - %% Functions and algorithm - methods (Static = true) - function model = SA(model,SelTrainA,NumEech4SA,sigfun,mode) - NumFeaOri = model.NumPerWin * model.NumWindow; - NumEnhOri = model.NumEnhance; - NumAddFea = model.NumAddFea; - NumAddRel = model.NumAddRel; - NumAddEnh = model.NumAddEnh; - NumClass = length(model.Beta(1,:)); - if model.AddNodeStep == 0 - AddFeaPDD = model.Beta(1:NumFeaOri,:); - AddEnhPDD = model.Beta(NumFeaOri + 1:NumFeaOri+NumEnhOri,:); - AddEnhSel = 
SelTrainA(:,NumFeaOri + 1:NumFeaOri+NumEnhOri); - AddRelSel = []; - AddFeaEnhWei = model.FeaEnhWei(1:NumFeaOri,:); - OriFeaAddEnhWei = []; - AddFeaRelWei = []; - AddRelPDD = []; - elseif strcmp(mode,'AN') - AddFeaPDD = model.Beta(end - (NumAddFea+NumAddEnh+NumAddRel) + 1:end - (NumAddEnh+NumAddRel),:); - AddEnhSel = SelTrainA(:,end-NumAddEnh+1:end); - AddRelSel = SelTrainA(:,end-NumAddRel-NumAddEnh+1:end-NumAddEnh); - AddFeaEnhWei = model.AllFeaAddEnhWei{model.AddNodeStep}(end-NumAddFea:end-1,:); - OriFeaAddEnhWei = model.AllFeaAddEnhWei{model.AddNodeStep}(1:end-NumAddFea-1,:); - AddFeaRelWei = model.AddFeaRelWei{model.AddNodeStep}(1:NumAddFea,:); - AddRelPDD = model.Beta(end - NumAddRel - NumAddEnh+1:end -NumAddEnh,:); - AddEnhPDD = model.Beta(end-NumAddEnh+1:end,:); - else - AddDataOriEnhPDD = model.Beta(NumFeaOri+1:NumFeaOri+NumEnhOri,:); - AddDataOriEnhSel = SelTrainA(:,NumFeaOri+1:NumFeaOri+NumEnhOri); - end - SelectNeruonSet = []; - if strcmp(mode,'AN') || model.AddNodeStep == 0 - for z = 1:NumClass - if strcmp(sigfun,'logsig') - DiagAddEnhAll = AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - try - DiagAddRelAll = AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - catch - DiagAddRelAll = []; - end - else - DiagAddEnhAll = 1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - try - DiagAddRelAll = 1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - catch - DiagAddRelAll = []; - end - end - DiagAddEnh = sqrt(sum(DiagAddEnhAll.^2,1)./NumEech4SA); - try - DiagAddRel = sqrt(sum(DiagAddRelAll.^2,1)./NumEech4SA); - catch - DiagAddRel = []; - end - if model.AddNodeStep == 0 - AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; - model.FeaPD{z} = AddFeaPDD + AddFeaEnhPDID; - model.AllPD{z} = [model.FeaPD{z};AddEnhPDD]; - else - OriFeaPD = model.FeaPD{z} + OriFeaAddEnhWei * 
diag(DiagAddEnh) * AddEnhPDD; - AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; - AddFeaRelPDID = AddFeaRelWei * diag(DiagAddRel) * AddRelPDD; - AddFeaPD = AddFeaPDD + AddFeaEnhPDID + AddFeaRelPDID; - model.FeaPD{z} = [OriFeaPD;AddFeaPD]; - for k = 1:model.AddNodeStep-1 - model.AllPD{z}(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+NumAddFea,:)... - = model.FeaPD{z}(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); - end - model.AllPD{z} = [model.AllPD{z} ; AddFeaPD ; AddRelPDD ; AddEnhPDD]; - end - [~,Max_index] = max(model.AllPD{z},[],2); - Max_index_{z}=find(Max_index==z); - AllPD_z = model.AllPD{z}(:,z) ; - [row_descend,index_temp] = sort(abs(AllPD_z),"descend"); - [row_descend,index_temp] = sort(AllPD_z,"descend"); - row_descend_line = sort(linspace(min(row_descend),max(row_descend),length(model.Beta(:,1))),'descend'); - [~,ban_index] = min(abs(row_descend_line(2:end-1) - row_descend(2:end-1)')); - - - %% method of tangent line -% abs_index = 1:length(row_descend); -% [~,ban_index] = min(sqrt(row_descend'.^2+abs_index.^2)); - - - if row_descend(ban_index)>0 - else - [~,ban_index] = min(row_descend(row_descend>0)); - end - Selected_row = index_temp(1:ban_index); - SelectNeurons = intersect(Selected_row,Max_index_{z}); - SelectNeruonSet = union(SelectNeruonSet,SelectNeurons); - end - else - for z = 1:NumClass - if strcmp(sigfun,'logsig') - DiagOriEnh = AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - else - DiagOriEnh = 1-AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - end - DiagOriEnh = sqrt(sum(DiagOriEnh.^2,1)./NumEech4SA); - OriFeaOriEnhPDID = model.FeaEnhWei(1:NumFeaOri,:) * diag(DiagOriEnh) * AddDataOriEnhPDD; - model.FeaPD{z}(1:NumFeaOri,:) = model.FeaPD{z}(1:NumFeaOri,:) + OriFeaOriEnhPDID; - for i = 1:model.AddNodeStep - AddDataAddRelSel = 
SelTrainA(:,NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel); - AddDataAddEnhSel = SelTrainA(:,NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+NumAddEnh); - if strcmp(sigfun,'logsig') - DiagAddStepRel = AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - DiagAddStepEnh = AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); - else - DiagAddStepRel = 1-AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - DiagAddStepEnh = 1-AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); - end - DiagAddRel = sqrt(sum(DiagAddStepRel.^2,1)./NumEech4SA); - DiagAddEnh = sqrt(sum(DiagAddStepEnh.^2,1)./NumEech4SA); - AddFeaRelWei = model.AddFeaRelWei{i}(1:NumAddFea,:); - OriFeaAddEnhWei = model.AllFeaAddEnhWei{i}(1:NumFeaOri+i*NumAddFea,:); - AddRelPDD = model.Beta(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel,:); - AddEnhPDD = model.Beta(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+NumAddEnh,:); - AddFeaRelPDID = AddFeaRelWei * diag(DiagAddRel) * AddRelPDD; - OriFeaAddEnhPDID = OriFeaAddEnhWei * diag(DiagAddEnh) * AddEnhPDD; - model.FeaPD{z}(1:NumFeaOri+i*NumAddFea,:) = model.FeaPD{z}(1:NumFeaOri+i*NumAddFea,:) + OriFeaAddEnhPDID; - model.FeaPD{z}(NumFeaOri+(i-1)*NumAddFea + 1:NumFeaOri+i*NumAddFea,:) = model.FeaPD{z}(NumFeaOri+(i-1)*NumAddFea + 1:NumFeaOri+i*NumAddFea,:) + AddFeaRelPDID; - end - model.AllPD{z}(1:NumFeaOri,:) = 
model.FeaPD{z}(1:NumFeaOri,:); - for k = 1:model.AddNodeStep - model.AllPD{z}(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+NumAddFea,:)... - = model.FeaPD{z}(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); - end - [~,Max_index] = max(model.AllPD{z},[],2); - Max_index_{z}=find(Max_index==z); - AllPD_z = model.AllPD{z}(:,z) ; - [row_descend,index_temp] = sort(AllPD_z,"descend"); - row_descend_line = sort(linspace(min(row_descend),max(row_descend),length(model.Beta(:,1))),'descend'); - [~,ban_index] = min(abs(row_descend_line(2:end-1) - row_descend(2:end-1)')); - - - %% method of tangent line -% abs_index = 1:length(row_descend); -% [~,ban_index] = min(sqrt(row_descend'.^2+abs_index.^2)); - - - - if row_descend(ban_index)>0 - else - [~,ban_index] = min(row_descend(row_descend>0)); - end - Selected_row = index_temp(1:ban_index); - SelectNeurons = intersect(Selected_row,Max_index_{z}); - SelectNeruonSet = union(SelectNeruonSet,SelectNeurons); - end - end - model.BanNodes = setdiff(index_temp,SelectNeruonSet); - end - end %methodq -end %class - - - - - +classdef FPD_SA_Online + properties + Name = 'On-line SASO-BLS'; + + end + %% Functions and algorithm + methods (Static = true) + function model = SA(model,SelTrainA,NumEech4SA,sigfun,mode,SASO_version) + NumFeaOri = model.NumPerWin * model.NumWindow; + NumEnhOri = model.NumEnhance; + NumAddFea = model.NumAddFea; + NumAddRel = model.NumAddRel; + NumAddEnh = model.NumAddEnh; + NumClass = length(model.Beta(1,:)); + if model.AddNodeStep == 0 + AddFeaPDD = model.Beta(1:NumFeaOri,:); + AddEnhPDD = model.Beta(NumFeaOri + 1:NumFeaOri+NumEnhOri,:); + AddEnhSel = SelTrainA(:,NumFeaOri + 1:NumFeaOri+NumEnhOri); + AddRelSel = []; + AddFeaEnhWei = model.FeaEnhWei(1:NumFeaOri,:); + OriFeaAddEnhWei = []; + AddFeaRelWei = []; + AddRelPDD = []; + elseif strcmp(mode,'AN') + AddFeaPDD = model.Beta(end - (NumAddFea+NumAddEnh+NumAddRel) + 1:end - 
(NumAddEnh+NumAddRel),:); + AddEnhSel = SelTrainA(:,end-NumAddEnh+1:end); + AddRelSel = SelTrainA(:,end-NumAddRel-NumAddEnh+1:end-NumAddEnh); + AddFeaEnhWei = model.AllFeaAddEnhWei{model.AddNodeStep}(end-NumAddFea:end-1,:); + OriFeaAddEnhWei = model.AllFeaAddEnhWei{model.AddNodeStep}(1:end-NumAddFea-1,:); + AddFeaRelWei = model.AddFeaRelWei{model.AddNodeStep}(1:NumAddFea,:); + AddRelPDD = model.Beta(end - NumAddRel - NumAddEnh+1:end -NumAddEnh,:); + AddEnhPDD = model.Beta(end-NumAddEnh+1:end,:); + else + AddDataOriEnhPDD = model.Beta(NumFeaOri+1:NumFeaOri+NumEnhOri,:); + AddDataOriEnhSel = SelTrainA(:,NumFeaOri+1:NumFeaOri+NumEnhOri); + end + SelectNeruonSet = []; + if strcmp(mode,'AN') || model.AddNodeStep == 0 + for z = 1:NumClass + if strcmp(sigfun,'logsig') + DiagAddEnhAll = AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + try + DiagAddRelAll = AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + catch + DiagAddRelAll = []; + end + else + DiagAddEnhAll = 1-AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + try + DiagAddRelAll = 1-AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + catch + DiagAddRelAll = []; + end + end + DiagAddEnh = sqrt(sum(DiagAddEnhAll.^2,1)./NumEech4SA); + try + DiagAddRel = sqrt(sum(DiagAddRelAll.^2,1)./NumEech4SA); + catch + DiagAddRel = []; + end + if model.AddNodeStep == 0 + AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; + model.FeaPD{z} = AddFeaPDD + AddFeaEnhPDID; + model.AllPD{z} = [model.FeaPD{z};AddEnhPDD]; + else + OriFeaPD = model.FeaPD{z} + OriFeaAddEnhWei * diag(DiagAddEnh) * AddEnhPDD; + AddFeaEnhPDID = AddFeaEnhWei * diag(DiagAddEnh) * AddEnhPDD; + AddFeaRelPDID = AddFeaRelWei * diag(DiagAddRel) * AddRelPDD; + AddFeaPD = AddFeaPDD + AddFeaEnhPDID + AddFeaRelPDID; + model.FeaPD{z} = [OriFeaPD;AddFeaPD]; + for k = 
1:model.AddNodeStep-1 + model.AllPD{z}(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+NumAddFea,:)... + = model.FeaPD{z}(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); + end + model.AllPD{z} = [model.AllPD{z} ; AddFeaPD ; AddRelPDD ; AddEnhPDD]; + end + [~,Max_index] = max(model.AllPD{z},[],2); + Max_index_{z}=find(Max_index==z); + AllPD_z = model.AllPD{z}(:,z) ; + [row_descend,index_temp] = sort(abs(AllPD_z),'descend'); + + + if strcmp(SASO_version,'P') + % method of performance + [row_descend,index_temp] = sort(AllPD_z,'descend'); + row_descend_line = sort(linspace(min(row_descend),max(row_descend),length(model.Beta(:,1))),'descend'); + [~,ban_index] = min(abs(row_descend_line(2:end-1) - row_descend(2:end-1)')); + elseif strcmp(SASO_version,'M') + % method of miniature + abs_index = 1:length(row_descend); + [~,ban_index] = min(sqrt(row_descend'.^2+abs_index.^2)); + end + + + if row_descend(ban_index)>0 + else + [~,ban_index] = min(row_descend(row_descend>0)); + end + Selected_row = index_temp(1:ban_index); + SelectNeurons = intersect(Selected_row,Max_index_{z}); + SelectNeruonSet = union(SelectNeruonSet,SelectNeurons); + end + else + for z = 1:NumClass + if strcmp(sigfun,'logsig') + DiagOriEnh = AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + else + DiagOriEnh = 1-AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddDataOriEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + end + DiagOriEnh = sqrt(sum(DiagOriEnh.^2,1)./NumEech4SA); + OriFeaOriEnhPDID = model.FeaEnhWei(1:NumFeaOri,:) * diag(DiagOriEnh) * AddDataOriEnhPDD; + model.FeaPD{z}(1:NumFeaOri,:) = model.FeaPD{z}(1:NumFeaOri,:) + OriFeaOriEnhPDID; + for i = 1:model.AddNodeStep + AddDataAddRelSel = SelTrainA(:,NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel); + 
AddDataAddEnhSel = SelTrainA(:,NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+NumAddEnh); + if strcmp(sigfun,'logsig') + DiagAddStepRel = AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + DiagAddStepEnh = AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .*(1-AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:)); + else + DiagAddStepRel = 1-AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddDataAddRelSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + DiagAddStepEnh = 1-AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:) .* AddDataAddEnhSel((z-1)*NumEech4SA+1:z*NumEech4SA,:); + end + DiagAddRel = sqrt(sum(DiagAddStepRel.^2,1)./NumEech4SA); + DiagAddEnh = sqrt(sum(DiagAddStepEnh.^2,1)./NumEech4SA); + AddFeaRelWei = model.AddFeaRelWei{i}(1:NumAddFea,:); + OriFeaAddEnhWei = model.AllFeaAddEnhWei{i}(1:NumFeaOri+i*NumAddFea,:); + AddRelPDD = model.Beta(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel,:); + AddEnhPDD = model.Beta(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(i-1)+NumAddFea+NumAddRel+NumAddEnh,:); + AddFeaRelPDID = AddFeaRelWei * diag(DiagAddRel) * AddRelPDD; + OriFeaAddEnhPDID = OriFeaAddEnhWei * diag(DiagAddEnh) * AddEnhPDD; + model.FeaPD{z}(1:NumFeaOri+i*NumAddFea,:) = model.FeaPD{z}(1:NumFeaOri+i*NumAddFea,:) + OriFeaAddEnhPDID; + model.FeaPD{z}(NumFeaOri+(i-1)*NumAddFea + 1:NumFeaOri+i*NumAddFea,:) = model.FeaPD{z}(NumFeaOri+(i-1)*NumAddFea + 1:NumFeaOri+i*NumAddFea,:) + AddFeaRelPDID; + end + model.AllPD{z}(1:NumFeaOri,:) = model.FeaPD{z}(1:NumFeaOri,:); + for k = 1:model.AddNodeStep + 
model.AllPD{z}(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddRel)*(k-1)+NumAddFea,:)... + = model.FeaPD{z}(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); + end + + [~,Max_index] = max(model.AllPD{z},[],2); + Max_index_{z}=find(Max_index==z); + + AllPD_z = model.AllPD{z}(:,z) ; + [row_descend,index_temp] = sort(AllPD_z,'descend'); + + if strcmp(SASO_version,'P') + + row_descend_line = sort(linspace(min(row_descend),max(row_descend),length(model.Beta(:,1))),'descend'); + [~,ban_index] = min(abs(row_descend_line(2:end-1) - row_descend(2:end-1)')); + elseif strcmp(SASO_version,'M') + % method of tangent line + abs_index = 1:length(row_descend); + [~,ban_index] = min(sqrt(row_descend'.^2+abs_index.^2)); + end + + + + if row_descend(ban_index)>0 + else + [~,ban_index] = min(row_descend(row_descend>0)); + end + Selected_row = index_temp(1:ban_index); + SelectNeurons = intersect(Selected_row,Max_index_{z}); + SelectNeruonSet = union(SelectNeruonSet,SelectNeurons); + end + end + model.BanNodes = setdiff(index_temp,SelectNeruonSet); + end + end %methodq +end %class + + + + + diff --git a/functions/OTAT_Off.m b/functions/OTAT_Off.m new file mode 100644 index 0000000..8b228c9 --- /dev/null +++ b/functions/OTAT_Off.m @@ -0,0 +1,66 @@ +classdef OTAT_Off + properties + Name = 'other sa method'; + + end + %% Functions and algorithm + methods (Static = true) + function model = OT_SA(model,SelTrainA,num_resample,method) + M = length(SelTrainA(1,:)); + xmin = min(SelTrainA); + xmax = max(SelTrainA); + SampStrategy = 'lhs' ; + DistrFun = 'unif' ; + DistrPar=cell(M,1); for i=1:M; DistrPar{i}=[xmin(i) xmax(i)]; end + if any(strcmpi(method,{'SV_SA','GV_SA'})) + tau = 0.00001; + X = AAT_sampling(SampStrategy,M,DistrFun,DistrPar,2*num_resample); + [ XA, XB, XC ] = vbsa_resampling(X) ; + YA = XA * model.Beta ; % size (num_resample,10) + YB = XB * model.Beta ; % size (num_resample,10) + YC =XC * model.Beta ; % size 
(num_resample*M,10) + for i = 1:length(YA(1,:)) + [ Si(:,i), STi(:,i)] = vbsa_indices(YA(:,i),YB(:,i),YC(:,i)); + end + if strcmp(method,'SV_SA') + S_matrix = Si; + elseif strcmp(method,'GV_SA') + S_matrix = STi; + end + elseif strcmp(method,'EET_SA') + design_type='trajectory'; + tau = 0.001; + X = OAT_sampling(num_resample,M,DistrFun,DistrPar,SampStrategy,design_type); + Y = X * model.Beta; + for i =1:length(Y(1,:)) + Yi = Y(:,i); + [ S_matrix(:,i), ~ ] = EET_indices(num_resample,xmin,xmax,X,Yi,design_type); + end + end + FyFeature = max(S_matrix,[],2); + [FyFeature_sort, Sort_index] = sort(FyFeature,'descend'); + DeltaFeatureFy = zeros(length(FyFeature_sort)-1,1); + for i = 1:(length(FyFeature_sort)-1) + DeltaFeatureFy(i) = FyFeature_sort(i)-FyFeature_sort(i+1); + end + MeanFeatureFy = mean(DeltaFeatureFy); + Condition1 = DeltaFeatureFy./FyFeature_sort(1:end-1); + % condition 1 + Condition1_judge = (Condition1>tau); + SelectIndex1 = find(Condition1_judge==1); + SelectNeuron1 = Sort_index(SelectIndex1); + % condition 2 + Condition2_judge = (FyFeature_sort>1*MeanFeatureFy) ; + SelectIndex2 = find(Condition2_judge==1); + SelectNeuron2 = Sort_index(SelectIndex2); + % union + SelectNeruonSet = intersect(SelectNeuron1,SelectNeuron2); + model.BanNodes = setdiff(Sort_index,SelectNeruonSet); + end + end %methodq +end %class + + + + + diff --git a/functions/PD_TSA_Off.m b/functions/PD_TSA_Off.m index b29b4f7..414e1f8 100644 --- a/functions/PD_TSA_Off.m +++ b/functions/PD_TSA_Off.m @@ -1,113 +1,112 @@ -classdef PD_TSA_Off - properties - Name = 'Off-line Broad Learning System'; - - end - %% Functions and algorithm - methods (Static = true) - function model = TSA(model,SelTrainA,sigfun) - NumFeaOri = model.NumPerWin * model.NumWindow; - NumEnhOri = model.NumEnhance; - EnhMatSelOri = SelTrainA(:,NumFeaOri + 1:NumFeaOri+NumEnhOri); - NumClass = length(model.Beta(1,:)); - if model.Step == 0 - AddFeaPDD = model.Beta(1:NumFeaOri,:); - AddEnhPDD = model.Beta(NumFeaOri + 
1:NumFeaOri+NumEnhOri,:); - AddEnhSel = EnhMatSelOri; - AddRelSel = []; - AddFeaEnhWei = model.FeaEnhWei(1:NumFeaOri,:); - OriFeaAddEnhWei = []; - AddFeaRelWei = []; - AddRelPDD = []; - - AddFeaEnhPDID3 = zeros(NumFeaOri,NumClass,length(SelTrainA(:,1))); - AddFeaRelPDID3 = []; - - else - NumAddFea = model.NumAddFea; - NumAddRel = model.NumAddRel; - NumAddEnh = model.NumAddEnh; - AddFeaPDD = model.Beta(end - (NumAddFea+NumAddEnh+NumAddRel) + 1:end - (NumAddEnh+NumAddRel),:); - AddEnhSel = SelTrainA(:,end-NumAddEnh+1:end); - AddRelSel = SelTrainA(:,end-NumAddRel-NumAddEnh+1:end-NumAddEnh); - AddFeaEnhWei = model.AllFeaAddEnhWei{model.Step}(end-NumAddFea:end-1,:); - OriFeaAddEnhWei = model.AllFeaAddEnhWei{model.Step}(1:end-NumAddFea-1,:); - AddFeaRelWei = model.AddFeaRelWei{model.Step}(1:NumAddFea,:); - AddRelPDD = model.Beta(end - NumAddRel - NumAddEnh+1:end -NumAddEnh,:); - AddEnhPDD = model.Beta(end-NumAddEnh+1:end,:); - - AddFeaEnhPDID3 = zeros(NumAddEnh,NumClass,length(SelTrainA(:,1))); - AddFeaRelPDID3 = zeros(NumAddRel,NumClass,length(SelTrainA(:,1))); - end - for z = 1:length(SelTrainA(:,1)) - if strcmp(sigfun,'logsig') - DiagAddEnh = diag(AddEnhSel(z,:) .*(1-AddEnhSel(z,:))); - try - DiagAddRel = diag(AddRelSel(z,:) .*(1-AddRelSel(z,:))); - catch - DiagAddRel = []; - end - else - DiagAddEnh = diag(1-AddEnhSel(z,:) .* AddEnhSel(z,:)); - try - DiagAddRel = diag(1-AddRelSel(z,:) .* AddRelSel(z,:)); - catch - DiagAddRel = []; - end - end - if model.Step == 0 - AddFeaEnhPDID3(:,:,z) = AddFeaEnhWei * DiagAddEnh * AddEnhPDD; - else - AddFeaEnhPDID3(:,:,z) = AddFeaEnhWei * DiagAddEnh * AddEnhPDD; - AddFeaRelPDID3(:,:,z) = AddFeaRelWei * DiagAddRel * AddRelPDD; - end - end - - - if model.Step == 0 - FeaEnhSMatrixInDSel= sqrt(sum(AddFeaEnhPDID3.^2,3)./length(SelTrainA(:,1))); - else - FeaEnhSMatrixInDSel = sqrt(sum(AddFeaEnhPDID3.^2,3)./length(SelTrainA(:,1))); - RelEnhanSMatrixInDSel = sqrt(sum(AddFeaRelPDID3.^2,3)./length(SelTrainA(:,1))); - end - - if model.Step == 
0 - model.FeaPD = FeaEnhSMatrixInDSel + AddFeaPDD; - model.AllPD = [model.FeaPD;AddEnhPDD]; - else - OriFeaPD = model.FeaPD + OriFeaAddEnhWei * DiagAddEnh * AddEnhPDD; - AddFeaPD = AddFeaPDD + FeaEnhSMatrixInDSel + RelEnhanSMatrixInDSel; - model.FeaPD = [OriFeaPD;AddFeaPD]; - for k = 1:model.Step-1 - model.AllPD(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+NumAddFea,:)... - = model.FeaPD(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); - end - model.AllPD = [model.AllPD ; AddFeaPD ; AddRelPDD ; AddEnhPDD]; - end - FyFeature = max(model.AllPD,[],2); - [FyFeature_sort, Sort_index] = sort(FyFeature,'descend'); - DeltaFeatureFy = zeros(length(FyFeature_sort)-1,1); - for i = 1:(length(FyFeature_sort)-1) - DeltaFeatureFy(i) = FyFeature_sort(i)-FyFeature_sort(i+1); - end - MeanFeatureFy = mean(DeltaFeatureFy); - - Condition1 = DeltaFeatureFy./FyFeature_sort(1:end-1); - Condition1_judge = (Condition1>0.001); - SelectIndex1 = find(Condition1_judge==1); - SelectNeuron1 = Sort_index(SelectIndex1); - - Condition2_judge = (FyFeature_sort>1*MeanFeatureFy) ; - SelectIndex2 = find(Condition2_judge==1); - SelectNeuron2 = Sort_index(SelectIndex2); - - SelectNeruonSet = intersect(SelectNeuron1,SelectNeuron2); - model.BanNodes = setdiff(Sort_index,SelectNeruonSet); - end - end %methodq -end %class - - - - - +classdef PD_TSA_Off + properties + Name = 'Traditional SA method'; + + end + %% Functions and algorithm + methods (Static = true) + function model = TSA(model,SelTrainA,sigfun) + NumFeaOri = model.NumPerWin * model.NumWindow; + NumEnhOri = model.NumEnhance; + EnhMatSelOri = SelTrainA(:,NumFeaOri + 1:NumFeaOri+NumEnhOri); + NumClass = length(model.Beta(1,:)); + if model.Step == 0 + AddFeaPDD = model.Beta(1:NumFeaOri,:); + AddEnhPDD = model.Beta(NumFeaOri + 1:NumFeaOri+NumEnhOri,:); + AddEnhSel = EnhMatSelOri; + AddRelSel = []; + AddFeaEnhWei = model.FeaEnhWei(1:NumFeaOri,:); + OriFeaAddEnhWei = []; + 
AddFeaRelWei = []; + AddRelPDD = []; + + AddFeaEnhPDID3 = zeros(NumFeaOri,NumClass,length(SelTrainA(:,1))); + AddFeaRelPDID3 = []; + + else + NumAddFea = model.NumAddFea; + NumAddRel = model.NumAddRel; + NumAddEnh = model.NumAddEnh; + AddFeaPDD = model.Beta(end - (NumAddFea+NumAddEnh+NumAddRel) + 1:end - (NumAddEnh+NumAddRel),:); + AddEnhSel = SelTrainA(:,end-NumAddEnh+1:end); + AddRelSel = SelTrainA(:,end-NumAddRel-NumAddEnh+1:end-NumAddEnh); + AddFeaEnhWei = model.AllFeaAddEnhWei{model.Step}(end-NumAddFea:end-1,:); + OriFeaAddEnhWei = model.AllFeaAddEnhWei{model.Step}(1:end-NumAddFea-1,:); + AddFeaRelWei = model.AddFeaRelWei{model.Step}(1:NumAddFea,:); + AddRelPDD = model.Beta(end - NumAddRel - NumAddEnh+1:end -NumAddEnh,:); + AddEnhPDD = model.Beta(end-NumAddEnh+1:end,:); + + AddFeaEnhPDID3 = zeros(NumAddEnh,NumClass,length(SelTrainA(:,1))); + AddFeaRelPDID3 = zeros(NumAddRel,NumClass,length(SelTrainA(:,1))); + end + for z = 1:length(SelTrainA(:,1)) + if strcmp(sigfun,'logsig') + DiagAddEnh = diag(AddEnhSel(z,:) .*(1-AddEnhSel(z,:))); + try + DiagAddRel = diag(AddRelSel(z,:) .*(1-AddRelSel(z,:))); + catch + DiagAddRel = []; + end + else + DiagAddEnh = diag(1-AddEnhSel(z,:) .* AddEnhSel(z,:)); + try + DiagAddRel = diag(1-AddRelSel(z,:) .* AddRelSel(z,:)); + catch + DiagAddRel = []; + end + end + if model.Step == 0 + AddFeaEnhPDID3(:,:,z) = AddFeaEnhWei * DiagAddEnh * AddEnhPDD; + else + AddFeaEnhPDID3(:,:,z) = AddFeaEnhWei * DiagAddEnh * AddEnhPDD; + AddFeaRelPDID3(:,:,z) = AddFeaRelWei * DiagAddRel * AddRelPDD; + end + end + + + if model.Step == 0 + FeaEnhSMatrixInDSel= sqrt(sum(AddFeaEnhPDID3.^2,3)./length(SelTrainA(:,1))); + else + FeaEnhSMatrixInDSel = sqrt(sum(AddFeaEnhPDID3.^2,3)./length(SelTrainA(:,1))); + RelEnhanSMatrixInDSel = sqrt(sum(AddFeaRelPDID3.^2,3)./length(SelTrainA(:,1))); + end + + if model.Step == 0 + model.FeaPD = FeaEnhSMatrixInDSel + AddFeaPDD; + model.AllPD = [model.FeaPD;AddEnhPDD]; + else + OriFeaPD = model.FeaPD + OriFeaAddEnhWei * 
DiagAddEnh * AddEnhPDD; + AddFeaPD = AddFeaPDD + FeaEnhSMatrixInDSel + RelEnhanSMatrixInDSel; + model.FeaPD = [OriFeaPD;AddFeaPD]; + for k = 1:model.Step-1 + model.AllPD(NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+1:NumFeaOri+NumEnhOri+(NumAddFea+NumAddRel+NumAddEnh)*(k-1)+NumAddFea,:)... + = model.FeaPD(NumFeaOri+NumAddFea*(k-1)+1:NumFeaOri+NumAddFea*k,:); + end + model.AllPD = [model.AllPD ; AddFeaPD ; AddRelPDD ; AddEnhPDD]; + end + FyFeature = max(model.AllPD,[],2); + [FyFeature_sort, Sort_index] = sort(FyFeature,'descend'); + DeltaFeatureFy = zeros(length(FyFeature_sort)-1,1); + for i = 1:(length(FyFeature_sort)-1) + DeltaFeatureFy(i) = FyFeature_sort(i)-FyFeature_sort(i+1); + end + MeanFeatureFy = mean(DeltaFeatureFy); + Condition1 = DeltaFeatureFy./FyFeature_sort(1:end-1); + Condition1_judge = (Condition1>0.001); + SelectIndex1 = find(Condition1_judge==1); + SelectNeuron1 = Sort_index(SelectIndex1); + + Condition2_judge = (FyFeature_sort>1*MeanFeatureFy) ; + SelectIndex2 = find(Condition2_judge==1); + SelectNeuron2 = Sort_index(SelectIndex2); + + SelectNeruonSet = intersect(SelectNeuron1,SelectNeuron2); + model.BanNodes = setdiff(Sort_index,SelectNeruonSet); + end + end %methodq +end %class + + + + + diff --git a/functions/SASO_Class.m b/functions/SASO_Class.m index 32447f4..9e51b0e 100644 --- a/functions/SASO_Class.m +++ b/functions/SASO_Class.m @@ -30,7 +30,6 @@ InitMed % Model initializaiton method BanNodes % List of baned nodes Step % Incremental step - NormMethod % Normlization method FeaPD % Partial differential of feature nodes AllPD % Partial differential of all nodes end @@ -38,7 +37,7 @@ %% Functions and algorithm methods %% Functions and algorithm - function Obj = SASO_Class(NumPerWin,NumWindow,NumEnhance,NumAddFea,NumAddRel,NumAddEnh,ShrScale,L2Param,BanIndex,Step,sigfun,InitMed,NormMethod) + function Obj = SASO_Class(NumPerWin,NumWindow,NumEnhance,NumAddFea,NumAddRel,NumAddEnh,ShrScale,L2Param,BanIndex,Step,sigfun,InitMed) 
Obj.NumPerWin = NumPerWin; Obj.NumWindow = NumWindow; Obj.NumEnhance = NumEnhance; @@ -50,8 +49,7 @@ Obj.sigfun = sigfun; Obj.InitMed = InitMed; Obj.BanNodes = BanIndex; - Obj.Step = Step; - Obj.NormMethod = NormMethod; + Obj.Step = Step; end %% Train function diff --git a/functions/SASO_Class_online.m b/functions/SASO_Class_online.m index 1556911..f7a1e92 100644 --- a/functions/SASO_Class_online.m +++ b/functions/SASO_Class_online.m @@ -1,409 +1,408 @@ -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% Fast Sensitivity Analysis Based Online Self-Organizing Broad Learning System (Matlab) -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% Copyright (C) 2022 - -classdef SASO_Class_online - properties - Name = 'Increamental Broad Learning System'; - - NumPerWin % Feature nodes per window - NumWindow % Number of windows of feature nodes - NumEnhance % Number of enhancement nodes - NumAddFea % Number of feature nodes per increment step - NumAddRel % Number of enhancement nodes related to the incremental feature nodes per increment step - NumAddEnh % Number of enhancement nodes in each incremental learning - IncStep % Steps of incremental learning - ShrScale % The shrinkage scale of the enhancement nodes - RelScale % The shrinkage scale of the related added enhancement nodes - L2Param % The L2 regularization parameter - SpaInpFeaWei % Sparce weight matrix from input to feature layer - NormFeaTot = struct % Normlization setting of feature layer - FeaEnhWei % Weight matrix from feature layer to enhance layer - Beta % Weight matrix from feature layer and enhance layer to output - TotFeaSpa % total feature matrix - A_Matrix_Train % State matrix - AllFeaAddEnhWei % Weight from all feature nodes to added enhancement nodes - AddFeaRelWei % Weight from added featrue nodes to related enhancement nodes - AddEnhScale % The shrinkage scale of the added enhancement nodes - A_Inverse % Inverse of state matrix - sigfun % 
Activation funciton - InitMed % Model initializaiton method - BanNodes % List of baned nodes - FeaPD % Partial differential of feature nodes - AllPD % Partial differential of all nodes - AddNodeStep % Nodes add step - AddDataStep % Data add step - end - %% Functions and algorithm - methods - %% Functions and algorithm - function Obj = SASO_Class_online(NumPerWin,NumWindow,NumEnhance,NumAddFea,NumAddRel,NumAddEnh,ShrScale,L2Param,BanIndex,AddNodeStep,AddDataStep,sigfun,InitMed) - Obj.NumPerWin = NumPerWin; - Obj.NumWindow = NumWindow; - Obj.NumEnhance = NumEnhance; - Obj.NumAddFea = NumAddFea; - Obj.NumAddRel = NumAddRel; - Obj.NumAddEnh = NumAddEnh; - Obj.ShrScale = ShrScale; - Obj.L2Param = L2Param; - Obj.sigfun = sigfun; - Obj.InitMed = InitMed; - Obj.BanNodes = BanIndex; - Obj.AddNodeStep = AddNodeStep; - Obj.AddDataStep = AddDataStep; - end - %% BLS train function - function Obj = Train(Obj, Input, Target) - %% feature nodes - Input = zscore(Input')'; - InputMat = [Input .1 * ones(size(Input,1),1)]; %加了一列偏置后的输入 - Obj.TotFeaSpa=zeros(size(Input,1),Obj.NumWindow * Obj.NumPerWin); - for i = 1:Obj.NumWindow - InpFeaWei = MyClassTools.IntialMed(size(Input,2)+1,Obj.NumPerWin,Obj.InitMed); - FeaPerWin = mapminmax(InputMat * InpFeaWei); - Obj.SpaInpFeaWei{i} = Obj.Sparse_bls(FeaPerWin,InputMat,1e-3,50)'; - Fea_Temp = InputMat * Obj.SpaInpFeaWei{i}; - [Fea_Temp,NormFea] = mapminmax(Fea_Temp',0,1); - NormTotTemp(i) = NormFea; - Obj.TotFeaSpa(:,Obj.NumPerWin*(i-1)+1:Obj.NumPerWin*i) = Fea_Temp'; - end - Obj.NormFeaTot = NormTotTemp; -% clear FeaPerWin InputMat input Fea_Temp; - %% enhancement nodes - FeaMat = [Obj.TotFeaSpa .1 * ones(size(Obj.TotFeaSpa,1),1)]; - if Obj.NumPerWin * Obj.NumWindow >= Obj.NumEnhance %正交化 - Obj.FeaEnhWei = orth(MyClassTools.IntialMed(Obj.NumWindow * Obj.NumPerWin + 1,Obj.NumEnhance,Obj.InitMed)); - else - Obj.FeaEnhWei = orth(MyClassTools.IntialMed(Obj.NumWindow * Obj.NumPerWin + 1,Obj.NumEnhance,Obj.InitMed)')'; - - end - Enhance = FeaMat * 
Obj.FeaEnhWei; - clear FeaMat; - Obj.ShrScale = Obj.ShrScale / max(max(Enhance)); - if strcmp(Obj.sigfun,'logsig') - Enhance = logsig(Enhance * Obj.ShrScale); - else - Enhance = tansig(Enhance * Obj.ShrScale); - end - Obj.A_Matrix_Train = [Obj.TotFeaSpa Enhance]; - clear Enhance; - Obj.A_Inverse = (Obj.A_Matrix_Train' * Obj.A_Matrix_Train + eye(size(Obj.A_Matrix_Train',1)) * (Obj.L2Param)) \ ( Obj.A_Matrix_Train'); - Obj.Beta = Obj.A_Inverse * Target; - end - - %% BLS incremtal function - function Obj = DataIncBLS(Obj,AddInput,AllTarget) - AddInput = zscore(AddInput')'; - AddInputMat = [AddInput .1 * ones(size(AddInput,1),1)]; - AddInputOriFea_Tot = []; - for i = 1:Obj.NumWindow+Obj.AddNodeStep - AddInputFea_Temp = AddInputMat * Obj.SpaInpFeaWei{i}; - AddInputFea_Temp = mapminmax('apply',AddInputFea_Temp',Obj.NormFeaTot(i))'; - AddInputOriFea_Tot = [AddInputOriFea_Tot AddInputFea_Temp]; - end - Obj.TotFeaSpa = [Obj.TotFeaSpa;AddInputOriFea_Tot]; - - if Obj.AddNodeStep == 0 - AddFeaMat = [AddInputOriFea_Tot .1 * ones(size(AddInputOriFea_Tot,1),1)]; - AddEnhance = AddFeaMat * Obj.FeaEnhWei; - if strcmp(Obj.sigfun,'logsig') - AddEnhance = logsig(AddEnhance * Obj.ShrScale); - elseif strcmp(Obj.sigfun,'tansig') - AddEnhance = tansig(AddEnhance * Obj.ShrScale); - end - A_Matrix_Add = [AddInputOriFea_Tot AddEnhance]; - else - AddInputOriFea = AddInputOriFea_Tot(:,1:Obj.NumWindow*Obj.NumPerWin); - AddInputOriFeaMat = [AddInputOriFea .1 * ones(size(AddInputOriFea_Tot,1),1)]; - AddInputOriEnh = AddInputOriFeaMat * Obj.FeaEnhWei; - if strcmp(Obj.sigfun,'logsig') - AddInputOriEnh = logsig(AddInputOriEnh * Obj.ShrScale); - elseif strcmp(Obj.sigfun,'tansig') - AddInputOriEnh = tansig(AddInputOriEnh * Obj.ShrScale); - end - A_Matrix_Add = [AddInputOriFea AddInputOriEnh]; - for j = 1:Obj.AddNodeStep - AddInputAddFea = AddInputOriFea_Tot(:,Obj.NumWindow*Obj.NumPerWin+(j-1)*Obj.NumAddFea+1:Obj.NumWindow*Obj.NumPerWin+j*Obj.NumAddFea); - AddInputAddFeaMat = [AddInputAddFea .1 * 
ones(size(AddInputAddFea,1),1)]; - AddInputRel = AddInputAddFeaMat * Obj.AddFeaRelWei{j}; - if strcmp(Obj.sigfun,'logsig') - AddInputRel = logsig(AddInputRel * Obj.RelScale(j)); - else - AddInputRel = tansig(AddInputRel * Obj.RelScale(j)); - end - AddInputOriFea = AddInputOriFea_Tot(:,1:Obj.NumWindow*Obj.NumPerWin+j*Obj.NumAddFea); - AddInputAllFeaMat = [AddInputOriFea .1 * ones(size(AddInputAddFea,1),1)]; - AddInputAddEnh = AddInputAllFeaMat * Obj.AllFeaAddEnhWei{j}; - if strcmp(Obj.sigfun,'logsig') - AddInputAddEnh = logsig(AddInputAddEnh * Obj.AddEnhScale(j)); - else - AddInputAddEnh = tansig(AddInputAddEnh * Obj.AddEnhScale(j)); - end - A_Matrix_Add = [A_Matrix_Add,AddInputAddFea,AddInputRel,AddInputAddEnh]; - end - - end - AddA_Inverse = (A_Matrix_Add' * A_Matrix_Add+eye(size(A_Matrix_Add',1)) * (Obj.L2Param)) \ ( A_Matrix_Add' ); - Obj.A_Inverse = [Obj.A_Inverse AddA_Inverse]; - Obj.Beta = Obj.A_Inverse * AllTarget; - Obj.A_Matrix_Train = [Obj.A_Matrix_Train;A_Matrix_Add]; - end - - %% BLS test function - function output = GetOutput(Obj,Data) - Data = zscore(Data')'; - InpMat = [Data .1 * ones(size(Data,1),1)]; - AllFea = zeros(size(Data,1),Obj.NumWindow * Obj.NumPerWin); - clear Data - for i=1:Obj.NumWindow - FeatTemp = InpMat * Obj.SpaInpFeaWei{i}; - FeatTemp = mapminmax('apply',FeatTemp',Obj.NormFeaTot(i))'; - AllFea(:,Obj.NumPerWin*(i-1)+1:Obj.NumPerWin*i)=FeatTemp; - end - clear FeatTemp; - if Obj.AddNodeStep == 0 - FeaMat = [AllFea .1 * ones(size(AllFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AllEnh = logsig(FeaMat * Obj.FeaEnhWei * Obj.ShrScale); - else - AllEnh = tansig(FeaMat * Obj.FeaEnhWei * Obj.ShrScale); - end - AMatrix = [AllFea AllEnh]; - clear FeaMat; - clear AllEnh; - output = AMatrix * Obj.Beta; - else - OriFea = AllFea; - OriFeaMat = [OriFea .1 * ones(size(AllFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AMatrix = [OriFea logsig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale)]; - else - AMatrix = [OriFea tansig(OriFeaMat * Obj.FeaEnhWei * 
Obj.ShrScale)]; - end - clear OriFeaMat AllFea - for j=1:Obj.AddNodeStep - AddFea = InpMat * Obj.SpaInpFeaWei{Obj.NumWindow+j}; - AddFea = mapminmax('apply',AddFea',Obj.NormFeaTot(Obj.NumWindow + j))'; - if isempty(AddFea) - AddRel = []; - else - AddFeaMat = [AddFea .1 * ones(size(AddFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AddRel = logsig(AddFeaMat * Obj.AddFeaRelWei{j} * Obj.RelScale(j)); - else - AddRel = tansig(AddFeaMat * Obj.AddFeaRelWei{j} * Obj.RelScale(j)); - end - end - OriFea = [OriFea AddFea]; - FeaMat = [OriFea .1 * ones(size(OriFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AddEnh = logsig(FeaMat * Obj.AllFeaAddEnhWei{j} * Obj.AddEnhScale(j)); - else - AddEnh = tansig(FeaMat * Obj.AllFeaAddEnhWei{j} * Obj.AddEnhScale(j)); - end - AddTot = [AddFea AddRel AddEnh]; - AMatrix = [AMatrix AddTot]; - end - clear AddTot AllFea AddFea AddRel AddEnh FeaMat AddFeaMat InpMat - output = AMatrix * Obj.Beta; - end - end - - %% Nodes incremental BLS - function Obj = IncBLS(Obj,Input,Target) - Input = zscore(Input')'; - InputMat = [Input .1 * ones(size(Input,1),1)]; - AddInpFeaWei = MyClassTools.IntialMed(size(Input,2)+1,Obj.NumAddFea,Obj.InitMed); - - AddFeature = mapminmax(InputMat * AddInpFeaWei); -% clear AddInpFeaWei Input; - AddSapInpFeaWei = Obj.Sparse_bls(AddFeature,InputMat,1e-3,50)'; - Obj.SpaInpFeaWei{Obj.NumWindow + Obj.AddNodeStep} = AddSapInpFeaWei; -% clear AddFeature; - - AddFeaSpa = InputMat * AddSapInpFeaWei; -% clear InputMat - - [AddFeaSpa,AddNormFea] = mapminmax(AddFeaSpa',-1,1); - AddFeaSpa = AddFeaSpa'; - Obj.NormFeaTot(Obj.NumWindow + Obj.AddNodeStep) = AddNormFea; -% clear AddNormFea - - Obj.TotFeaSpa = [Obj.TotFeaSpa AddFeaSpa]; - FeaMat = [Obj.TotFeaSpa .1 * ones(size(Obj.TotFeaSpa,1),1)]; - if isempty(AddFeaSpa) - AddFeaSpa = []; - AddRel = []; - else - AddFeaMat = [AddFeaSpa .1 * ones(size(AddFeaSpa,1),1)]; - if Obj.NumAddFea >= Obj.NumAddRel - Obj.AddFeaRelWei{Obj.AddNodeStep} = 
orth(MyClassTools.IntialMed(Obj.NumAddFea+1,Obj.NumAddRel,Obj.InitMed)); - - else - Obj.AddFeaRelWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumAddFea+1,Obj.NumAddRel,Obj.InitMed)')'; - - end - AddRel = AddFeaMat * Obj.AddFeaRelWei{Obj.AddNodeStep}; - Obj.RelScale(Obj.AddNodeStep) = Obj.ShrScale / max(max(AddRel)); - if strcmp(Obj.sigfun,'logsig') - AddRel = logsig(AddRel * Obj.RelScale(Obj.AddNodeStep)); - else - AddRel = tansig(AddRel * Obj.RelScale(Obj.AddNodeStep)); - end - clear AddFeaMat; - end - - if Obj.NumWindow*Obj.NumPerWin+Obj.AddNodeStep*Obj.NumAddFea >= Obj.NumAddEnh - Obj.AllFeaAddEnhWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumWindow*Obj.NumPerWin+Obj.AddNodeStep*Obj.NumAddFea+1,Obj.NumAddEnh,Obj.InitMed)); - else - Obj.AllFeaAddEnhWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumWindow*Obj.NumPerWin+Obj.AddNodeStep*Obj.NumAddFea+1,Obj.NumAddEnh,Obj.InitMed)')'; - end - - AddEnh = FeaMat * Obj.AllFeaAddEnhWei{Obj.AddNodeStep}; - Obj.AddEnhScale(Obj.AddNodeStep) = Obj.ShrScale / max(max(AddEnh)); - if strcmp(Obj.sigfun,'logsig') - AddEnh = logsig(AddEnh * Obj.AddEnhScale(Obj.AddNodeStep)); - else - AddEnh = tansig(AddEnh * Obj.AddEnhScale(Obj.AddNodeStep)); - end -% clear FeaMat - - A_Matrix_Add = [AddFeaSpa AddRel AddEnh]; - A_Matrix_Tot = [Obj.A_Matrix_Train A_Matrix_Add]; - clear AddFeaSpa AddRel AddEnh - Vec_D = Obj.A_Inverse * A_Matrix_Add; - Vec_C = A_Matrix_Add - Obj.A_Matrix_Train * Vec_D; -% clear A_Matrix_Add - if all(Vec_C(:)==0) - [~,w] = size(Vec_D); - Vec_B = (eye(w)-Vec_D'*Vec_D)\(Vec_D'*Obj.A_Inverse); - else - Vec_B = (Vec_C' * Vec_C+eye(size(Vec_C',1)) * (Obj.L2Param)) \ ( Vec_C' ); - end - Obj.A_Inverse = [Obj.A_Inverse-Vec_D*Vec_B;Vec_B]; - clear Vec_B Vec_C Vec_D - Obj.Beta = Obj.A_Inverse * Target; - Obj.A_Matrix_Train = A_Matrix_Tot; - end - - - %% SASO-BLS test funciton - function [Obj,output] = PrunOutput(Obj,Data,BanType,Target,UType) - Data = zscore(Data')'; - InpMat = [Data .1 * 
ones(size(Data,1),1)]; - OriFea=zeros(size(Data,1),Obj.NumWindow * Obj.NumPerWin); - clear Data - for i=1:Obj.NumWindow - AddFea = InpMat * Obj.SpaInpFeaWei{i}; - AddFea = mapminmax('apply',AddFea',Obj.NormFeaTot(i))'; - OriFea(:,Obj.NumPerWin*(i-1)+1:Obj.NumPerWin*i)=AddFea; - end - OriFeaMat = [OriFea .1 * ones(size(OriFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - OriEnh = logsig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale); - else - OriEnh = tansig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale); - end - OriAMatrix = [OriFea OriEnh]; - clear OriEnh - OriFeaSA0 = OriFea; - if strcmp(BanType,'FeatureNodes') || strcmp(BanType,'All') - OriBanFea = Obj.BanNodes(Obj.BanNodes <= Obj.NumWindow * Obj.NumPerWin); - OriFeaSA0(:,OriBanFea) = 0 ; - end - - clear OutputFeature_Temp; - OriFeaMat0 = [OriFeaSA0 .1 * ones(size(OriFeaSA0,1),1)]; - if strcmp(Obj.sigfun,'logsig') - OriEnhSA0 = logsig(OriFeaMat0 * Obj.FeaEnhWei * Obj.ShrScale); - else - OriEnhSA0 = tansig(OriFeaMat0 * Obj.FeaEnhWei * Obj.ShrScale); - end - - OriAMatrix0 = [OriFeaSA0 OriEnhSA0]; - AllAddFeaSA0 = []; - clear OriEnhSA0 - %% Incremental learning - if Obj.AddNodeStep > 0 - for i = 1:Obj.AddNodeStep - AddFea = InpMat * Obj.SpaInpFeaWei{i+Obj.NumWindow}; - AddFea = mapminmax('apply',AddFea',Obj.NormFeaTot(i+Obj.NumWindow))'; - AddFeaMat = [AddFea .1 * ones(size(AddFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AddRel = logsig(AddFeaMat * Obj.AddFeaRelWei{i} * Obj.RelScale(i)); - else - AddRel = tansig(AddFeaMat * Obj.AddFeaRelWei{i} * Obj.RelScale(i)); - end - OriFea = [OriFea AddFea]; - OriFeaMat = [OriFea .1 * ones(size(OriFea,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AddEnh = logsig(OriFeaMat * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i)); - else - AddEnh = tansig(OriFeaMat * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i)); - end - - AMatrix = [OriAMatrix AddFea AddRel AddEnh]; - AddFeaSA0 = AddFea; - AllAddFeaSA0 = [AllAddFeaSA0 AddFeaSA0]; - clear AddRel AddEnh AddFea OriFeaMat - if 
strcmp(BanType,'FeatureNodes') || strcmp(BanType,'All') - AddBanFea = Obj.BanNodes(Obj.BanNodes > Obj.NumWindow * Obj.NumPerWin + Obj.NumEnhance+(i-1) * (Obj.NumAddFea+Obj.NumAddEnh+Obj.NumAddRel)&... - Obj.BanNodes <= Obj.NumWindow * Obj.NumPerWin + Obj.NumEnhance+(i-1) * (Obj.NumAddFea+Obj.NumAddEnh+Obj.NumAddRel)+Obj.NumAddFea); - AddFeaSA0(:,AddBanFea-Obj.NumWindow * Obj.NumPerWin - Obj.NumEnhance-(i-1)*(Obj.NumAddFea+Obj.NumAddEnh+Obj.NumAddRel)) = 0; - clear AddBanFea - AddFeaMatSA0 = [AddFeaSA0 .1 * ones(size(AddFeaSA0,1),1)]; - AllFeaMatSA0 = [OriFeaSA0 AllAddFeaSA0 .1 * ones(size(OriFeaSA0,1),1)]; - if strcmp(Obj.sigfun,'logsig') - AddRelSA0 = logsig(AddFeaMatSA0 * Obj.AddFeaRelWei{i} * Obj.RelScale(i)); - AddEnhSA0 = logsig(AllFeaMatSA0 * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i)); - else - AddRelSA0 = tansig(AddFeaMatSA0 * Obj.AddFeaRelWei{i} * Obj.RelScale(i)); - AddEnhSA0 = tansig(AllFeaMatSA0 * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i)); - end - clear AddFeaMatSA0 AllFeaMatSA0 - OriAMatrix0 = [OriAMatrix0 AddFeaSA0 AddRelSA0 AddEnhSA0]; - end - end - end - clear AddFeaSA0 AddRelSA0 AddEnhSA0 AllFeaMatSA0 AddFeaMatSA0 AddBanFea OriAMatrix - if strcmp(BanType,'EnhanNodes') - AMatrix(:,Obj.BanNodes) = [] ; - else - OriAMatrix0(:, Obj.BanNodes) = []; - AMatrix = OriAMatrix0; - end - clear OriAMatrix0 - if strcmp(UType,'update') - Obj.Beta = (AMatrix' * AMatrix+eye(size(AMatrix',1)) * (Obj.L2Param)) \ ( AMatrix' * Target); - end - - clear FeaMat; - clear OutputEnhance; - output = AMatrix * Obj.Beta; - end - - function wk = Sparse_bls(Obj,A,b,lam,itrs) - AA = (A') * A; - m = size(A,2); - n = size(b,2); - x = zeros(m,n); - wk = x; - ok=x; - uk=x; - L1=eye(m)/(AA+eye(m)); - L2=L1*A'*b; - for i = 1:itrs - tempc=ok-uk; - ck = L2+L1*tempc; - ok = max( ck+uk - lam,0 ) - max( -ck-uk - lam ,0); - uk=uk+(ck-ok); - wk=ok; - end - end - - - end %method -end %class - - +classdef SASO_Class_online + properties + Name = 'Increamental Broad Learning System'; + + 
NumPerWin % Feature nodes per window + NumWindow % Number of windows of feature nodes + NumEnhance % Number of enhancement nodes + NumAddFea % Number of feature nodes per increment step + NumAddRel % Number of enhancement nodes related to the incremental feature nodes per increment step + NumAddEnh % Number of enhancement nodes in each incremental learning + IncStep % Steps of incremental learning + ShrScale % The shrinkage scale of the enhancement nodes + RelScale % The shrinkage scale of the related added enhancement nodes + L2Param % The L2 regularization parameter + SpaInpFeaWei % Sparce weight matrix from input to feature layer + NormFeaTot = struct % Normlization setting of feature layer + FeaEnhWei % Weight matrix from feature layer to enhance layer + Beta % Weight matrix from feature layer and enhance layer to output + TotFeaSpa % total feature matrix + A_Matrix_Train % State matrix + AllFeaAddEnhWei % Weight from all feature nodes to added enhancement nodes + AddFeaRelWei % Weight from added featrue nodes to related enhancement nodes + AddEnhScale % The shrinkage scale of the added enhancement nodes + A_Inverse % Inverse of state matrix + sigfun % Activation funciton + InitMed % Model initializaiton method + BanNodes % List of baned nodes + FeaPD % Partial differential of feature nodes + AllPD % Partial differential of all nodes + AddNodeStep % Nodes add step + AddDataStep % Data add step + + end + %% Functions and algorithm + methods + %% Functions and algorithm + function Obj = SASO_Class_online(NumPerWin,NumWindow,NumEnhance,NumAddFea,NumAddRel,NumAddEnh,ShrScale,L2Param,BanIndex,AddNodeStep,AddDataStep,sigfun,InitMed) + Obj.NumPerWin = NumPerWin; + Obj.NumWindow = NumWindow; + Obj.NumEnhance = NumEnhance; + Obj.NumAddFea = NumAddFea; + Obj.NumAddRel = NumAddRel; + Obj.NumAddEnh = NumAddEnh; + Obj.ShrScale = ShrScale; + Obj.L2Param = L2Param; + Obj.sigfun = sigfun; + Obj.InitMed = InitMed; + Obj.BanNodes = BanIndex; + Obj.AddNodeStep = AddNodeStep; + 
Obj.AddDataStep = AddDataStep;




        end
        %% Train
        % Initial (batch) training of the broad learning system.
        % Input  : raw sample matrix, one sample per row.
        % Target : training targets, one row per sample.
        % Builds NumWindow groups of sparse feature nodes plus one group of
        % enhancement nodes, then solves the ridge-regularized output
        % weights Beta via the pseudo-inverse A_Inverse.
        function Obj = Train(Obj, Input, Target)
            %% feature nodes
            Input = zscore(Input')';                                  % standardize each input feature (zscore works column-wise, hence the double transpose)
            InputMat = [Input .1 * ones(size(Input,1),1)];            % input with an appended bias column (constant 0.1)
            Obj.TotFeaSpa=zeros(size(Input,1),Obj.NumWindow * Obj.NumPerWin);
            for i = 1:Obj.NumWindow
                % Random projection, then sparse autoencoder refinement (Sparse_bls)
                InpFeaWei = MyClassTools.IntialMed(size(Input,2)+1,Obj.NumPerWin,Obj.InitMed);
                FeaPerWin = mapminmax(InputMat * InpFeaWei);
                Obj.SpaInpFeaWei{i} = Obj.Sparse_bls(FeaPerWin,InputMat,1e-3,50)';
                Fea_Temp = InputMat * Obj.SpaInpFeaWei{i};
                % Normalize this window's features to [0,1] and keep the
                % mapminmax settings so the same mapping can be applied at test time.
                % NOTE(review): IncBLS normalizes its added features to [-1,1]
                % instead — confirm the range difference is intentional.
                [Fea_Temp,NormFea] = mapminmax(Fea_Temp',0,1);
                NormTotTemp(i) = NormFea;
                Obj.TotFeaSpa(:,Obj.NumPerWin*(i-1)+1:Obj.NumPerWin*i) = Fea_Temp';
            end
            Obj.NormFeaTot = NormTotTemp;
            % clear FeaPerWin InputMat input Fea_Temp;
            %% enhancement nodes
            FeaMat = [Obj.TotFeaSpa .1 * ones(size(Obj.TotFeaSpa,1),1)];  % feature nodes plus bias column
            % orth() needs more rows than columns; transpose twice in the tall case
            if Obj.NumPerWin * Obj.NumWindow >= Obj.NumEnhance
                Obj.FeaEnhWei = orth(MyClassTools.IntialMed(Obj.NumWindow * Obj.NumPerWin + 1,Obj.NumEnhance,Obj.InitMed));
            else
                Obj.FeaEnhWei = orth(MyClassTools.IntialMed(Obj.NumWindow * Obj.NumPerWin + 1,Obj.NumEnhance,Obj.InitMed)')';

            end
            Enhance = FeaMat * Obj.FeaEnhWei;
            clear FeaMat;
            % Rescale the shrink factor by the largest pre-activation so the
            % sigmoid input stays in a controlled range.
            Obj.ShrScale = Obj.ShrScale / max(max(Enhance));
            if strcmp(Obj.sigfun,'logsig')
                Enhance = logsig(Enhance * Obj.ShrScale);
            else
                Enhance = tansig(Enhance * Obj.ShrScale);
            end
            % State matrix A = [feature nodes, enhancement nodes]
            Obj.A_Matrix_Train = [Obj.TotFeaSpa Enhance];
            clear Enhance;
            % Ridge-regularized pseudo-inverse: (A'A + lambda*I) \ A'
            Obj.A_Inverse = (Obj.A_Matrix_Train' * Obj.A_Matrix_Train + eye(size(Obj.A_Matrix_Train',1)) * (Obj.L2Param)) \ ( Obj.A_Matrix_Train');
            Obj.Beta = Obj.A_Inverse * Target;
        end

        %% data incremental process
        % Incremental learning with NEW SAMPLES (network structure unchanged).
        % AddInput  : the newly arrived samples only.
        % AllTarget : targets for ALL samples seen so far (old + new) — Beta is
        %             recomputed against the concatenated pseudo-inverse.
        function Obj = DataIncBLS(Obj,AddInput,AllTarget)
            AddInput = zscore(AddInput')';
            AddInputMat = [AddInput .1 * ones(size(AddInput,1),1)];   % new samples plus bias column
            AddInputOriFea_Tot = [];
            % Re-apply every stored feature mapping (original windows plus any
            % windows added later by IncBLS) to the new samples.
            for i = 1:Obj.NumWindow+Obj.AddNodeStep
                AddInputFea_Temp = AddInputMat * Obj.SpaInpFeaWei{i};
                AddInputFea_Temp = mapminmax('apply',AddInputFea_Temp',Obj.NormFeaTot(i))';
                AddInputOriFea_Tot = [AddInputOriFea_Tot AddInputFea_Temp];
            end
            Obj.TotFeaSpa = [Obj.TotFeaSpa;AddInputOriFea_Tot];

            if Obj.AddNodeStep == 0
                % No node-increment steps yet: only the original enhancement
                % group needs to be evaluated for the new samples.
                AddFeaMat = [AddInputOriFea_Tot .1 * ones(size(AddInputOriFea_Tot,1),1)];
                AddEnhance = AddFeaMat * Obj.FeaEnhWei;
                % Obj.AddDataScale(Obj.AddDataStep) = Obj.ShrScale / max(max(AddEnhance));
                if strcmp(Obj.sigfun,'logsig')
                    AddEnhance = logsig(AddEnhance * Obj.ShrScale);
                elseif strcmp(Obj.sigfun,'tansig')
                    AddEnhance = tansig(AddEnhance * Obj.ShrScale);
                end
                A_Matrix_Add = [AddInputOriFea_Tot AddEnhance];
            else
                % Node-increment steps exist: rebuild, for the new samples, the
                % original feature/enhancement groups and then each added
                % (feature, relation, enhancement) triple in step order so that
                % A_Matrix_Add has the same column layout as A_Matrix_Train.
                AddInputOriFea = AddInputOriFea_Tot(:,1:Obj.NumWindow*Obj.NumPerWin);
                AddInputOriFeaMat = [AddInputOriFea .1 * ones(size(AddInputOriFea_Tot,1),1)];
                AddInputOriEnh = AddInputOriFeaMat * Obj.FeaEnhWei;
                if strcmp(Obj.sigfun,'logsig')
                    AddInputOriEnh = logsig(AddInputOriEnh * Obj.ShrScale);
                elseif strcmp(Obj.sigfun,'tansig')
                    AddInputOriEnh = tansig(AddInputOriEnh * Obj.ShrScale);
                end
                A_Matrix_Add = [AddInputOriFea AddInputOriEnh];
                for j = 1:Obj.AddNodeStep
                    % Feature nodes added at step j
                    AddInputAddFea = AddInputOriFea_Tot(:,Obj.NumWindow*Obj.NumPerWin+(j-1)*Obj.NumAddFea+1:Obj.NumWindow*Obj.NumPerWin+j*Obj.NumAddFea);
                    AddInputAddFeaMat = [AddInputAddFea .1 * ones(size(AddInputAddFea,1),1)];
                    % Relation nodes of step j (driven only by step-j features)
                    AddInputRel = AddInputAddFeaMat * Obj.AddFeaRelWei{j};
                    if strcmp(Obj.sigfun,'logsig')
                        AddInputRel = logsig(AddInputRel * Obj.RelScale(j));
                    else
                        AddInputRel = tansig(AddInputRel * Obj.RelScale(j));
                    end
                    % Enhancement nodes of step j (driven by ALL features up to step j)
                    AddInputOriFea = AddInputOriFea_Tot(:,1:Obj.NumWindow*Obj.NumPerWin+j*Obj.NumAddFea);
                    AddInputAllFeaMat = [AddInputOriFea .1 * ones(size(AddInputAddFea,1),1)];
                    AddInputAddEnh = AddInputAllFeaMat * Obj.AllFeaAddEnhWei{j};
                    if strcmp(Obj.sigfun,'logsig')
                        AddInputAddEnh = logsig(AddInputAddEnh * Obj.AddEnhScale(j));
                    else
                        AddInputAddEnh = tansig(AddInputAddEnh * Obj.AddEnhScale(j));
                    end
                    A_Matrix_Add = [A_Matrix_Add,AddInputAddFea,AddInputRel,AddInputAddEnh];
                end

            end
            % Ridge pseudo-inverse of the new rows, appended column-wise so that
            % Beta = [A_inv_old  A_inv_new] * AllTarget uses all samples.
            AddA_Inverse = (A_Matrix_Add' * A_Matrix_Add+eye(size(A_Matrix_Add',1)) * (Obj.L2Param)) \ ( A_Matrix_Add' );
            Obj.A_Inverse = [Obj.A_Inverse AddA_Inverse];
            Obj.Beta = Obj.A_Inverse * AllTarget;
            Obj.A_Matrix_Train = [Obj.A_Matrix_Train;A_Matrix_Add];
        end

        % Forward pass: map Data through all stored feature / relation /
        % enhancement groups and return the network output Data*...*Beta.
        function output = GetOutput(Obj,Data)
            Data = zscore(Data')';
            InpMat = [Data .1 * ones(size(Data,1),1)];                % samples plus bias column
            AllFea = zeros(size(Data,1),Obj.NumWindow * Obj.NumPerWin);
            clear Data
            for i=1:Obj.NumWindow
                FeatTemp = InpMat * Obj.SpaInpFeaWei{i};
                FeatTemp = mapminmax('apply',FeatTemp',Obj.NormFeaTot(i))';  % reuse training-time normalization
                AllFea(:,Obj.NumPerWin*(i-1)+1:Obj.NumPerWin*i)=FeatTemp;
            end
            clear FeatTemp;
            if Obj.AddNodeStep == 0
                % No incremental nodes: A = [features, enhancements]
                FeaMat = [AllFea .1 * ones(size(AllFea,1),1)];
                if strcmp(Obj.sigfun,'logsig')
                    AllEnh = logsig(FeaMat * Obj.FeaEnhWei * Obj.ShrScale);
                else
                    AllEnh = tansig(FeaMat * Obj.FeaEnhWei * Obj.ShrScale);
                end
                AMatrix = [AllFea AllEnh];
                clear FeaMat;
                clear AllEnh;
                output = AMatrix * Obj.Beta;
            else
                % Rebuild each added (feature, relation, enhancement) triple in
                % step order, mirroring the column layout of A_Matrix_Train.
                OriFea = AllFea;
                OriFeaMat = [OriFea .1 * ones(size(AllFea,1),1)];
                if strcmp(Obj.sigfun,'logsig')
                    AMatrix = [OriFea logsig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale)];
                else
                    AMatrix = [OriFea tansig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale)];
                end
                clear OriFeaMat AllFea
                for j=1:Obj.AddNodeStep
                    AddFea = InpMat * Obj.SpaInpFeaWei{Obj.NumWindow+j};
                    AddFea = mapminmax('apply',AddFea',Obj.NormFeaTot(Obj.NumWindow + j))';
                    if isempty(AddFea)
                        AddRel = [];              % step j added no feature nodes; skip its relation group
                    else
                        AddFeaMat = [AddFea .1 * ones(size(AddFea,1),1)];
                        if strcmp(Obj.sigfun,'logsig')
                            AddRel = logsig(AddFeaMat * Obj.AddFeaRelWei{j} * Obj.RelScale(j));
                        else
                            AddRel = tansig(AddFeaMat * Obj.AddFeaRelWei{j} * Obj.RelScale(j));
                        end
                    end
                    OriFea = [OriFea AddFea];     % accumulate all features up to step j
                    FeaMat = [OriFea .1 * ones(size(OriFea,1),1)];
                    if strcmp(Obj.sigfun,'logsig')
                        AddEnh = logsig(FeaMat * Obj.AllFeaAddEnhWei{j} * Obj.AddEnhScale(j));
                    else
                        AddEnh = tansig(FeaMat * Obj.AllFeaAddEnhWei{j} * Obj.AddEnhScale(j));
                    end
                    AddTot = [AddFea AddRel AddEnh];
                    AMatrix = [AMatrix AddTot];
                end
                clear AddTot AllFea AddFea AddRel AddEnh FeaMat AddFeaMat InpMat
                output = AMatrix * Obj.Beta;
            end
        end

        %% Nodes incremental process
        % Incremental learning with NEW NODES (same training samples).
        % Adds one step of NumAddFea feature nodes, NumAddRel relation nodes and
        % NumAddEnh enhancement nodes, then updates A_Inverse/Beta with the
        % Greville-style block pseudo-inverse update instead of retraining.
        % NOTE(review): assumes Obj.AddNodeStep has already been advanced to the
        % current step index before this call — confirm against the caller.
        function Obj = IncBLS(Obj,Input,Target)
            Input = zscore(Input')';
            InputMat = [Input .1 * ones(size(Input,1),1)];            % samples plus bias column
            AddInpFeaWei = MyClassTools.IntialMed(size(Input,2)+1,Obj.NumAddFea,Obj.InitMed);

            AddFeature = mapminmax(InputMat * AddInpFeaWei);
            clear AddInpFeaWei Input;
            % Sparse-code the random projection, store it as an extra window
            AddSapInpFeaWei = Obj.Sparse_bls(AddFeature,InputMat,1e-3,50)';
            Obj.SpaInpFeaWei{Obj.NumWindow + Obj.AddNodeStep} = AddSapInpFeaWei;
            clear AddFeature;
            AddFeaSpa = InputMat * AddSapInpFeaWei;
            clear InputMat

            % Normalize new features to [-1,1] and remember the mapping
            % (Train uses [0,1] — see NOTE(review) there).
            [AddFeaSpa,AddNormFea] = mapminmax(AddFeaSpa',-1,1);
            AddFeaSpa = AddFeaSpa';
            Obj.NormFeaTot(Obj.NumWindow + Obj.AddNodeStep) = AddNormFea;
            clear AddNormFea

            Obj.TotFeaSpa = [Obj.TotFeaSpa AddFeaSpa];
            FeaMat = [Obj.TotFeaSpa .1 * ones(size(Obj.TotFeaSpa,1),1)];
            if isempty(AddFeaSpa)
                AddFeaSpa = [];
                AddRel = [];
            else
                AddFeaMat = [AddFeaSpa .1 * ones(size(AddFeaSpa,1),1)];
                % orth() orientation trick, as in Train
                if Obj.NumAddFea >= Obj.NumAddRel
                    Obj.AddFeaRelWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumAddFea+1,Obj.NumAddRel,Obj.InitMed));

                else
                    Obj.AddFeaRelWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumAddFea+1,Obj.NumAddRel,Obj.InitMed)')';

                end
                AddRel = AddFeaMat * Obj.AddFeaRelWei{Obj.AddNodeStep};
                Obj.RelScale(Obj.AddNodeStep) = Obj.ShrScale / max(max(AddRel));
                if strcmp(Obj.sigfun,'logsig')
                    AddRel = logsig(AddRel * Obj.RelScale(Obj.AddNodeStep));
                else
                    AddRel = tansig(AddRel * Obj.RelScale(Obj.AddNodeStep));
                end
                clear AddFeaMat;
            end
            if Obj.NumWindow*Obj.NumPerWin+Obj.AddNodeStep*Obj.NumAddFea >= Obj.NumAddEnh
                Obj.AllFeaAddEnhWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumWindow*Obj.NumPerWin+Obj.AddNodeStep*Obj.NumAddFea+1,Obj.NumAddEnh,Obj.InitMed));
            else
                Obj.AllFeaAddEnhWei{Obj.AddNodeStep} = orth(MyClassTools.IntialMed(Obj.NumWindow*Obj.NumPerWin+Obj.AddNodeStep*Obj.NumAddFea+1,Obj.NumAddEnh,Obj.InitMed)')';
            end
            % New enhancement group is driven by ALL feature nodes so far
            AddEnh = FeaMat * Obj.AllFeaAddEnhWei{Obj.AddNodeStep};
            Obj.AddEnhScale(Obj.AddNodeStep) = Obj.ShrScale / max(max(AddEnh));
            if strcmp(Obj.sigfun,'logsig')
                AddEnh = logsig(AddEnh * Obj.AddEnhScale(Obj.AddNodeStep));
            else
                AddEnh = tansig(AddEnh * Obj.AddEnhScale(Obj.AddNodeStep));
            end
            clear FeaMat
            A_Matrix_Add = [AddFeaSpa AddRel AddEnh];
            A_Matrix_Tot = [Obj.A_Matrix_Train A_Matrix_Add];
            clear AddFeaSpa AddRel AddEnh
            % Block pseudo-inverse update for appended columns:
            %   D = A_inv * A_add,  C = A_add - A*D
            Vec_D = Obj.A_Inverse * A_Matrix_Add;
            Vec_C = A_Matrix_Add - Obj.A_Matrix_Train * Vec_D;
            clear A_Matrix_Add
            if all(Vec_C(:)==0)
                % New columns lie in the span of the old ones
                [~,w] = size(Vec_D);
                Vec_B = (eye(w)-Vec_D'*Vec_D)\(Vec_D'*Obj.A_Inverse);
            else
                Vec_B = (Vec_C' * Vec_C+eye(size(Vec_C',1)) * (Obj.L2Param)) \ ( Vec_C' );
            end
            Obj.A_Inverse = [Obj.A_Inverse-Vec_D*Vec_B;Vec_B];
            clear Vec_B Vec_C Vec_D
            Obj.Beta = Obj.A_Inverse * Target;
            Obj.A_Matrix_Train = A_Matrix_Tot;
        end


        %% Test



        % Forward pass with node pruning: nodes listed in Obj.BanNodes are
        % disabled before computing the output.
        %   BanType : 'FeatureNodes' / 'EnhanNodes' / 'All' — which node class
        %             the indices in Obj.BanNodes refer to.
        %   UType   : 'update' re-solves Beta on the pruned state matrix;
        %             anything else reuses the stored Beta.
        % Banned FEATURE nodes are zeroed BEFORE the enhancement/relation
        % mappings, so downstream nodes see the pruned features; banned columns
        % are then deleted from the state matrix.
        % NOTE(review): if Obj.AddNodeStep == 0 and BanType == 'EnhanNodes',
        % AMatrix is never assigned before `AMatrix(:,Obj.BanNodes) = []` and
        % this method errors — confirm that combination cannot occur.
        function [Obj,output] = PrunOutput(Obj,Data,BanType,Target,UType)
            Data = zscore(Data')';
            InpMat = [Data .1 * ones(size(Data,1),1)];
            OriFea=zeros(size(Data,1),Obj.NumWindow * Obj.NumPerWin);
            clear Data
            for i=1:Obj.NumWindow
                AddFea = InpMat * Obj.SpaInpFeaWei{i};
                AddFea = mapminmax('apply',AddFea',Obj.NormFeaTot(i))';
                OriFea(:,Obj.NumPerWin*(i-1)+1:Obj.NumPerWin*i)=AddFea;
            end
            OriFeaMat = [OriFea .1 * ones(size(OriFea,1),1)];
            if strcmp(Obj.sigfun,'logsig')
                OriEnh = logsig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale);
            else
                OriEnh = tansig(OriFeaMat * Obj.FeaEnhWei * Obj.ShrScale);
            end
            OriAMatrix = [OriFea OriEnh];             % unpruned state matrix (original groups)
            clear OriEnh
            OriFeaSA0 = OriFea;                       % copy with banned feature columns zeroed
            if strcmp(BanType,'FeatureNodes') || strcmp(BanType,'All')
                % Indices <= NumWindow*NumPerWin refer to original feature nodes
                OriBanFea = Obj.BanNodes(Obj.BanNodes <= Obj.NumWindow * Obj.NumPerWin);
                OriFeaSA0(:,OriBanFea) = 0 ;
            end

            clear OutputFeature_Temp;
            % Recompute original enhancements from the ZEROED features
            OriFeaMat0 = [OriFeaSA0 .1 * ones(size(OriFeaSA0,1),1)];
            if strcmp(Obj.sigfun,'logsig')
                OriEnhSA0 = logsig(OriFeaMat0 * Obj.FeaEnhWei * Obj.ShrScale);
            else
                OriEnhSA0 = tansig(OriFeaMat0 * Obj.FeaEnhWei * Obj.ShrScale);
            end

            OriAMatrix0 = [OriFeaSA0 OriEnhSA0];      % pruned state matrix under construction
            AllAddFeaSA0 = [];
            clear OriEnhSA0
            %% Incremental learning
            if Obj.AddNodeStep > 0
                for i = 1:Obj.AddNodeStep
                    % Unpruned step-i groups (for the 'EnhanNodes' path)
                    AddFea = InpMat * Obj.SpaInpFeaWei{i+Obj.NumWindow};
                    AddFea = mapminmax('apply',AddFea',Obj.NormFeaTot(i+Obj.NumWindow))';
                    AddFeaMat = [AddFea .1 * ones(size(AddFea,1),1)];
                    if strcmp(Obj.sigfun,'logsig')
                        AddRel = logsig(AddFeaMat * Obj.AddFeaRelWei{i} * Obj.RelScale(i));
                    else
                        AddRel = tansig(AddFeaMat * Obj.AddFeaRelWei{i} * Obj.RelScale(i));
                    end
                    OriFea = [OriFea AddFea];
                    OriFeaMat = [OriFea .1 * ones(size(OriFea,1),1)];
                    if strcmp(Obj.sigfun,'logsig')
                        AddEnh = logsig(OriFeaMat * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i));
                    else
                        AddEnh = tansig(OriFeaMat * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i));
                    end

                    AMatrix = [OriAMatrix AddFea AddRel AddEnh];
                    AddFeaSA0 = AddFea;
                    AllAddFeaSA0 = [AllAddFeaSA0 AddFeaSA0];
                    clear AddRel AddEnh AddFea OriFeaMat
                    if strcmp(BanType,'FeatureNodes') || strcmp(BanType,'All')
                        % Select ban indices falling in step i's feature-node
                        % column range of the full state matrix, convert them to
                        % local column indices, zero them, then recompute step
                        % i's relation/enhancement groups from the zeroed features.
                        AddBanFea = Obj.BanNodes(Obj.BanNodes > Obj.NumWindow * Obj.NumPerWin + Obj.NumEnhance+(i-1) * (Obj.NumAddFea+Obj.NumAddEnh+Obj.NumAddRel)&...
                            Obj.BanNodes <= Obj.NumWindow * Obj.NumPerWin + Obj.NumEnhance+(i-1) * (Obj.NumAddFea+Obj.NumAddEnh+Obj.NumAddRel)+Obj.NumAddFea);
                        AddFeaSA0(:,AddBanFea-Obj.NumWindow * Obj.NumPerWin - Obj.NumEnhance-(i-1)*(Obj.NumAddFea+Obj.NumAddEnh+Obj.NumAddRel)) = 0;
                        clear AddBanFea
                        AddFeaMatSA0 = [AddFeaSA0 .1 * ones(size(AddFeaSA0,1),1)];
                        AllFeaMatSA0 = [OriFeaSA0 AllAddFeaSA0 .1 * ones(size(OriFeaSA0,1),1)];
                        if strcmp(Obj.sigfun,'logsig')
                            AddRelSA0 = logsig(AddFeaMatSA0 * Obj.AddFeaRelWei{i} * Obj.RelScale(i));
                            AddEnhSA0 = logsig(AllFeaMatSA0 * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i));
                        else
                            AddRelSA0 = tansig(AddFeaMatSA0 * Obj.AddFeaRelWei{i} * Obj.RelScale(i));
                            AddEnhSA0 = tansig(AllFeaMatSA0 * Obj.AllFeaAddEnhWei{i} * Obj.AddEnhScale(i));
                        end
                        clear AddFeaMatSA0 AllFeaMatSA0
                        OriAMatrix0 = [OriAMatrix0 AddFeaSA0 AddRelSA0 AddEnhSA0];
                    end
                end
            end
            clear AddFeaSA0 AddRelSA0 AddEnhSA0 AllFeaMatSA0 AddFeaMatSA0 AddBanFea OriAMatrix
            if strcmp(BanType,'EnhanNodes')
                % Enhancement pruning: delete banned columns from the unpruned matrix
                AMatrix(:,Obj.BanNodes) = [] ;
            else
                % Feature pruning: delete banned columns from the zeroed matrix
                OriAMatrix0(:, Obj.BanNodes) = [];
                AMatrix = OriAMatrix0;
            end
            clear OriAMatrix0
            if strcmp(UType,'update')
                % Re-solve the ridge-regularized output weights on the pruned matrix
                Obj.Beta = (AMatrix' * AMatrix+eye(size(AMatrix',1)) * (Obj.L2Param)) \ ( AMatrix' * Target);
            end

            clear FeaMat;
            clear OutputEnhance;
            output = AMatrix * Obj.Beta;
        end

        % Sparse autoencoder solved by an ADMM-style iteration with L1
        % soft-thresholding: finds sparse wk minimizing ||A*wk - b|| + lam*|wk|_1.
        %   A    : feature matrix (the randomly projected features)
        %   b    : reconstruction target (the biased input matrix)
        %   lam  : L1 penalty / soft-threshold level
        %   itrs : number of iterations
        function wk = Sparse_bls(Obj,A,b,lam,itrs)
            AA = (A') * A;
            m = size(A,2);
            n = size(b,2);
            x = zeros(m,n);
            wk = x;
            ok=x;
            uk=x;
            L1=eye(m)/(AA+eye(m));    % (A'A + I)^-1, hoisted out of the loop
            L2=L1*A'*b;
            for i = 1:itrs
                tempc=ok-uk;
                ck = L2+L1*tempc;
                % Soft-threshold step: shrink toward zero by lam
                ok = max( ck+uk - lam,0 ) - max( -ck-uk - lam ,0);
                uk=uk+(ck-ok);
                wk=ok;
            end
        end


    end %method
end %class
