%README____________________________________________________________________
% This package contains basic functions for running the Spatial Filter Network (SFN) algorithm.
% It includes the following '.m' files:
%
% SFN_demo.m:     a demo script calling the toy data generation, SFN training and SFN classification functions.
% genToyData.m:   generates artificial epochs.
% SFN_train_LM.m: creates an SFN network and trains it with the Levenberg-Marquardt algorithm.
% SFN_train_BP.m: creates an SFN network and trains it with the backpropagation algorithm.
% SFN_test.m:     classifies input data with a trained SFN and generates the outputs.
%
% DATE: 02.10.2014
% AUTHOR: Ayhan Yuksel
% Contact: yukselay@itu.edu.tr, ayhanyks@yahoo.com

%SFN_demo.m______________________________________________________________________
%SFN_demo calls the toy data generation function and the SFN train & test
%functions. Edit SFN_demo to run the algorithms with different parameters.
%DATE: 02.10.2014
%AUTHOR: Ayhan Yuksel
%Contact: yukselay@itu.edu.tr, ayhanyks@yahoo.com
% % Copyright (c) 2014, Yuksel A.
% % All rights reserved.
% %
% % Redistribution and use in source and binary forms, with or without
% % modification, are permitted provided that the following conditions are
% % met:
% %
% %     * Redistributions of source code must retain the above copyright
% %       notice, this list of conditions and the following disclaimer.
% %     * Redistributions in binary form must reproduce the above copyright
% %       notice, this list of conditions and the following disclaimer in
% %       the documentation and/or other materials provided with the distribution
% %
% % THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
% % AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
% % IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
% % ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
% % LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
% % CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
% % SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
% % INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
% % CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
% % ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
% % POSSIBILITY OF SUCH DAMAGE.

%toy data properties:
K=100; %number of epochs
N=2;   %channels in an epoch
T=100; %samples in an epoch
C=2;   %number of classes in the data set (Labels: 1,2,...,C)

%covariances may be given as input:
cov{1}=[2,1;1,2];
cov{2}=[2,-1;-1,2];
[DATA,LBL]=genToyData(K,N,T,C,'covariances',cov);

%generate train and test sets:
rprm=randperm(K);
TR_K=fix(K/2); %half of the data is used as training data
TR_IDX=rprm(1:TR_K);
TS_IDX=rprm(TR_K+1:end);
TR_EP=DATA(TR_IDX);
TR_LB=LBL(TR_IDX);
TS_EP=DATA(TS_IDX);
TS_LB=LBL(TS_IDX);

%plot the data if it is two-dimensional:
if(N==2)
    figure;
    colormap(lines(C))
    for k=1:K
        epdt=DATA{k};
        fdt(k,:)=log(var(epdt));
    end
    scatter(fdt(:,1),fdt(:,2),25,LBL);
    title('log(var) feature of original data (Train+Test)');
    xlabel('f_1');
    ylabel('f_2');
end

%run SFN
EMIN=1e-2;   %minimum error to stop iterating
ITRMAX=4000; %maximum number of iterations
M=2;         %number of neurons in the hidden layer (spatial filters)
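%Illustrative note (an addition for clarity, not part of the original demo): with
%the covariances chosen above, both classes have the same per-channel variance
%(2 on each channel), so the raw log-variance features largely overlap; the
%classes differ only in the sign of their cross-covariance. A spatial filter that
%mixes the channels exposes this difference: the filters w=[1;1] and w=[1;-1]
%give filtered variances w'*cov{c}*w of 6 vs 2 and 2 vs 6 for the two classes.
%Uncomment the two lines below to verify:
% w1=[1;1]; w2=[1;-1];
% disp([w1'*cov{1}*w1, w1'*cov{2}*w1; w2'*cov{1}*w2, w2'*cov{2}*w2]);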
%Levenberg-Marquardt training:
SFN=SFN_train_LM(TR_EP,TR_LB,'EMIN',EMIN,'ITRMAX',ITRMAX,'M',M,'dbg',1);
%backpropagation training:
% SFN=SFN_train_BP(TR_EP,TR_LB,'EMIN',EMIN,'ITRMAX',ITRMAX,'M',M,'dbg',1,'mu',1e-3);

%classify the test data
[Y1,F]=SFN_test(SFN,TS_EP);

%generate the decision table (confusion matrix):
dtable=zeros(C,C);
for k=1:numel(Y1)
    y0=TS_LB(k);
    y1=Y1(k);
    dtable(y0,y1)=dtable(y0,y1)+1;
end

%calculate accuracy:
acc=100*trace(dtable)/sum(sum(dtable));
disp(sprintf('acc tst=%0.3f', acc));

%plot the spatially filtered data and the class border(s)
%(only if the filtered data is two-dimensional, i.e. M==2):
if(M==2)
    figure;
    colormap(lines(C))
    scatter(F(:,1),F(:,2),25,TS_LB);
    hold on
    %plot the class border(s)
    xmin=min(F(:,1));
    ymin=min(F(:,2));
    xmax=max(F(:,1));
    ymax=max(F(:,2));
    V=SFN.V;
    b=SFN.b;
    O=size(SFN.V,2);
    for o=1:O
        x1=xmin;
        y1=-(V(1,o)*xmin + b(o))/V(2,o);
        x2=xmax;
        y2=-(V(1,o)*xmax + b(o))/V(2,o);
        plot([x1,x2],[y1,y2],'LineStyle','-.','Color',[0,0,0],'LineWidth',2);
    end
    title('log(var) feature of spatially filtered data with SFN (Test)');
    xlabel('f_1');
    ylabel('f_2');
    hold off;
end

% % % % % % %
%genToyData.m______________________________________________________________________
%[EPOCHES, LABELS, COV]=genToyData(K,N,T,C,varargin)
%genToyData generates artificial epochs.
%
%Input arguments:
%K: number of epochs (positive integer)
%N: number of channels (positive integer)
%T: number of samples in one epoch (positive integer)
%C: number of classes (different covariance matrices) in the generated data set (positive integer)
%
%Optional input argument(s):
%'Covariances', COV: covariance matrices of each class (cell array). If this
%   parameter is not given, covariance matrices are generated randomly.
%
%Output arguments:
%EPOCHES: generated epochs (cell array with K cells)
%LABELS: class labels of the generated epochs (array with K elements)
%COV: covariance matrices (cell array with C cells)
%DATE: 02.10.2014
%AUTHOR: Ayhan Yuksel
%Contact: yukselay@itu.edu.tr, ayhanyks@yahoo.com
% % Copyright (c) 2014, Yuksel A.
% % All rights reserved.
% %
% % Redistribution and use in source and binary forms, with or without
% % modification, are permitted provided that the following conditions are
% % met:
% %
% %     * Redistributions of source code must retain the above copyright
% %       notice, this list of conditions and the following disclaimer.
% %     * Redistributions in binary form must reproduce the above copyright
% %       notice, this list of conditions and the following disclaimer in
% %       the documentation and/or other materials provided with the distribution
% %
% % THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
% % AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
% % IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
% % ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
% % LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
% % CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
% % SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
% % INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
% % CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
% % ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
% % POSSIBILITY OF SUCH DAMAGE.
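%Example (an illustrative sketch mirroring the call made in SFN_demo.m; the
%variable names cv, EP and LB are used here only for illustration; note that
%mvnrnd below requires the Statistics Toolbox):
%   cv{1}=[2,1;1,2]; cv{2}=[2,-1;-1,2];
%   [EP,LB,COV]=genToyData(100,2,100,2,'Covariances',cv); %user-defined covariances
%   [EP,LB,COV]=genToyData(100,2,100,2);                  %random covariances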
function [EPOCHES, LABELS, COV]=genToyData(K,N,T,C,varargin)

%parse optional arguments
nVarargs = length(varargin);
for k = 1:2:nVarargs
    str=varargin{k};
    if(strcmpi(str,'Covariances'))
        COV=varargin{k+1};
    else
        fprintf('%s could not be recognized\n',varargin{k});
    end
end

%generate epoch labels
clsid=1:C;
LABELS=randi(C,K,1);

%generate random covariance matrices (SPD matrices) specific to each class:
if(~exist('COV','var'))
    for c=1:C
        d=rand(N,1)+0.01;    %positive scale factors
        W=randn(N,N);        %random mixing matrix
        COV{c}=W'*diag(d)*W; %symmetric positive definite covariance matrix
    end
end

%generate epochs (each is a TxN matrix)
mu=zeros(1,N);
for k=1:K
    c=LABELS(k);
    EPOCHES{k}=mvnrnd(mu, COV{c}, T);
end

% % % % % % %
%SFN_train_LM.m__________________________________________________________________
%function SFNNET=SFN_train_LM(TREP,TRLB,varargin)
%Creates an SFN network and trains it with the Levenberg-Marquardt algorithm.
%
%INPUTS:
%TREP: train set epochs (cell array)
%TRLB: train set labels (array)
%
%Optional arguments:
%'M', M: number of neurons in the hidden layer (default: equals the number of classes)
%'EMIN', EMIN: error value for terminating the training (default: 1e-1)
%'ITRMAX', ITRMAX: maximum number of iterations for terminating the training (default: 1000)
%'mu', mu: initial combination coefficient for the LM method (default: 100)
%'beta', beta: mu multiplier/divider for the LM method (default: 2)
%'W', W: initial spatial filter layer matrix (default: randomly generated)
%'V', V: initial classifier layer matrix (default: randomly generated)
%'b', b: initial classifier layer bias vector (default: randomly generated)
%'dbg', dbg: debug level (verbosity) (default: 0, no verbosity)
%
%OUTPUTS:
%SFNNET: structure holding the following elements:
%.W: spatial filter layer weight matrix
%.V: classifier layer weight matrix
%.b: classifier layer biases
%.lastErr: final error value of the network at the end of the training
%.itr: final iteration count at the end of the training
%.mu: final combination coefficient value at the end of the training
%.CLS: set of class labels
%DATE: 02.10.2014
%AUTHOR: Ayhan Yuksel
%Contact: yukselay@itu.edu.tr, ayhanyks@yahoo.com
% % Copyright (c) 2014, Yuksel A.
% % All rights reserved.
% %
% % Redistribution and use in source and binary forms, with or without
% % modification, are permitted provided that the following conditions are
% % met:
% %
% %     * Redistributions of source code must retain the above copyright
% %       notice, this list of conditions and the following disclaimer.
% %     * Redistributions in binary form must reproduce the above copyright
% %       notice, this list of conditions and the following disclaimer in
% %       the documentation and/or other materials provided with the distribution
% %
% % THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
% % AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
% % IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
% % ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
% % LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
% % CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
% % SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
% % INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
% % CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
% % ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
% % POSSIBILITY OF SUCH DAMAGE.
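%Example (an illustrative sketch following SFN_demo.m; TR_EP/TR_LB are training
%epochs/labels and TS_EP are test epochs as produced there):
%   SFN=SFN_train_LM(TR_EP,TR_LB,'EMIN',1e-2,'ITRMAX',4000,'M',2,'dbg',1);
%   [Y,F]=SFN_test(SFN,TS_EP); %Y: predicted labels, F: features of the spatially filtered data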
function SFNNET=SFN_train_LM(TREP,TRLB,varargin)

K=numel(TREP);      %number of epochs in the training set
N=size(TREP{1},2);  %number of channels = number of input neurons
T=size(TREP{1},1);  %number of samples in an epoch
CLSLB=unique(TRLB); %class labels
C=numel(CLSLB);     %number of classes in the training set

%number of output neurons
if(C==2)
    O=1;
    %create desired output vectors
    for k=1:K
        cls=TRLB(k);
        if(cls==CLSLB(1))
            DSR(k,1)=1;
        else
            DSR(k,1)=-1;
        end
    end
else
    O=C;
    %create desired output vectors (one-vs-rest +1/-1 coding)
    DSR=-ones(K,O);
    for k=1:K
        cls=TRLB(k);
        cx=find(CLSLB==cls);
        DSR(k,cx)=1;
    end
end

%default parameters
EMIN=1e-1;   %maximum acceptable error
ITRMAX=1000; %maximum number of iterations
M=C;         %number of neurons in the hidden layer
beta=2;      %mu multiplier/divider
mu=100;      %initial combination coefficient
dbg=0;       %verbosity

%network weights W, V and b may also be given as parameters
%read varargin
nVarargs = length(varargin);
for k = 1:2:nVarargs
    str=varargin{k};
    if(strcmpi(str,'M'))
        M=varargin{k+1};
    elseif(strcmpi(str,'EMIN'))
        EMIN=varargin{k+1};
    elseif(strcmpi(str,'ITRMAX'))
        ITRMAX=varargin{k+1};
    elseif(strcmpi(str,'mu'))
        mu=varargin{k+1};
    elseif(strcmpi(str,'beta'))
        beta=varargin{k+1};
    elseif(strcmpi(str,'W'))
        W=varargin{k+1};
    elseif(strcmpi(str,'V'))
        V=varargin{k+1};
    elseif(strcmpi(str,'b'))
        b=varargin{k+1};
    elseif(strcmpi(str,'dbg'))
        dbg=varargin{k+1};
    else
        fprintf('%s could not be recognized\n',varargin{k});
    end
end

%random initialization for weights that were not supplied
if(~exist('W','var'))
    W=randn(N,M)/10;
end
if(~exist('V','var'))
    V=randn(M,O)/10;
end
if(~exist('b','var'))
    b=randn(O,1)/10;
end

ME=EMIN;        %mean error
itr=0;          %iteration counter
E=zeros(K*O,1); %error of each output for each epoch
ME_=ME;         %previous error value

while(ME_>=EMIN && itr<ITRMAX)

    %(one Levenberg-Marquardt iteration: forward pass over all epochs, error
    % vector E, mean squared error ME and the trial update of W, V and b)

    if(ME/ME_>1)
        %error increased: reject the trial step, restore the previous weights
        %and increase the combination coefficient
        mu=mu*beta;
        W=W_;
        V=V_;
        b=b_;
        if(dbg>0)
            disp(sprintf('ens=%d u=%d %0.8f, %0.8f',itr, mu, ME_, ME));
        end
    else
        %error decreased: accept the trial step, store the weights and decrease
        %the combination coefficient
        W_=W;
        V_=V;
        b_=b;
%         WG=W;
%         VG=V;
%         bG=b;
%
        mu=mu/beta;
        if(dbg>0)
            disp(sprintf('ens=%d u=%d %0.8f, %0.8f *',itr, mu, ME_, ME));
        end
        ME_=mean(E.*E);
    end
    if(itr