Skip to main content
. 2022 Aug 7;22(15):5891. doi: 10.3390/s22155891
Algorithm 1 DDAE Weight Training Algorithm
Input: DIFF value, network architecture, max_epoch, dropout_rate p, and learning rate α;
Output: Trained weights ω and b;
 1: Randomly initialize ω and b;
 2: while epoch < max_epoch do
 3:    Randomly select a mini-batch from inputs;
 4:    // Forward propagation;
 5:    // L is the number of layers of the DDAE;
 6:    for l = 2:L-2 do
 7:      if the current layer is a dropout layer then
 8:         $r_j^{(l)} \sim \mathrm{Bernoulli}(p)$
 9:         $\tilde{y}^{(l)} = r^{(l)} \cdot y^{(l)}$
 10:        $z_i^{(l+1)} = \omega_i^{(l+1)} \tilde{y}^{(l)} + b_i^{(l+1)}$
 11:        $y_i^{(l+1)} = f\left(z_i^{(l+1)}\right)$
 12:      else
 13:         // The current layer is a hidden layer;
 14:         $z^{(l)} = \omega^{(l)} y^{(l)} + b^{(l)}$
 15:         $y^{(l)} = f_l\left(z^{(l)}\right)$
 16:      end if
 17:    end for
 18:    //Loss function;
 19:    $\mathrm{Loss} = -\sum_{i=1}^{\mathrm{output\_size}} y_i \cdot \log \hat{y}_i$
 20: end while