Skip to main content
Polymers. 2023 Sep 2;15(17):3636. doi: 10.3390/polym15173636
Algorithm 1. MATLAB® code for GRNN parallel training algorithm
% Build the population of candidate spread constants, stepping by s from
% Spread_min to Spread_max (all three assumed defined upstream).
Spread = Spread_min:s:Spread_max;

% Train one GRNN per candidate spread in parallel: each worker calls the
% built-in training function and stores its network in the matching
% element of the pop_GRNN structure array.
parfor idx = 1:length(Spread)

  pop_GRNN(idx).net = newgrnn(Inputs_train,Targets_train,Spread(idx));
end

% Simulate each trained GRNN on the training inputs (parallel computing
% mode) and keep only networks whose mean absolute percentage error is
% below the acceptance threshold max_MAPE (assumed defined upstream).
for ii = 1:length(Spread)

  % ASCII quotes are required here: the original listing carried
  % mis-encoded typographic quotes, which MATLAB cannot parse.
  Outputs_train = sim(pop_GRNN(ii).net,Inputs_train,...
    'useParallel','yes');

  % Absolute error of the trained network (ASCII '-', not U+2212,
  % which the original mis-encoded listing contained)
  Err = Targets_train - Outputs_train;

  % Mean absolute percentage error of the trained network; non-finite
  % terms (division by a zero target) are excluded before averaging
  pre_MAPE = abs(Err./Targets_train);
  mean_MAPE = mean(pre_MAPE(isfinite(pre_MAPE))) * 100;

  % Store spread, outputs and MAPE only while the candidate satisfies
  % the error threshold
  if mean_MAPE < max_MAPE
    pop_GRNN(ii).Spread = Spread(ii);
    pop_GRNN(ii).Outputs = Outputs_train;
    pop_GRNN(ii).MAPE = mean_MAPE;
  else

    % Premature termination of the cycle on the first unacceptable
    % candidate. NOTE(review): this assumes the error grows
    % monotonically with the spread — confirm against the study.
    break
  end
end

% Identify the index of the MAPE maximum value among the accepted
% candidates in the pop_GRNN structure (empty MAPE fields are skipped
% by the comma-list concatenation). NOTE(review): max selects the
% WORST accepted network; if the best-performing one is intended,
% this should be min — confirm against the study.
[~,k] = max([pop_GRNN.MAPE]);

% Extract the selected spread, network and training outputs, provided
% at least one candidate passed the threshold.
if ~isempty([pop_GRNN(k).Spread])
  spread = pop_GRNN(k).Spread;
  net_GRNN = pop_GRNN(k).net;
  Outputs_GRNN_train = pop_GRNN(k).Outputs;

  % Trim the unfilled trailing elements from the structure array.
  % Bug fix: the original pop_GRNN(1:n,:) applies row/column
  % subscripting to a 1-by-N struct array and errors for n > 1;
  % linear indexing performs the intended trim.
  pop_GRNN = pop_GRNN(1:length([pop_GRNN.Spread]));
else

  % No candidate satisfied the threshold: terminate execution.
  return
end