% Input data: thickness
x = [ 0.37 0.41 0.48 0.49 0.47 0.37 0.41 0.48 0.49 0.47];
% Target (output) data: THL
t = [542.79 511.79 797.09 628.68 764.79 237.8 229.5 371.7 269.87 266];
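% Note: fitnet/train treat each column of x and t as one sample, so the
% 1-by-10 row vectors above define 10 thickness-THL pairs.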
% Choose a Training Function
trainFcn = 'trainlm'; % Levenberg-Marquardt backpropagation; 'trainlm' is usually the fastest.
% Create a Fitting Network
hiddenLayerSize = 3; % number of hidden neurons
netthl = fitnet(hiddenLayerSize,trainFcn);
% Setup Division of Data for Training, Validation, Testing
netthl.divideParam.trainRatio = 70/100;
netthl.divideParam.valRatio = 15/100;
netthl.divideParam.testRatio = 15/100;
netthl.trainParam.epochs = 3000; % maximum number of training epochs
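% Training stops at this epoch limit or earlier, e.g. when the error on the
% validation subset stops improving (early stopping).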
% Train the Network
[netthl,tr] = train(netthl,x,t);
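% tr is the training record; it stores the data-division indices and the
% training/validation/test performance at each epoch.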
% Test the Network
y = netthl(x);
e = gsubtract(t,y);
performance = perform(netthl,t,y)
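% perform evaluates the network's performance function on targets t and
% outputs y; for fitnet this defaults to the mean squared error (mse).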
% View the Network
view(netthl)
% Plots
figure, plotperform(tr)
figure, plottrainstate(tr)
figure, ploterrhist(e)
figure, plotregression(t,y)
% Use regression analysis to judge the network performance
[m,b,r] = postreg(y,t);
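% postreg fits a linear regression of the outputs y on the targets t and
% returns the slope m, intercept b, and correlation coefficient r. In newer
% Deep Learning Toolbox releases it is superseded by regression, e.g.
% [r,m,b] = regression(t,y).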
% Save the trained network
save('netthl.mat','netthl');
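% --- Example usage (illustrative sketch) ---
% Reload the saved network and predict THL for a new thickness value; the
% value 0.45 below is hypothetical and only shows how a prediction is made.
load('netthl.mat','netthl');   % restore the trained network object
xNew = 0.45;                   % hypothetical thickness value
thlPred = netthl(xNew)         % predicted THL for the new thickness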