# R script to fit Elman and Jordan neural network models, tuning the
# hidden-layer size with the leave-one-out cross-validation (LOOCV) method

# load packages
library(RSNNS)
library(parallel)

# x_train (matrix of predictors) and y_train (numeric response vector)
# are assumed to be defined before this script is run

# candidate numbers of units in an Elman neural network hidden layer
min_size <- 5
max_size <- 25
size <- seq(min_size, max_size)

# performing the leave-one-out cross-validation method: for a given
# hidden-layer size, fit the network n times, each time leaving one
# observation out, and return the mean squared error on the held-out points
get_best_elman <- function(size) {
  n <- length(y_train)
  mse <- numeric(n)
  for (a in 1:n) {
    x_train_loo <- x_train[-a, ]
    y_train_loo <- y_train[-a]
    # set.seed is used to aid reproducibility of the random weight initialisation
    set.seed(2018)
    fit <- elman(x_train_loo, y_train_loo, size = size,
                 learnFuncParams = 0.75, maxit = 5000)
    # drop = FALSE keeps the single held-out row as a one-row matrix,
    # as predict() expects
    pred <- predict(fit, x_train[a, , drop = FALSE])
    mse[a] <- (y_train[a] - pred)^2
    # progress indicator: current size and fold
    print(data.frame(size = size, fold = a))
  }
  mean_mse <- mean(mse)
  return(c(size, mean_mse))
}

# run the candidate sizes in parallel (multicore; Linux only)
system.time(
  res_p <- mclapply(seq_along(size),
                    FUN = function(x) get_best_elman(size[x]),
                    mc.cores = 12)
)
df <- data.frame(matrix(unlist(res_p), ncol = 2, byrow = TRUE))
names(df) <- c("size", "mean_mse")
best_elman_model <- df[df$mean_mse == min(df$mean_mse), ]
best_elman_model

# candidate numbers of units in a Jordan neural network hidden layer
min_size <- 5
max_size <- 25
size <- seq(min_size, max_size)

# performing the leave-one-out cross-validation method,
# analogous to get_best_elman above
get_best_jordan <- function(size) {
  n <- length(y_train)
  mse <- numeric(n)
  for (a in 1:n) {
    x_train_loo <- x_train[-a, ]
    y_train_loo <- y_train[-a]
    # set.seed is used to aid reproducibility of the random weight initialisation
    set.seed(2018)
    fit <- jordan(x_train_loo, y_train_loo, size = size,
                  learnFuncParams = 0.55, maxit = 5000)
    pred <- predict(fit, x_train[a, , drop = FALSE])
    mse[a] <- (y_train[a] - pred)^2
    # progress indicator: current size and fold
    print(data.frame(size = size, fold = a))
  }
  mean_mse <- mean(mse)
  return(c(size, mean_mse))
}

# run the candidate sizes in parallel (multicore; Linux only)
system.time(
  res_p <- mclapply(seq_along(size),
                    FUN = function(x) get_best_jordan(size[x]),
                    mc.cores = 12)
)
df <- data.frame(matrix(unlist(res_p), ncol = 2, byrow = TRUE))
names(df) <- c("size", "mean_mse")
best_jordan_model <- df[df$mean_mse == min(df$mean_mse), ]
best_jordan_model
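
# A minimal sketch of how the tuned sizes could be used to refit each
# network on the full training set and score held-out data. x_test and
# y_test are assumptions (a test matrix/response prepared the same way
# as x_train/y_train); they are not defined in this script.
set.seed(2018)
final_elman <- elman(x_train, y_train,
                     size = best_elman_model$size[1],  # first row in case of ties
                     learnFuncParams = 0.75, maxit = 5000)
pred_elman <- predict(final_elman, x_test)

set.seed(2018)
final_jordan <- jordan(x_train, y_train,
                       size = best_jordan_model$size[1],  # first row in case of ties
                       learnFuncParams = 0.55, maxit = 5000)
pred_jordan <- predict(final_jordan, x_test)

# test-set mean squared error for each model
mean((y_test - pred_elman)^2)
mean((y_test - pred_jordan)^2)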