## install the following packages
#install.packages("asnipe")
#install.packages("Matrix")
#install.packages("survival")
#install.packages("combinat")
#install.packages("foreach")
#install.packages("doParallel")
#install.packages("matrixStats")

# set the working directory to the folder that contains 'OR3_NBDA code 1.2.15.r',
# 'OR2_sensitivity functions.R' and 'OR5_simulated observational data.csv'
setwd("C:/Users/bssw/Desktop/Leeds University/PhD/Simulations - sensitivity of NBDA/Online Resources")

source("OR3_NBDA code 1.2.15.r")        # sources functions
source("OR2_sensitivity functions.R")   # sources functions

# A) read the simulated data set (observations x individuals matrix)
sightings <- read.csv("OR5_simulated observational data.csv")

# B) extract how many times each dyad has been seen together and how many times they have been seen apart.
# These two matrices will be used to create a social network with observational error using a Bayesian approach.
together_apart_output <- together_apart(sightings=sightings)
write.csv(together_apart_output[,,1], file="together_matrix.csv")   # save 'together' matrix
write.csv(together_apart_output[,,2], file="apart_matrix.csv")      # save 'apart' matrix

# C) determine how many times individuals have been seen and choose cut-off points
cutoff <- sort(unique(colSums(sightings)))   # extract column sums (= number of sightings per individual)
cutoff <- cutoff[-length(cutoff)]            # remove the maximum number of sightings: at least 2 individuals must make the cut,
                                             # otherwise association matrices cannot be calculated and the function returns an error
cutoff <- cutoff[-length(cutoff)]            # remove the second-largest value as well
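# Optional sanity checks (not part of the original workflow): a minimal sketch under the
# assumptions that cut-offs are applied as "seen at least this many times" and that the
# 'apart' matrix counts all observations in which only one member of a dyad was present.
# It shows how many individuals would survive each cut-off and whether the reconstructed
# simple ratio indices fall in the expected 0-1 range.
print(sapply(cutoff, function(k) sum(colSums(sightings) >= k)))   # individuals retained per cut-off
sri_check <- together_apart_output[,,1] /
             (together_apart_output[,,1] + together_apart_output[,,2])   # rough SRI per dyad
diag(sri_check) <- NA                                             # ignore self-associations
print(range(sri_check, na.rm=TRUE))                               # values should lie between 0 and 1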
# D) run a first simulation with dropping learners and s=8 (social learning)
sim1 <- sensitivity_NBDA_ind_error(x=together_apart_output,   # with s=8 and dropping learners
                                   sightings=sightings,
                                   cutoff=cutoff,
                                   association_index="SRI",
                                   iterations=10000,
                                   s=8,
                                   num_ind_learn=20,
                                   cores=2,
                                   keep_learners=FALSE,
                                   delta_AICc=2)
save(sim1, file="sensitivity_NBDA_with_error_false neg_drop learners.RData")   # save the sim1 object
write.csv(sim1$raw, "sensitivity_NBDA_with_error_false neg_drop learners_RAW.csv")
write.csv(sim1$summary, "sensitivity_NBDA_with_error_false neg_drop learners_SUMMARY.csv")

# E) run a second simulation with keeping learners and s=8 (social learning)
sim2 <- sensitivity_NBDA_ind_error(x=together_apart_output,   # with s=8 and keeping learners
                                   sightings=sightings,
                                   cutoff=cutoff,
                                   association_index="SRI",
                                   iterations=10000,
                                   s=8,
                                   num_ind_learn=20,
                                   cores=2,
                                   keep_learners=TRUE,
                                   delta_AICc=2)
save(sim2, file="sensitivity_NBDA_with_error_false neg_keep learners.RData")   # save the sim2 object
write.csv(sim2$raw, "sensitivity_NBDA_with_error_false neg_keep learners_RAW.csv")
write.csv(sim2$summary, "sensitivity_NBDA_with_error_false neg_keep learners_SUMMARY.csv")

# F) run a third simulation with dropping learners and s=0 (asocial learning)
sim3 <- sensitivity_NBDA_ind_error(x=together_apart_output,   # with s=0 and dropping learners
                                   sightings=sightings,
                                   cutoff=cutoff,
                                   association_index="SRI",
                                   iterations=10000,
                                   s=0,
                                   num_ind_learn=20,
                                   cores=2,
                                   keep_learners=FALSE,
                                   delta_AICc=2)
save(sim3, file="sensitivity_NBDA_with_error_false pos_drop learners.RData")   # save the sim3 object
write.csv(sim3$raw, "sensitivity_NBDA_with_error_false pos_drop learners_RAW.csv")
write.csv(sim3$summary, "sensitivity_NBDA_with_error_false pos_drop learners_SUMMARY.csv")

# G) run a fourth simulation with keeping learners and s=0 (asocial learning)
sim4 <- sensitivity_NBDA_ind_error(x=together_apart_output,   # with s=0 and keeping learners
                                   sightings=sightings,
                                   cutoff=cutoff,
                                   association_index="SRI",
                                   iterations=10000,
                                   s=0,
                                   num_ind_learn=20,
                                   cores=2,
                                   keep_learners=TRUE,
                                   delta_AICc=2)
save(sim4, file="sensitivity_NBDA_with_error_false pos_keep learners.RData")   # save the sim4 object
write.csv(sim4$raw, "sensitivity_NBDA_with_error_false pos_keep learners_RAW.csv")
write.csv(sim4$summary, "sensitivity_NBDA_with_error_false pos_keep learners_SUMMARY.csv")
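# Optional check (not part of the original workflow): inspect the simulation output before
# plotting. Only the columns referenced in the plots below ("cutoff",
# "perc_social_learning_supported", "within_95%_CI") are assumed to exist; the remaining
# structure of $raw and $summary may differ.
str(sim1$summary)                                                 # overview of the summary table
print(sim1$summary[, c("cutoff", "perc_social_learning_supported", "within_95%_CI")])   # columns used in the plots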
#########################################################################################################
## plot results of sim1
windowsFonts(A = windowsFont("Times New Roman"))
par(mfrow=c(1,2))
par(mar=c(5.1,4,3,1))   # set figure margins
plot(sim1$summary[,"cutoff"], sim1$summary[,"perc_social_learning_supported"],   # plot the proportion of social models supported against the cut-off points
     type="b", ylim=c(0,100), xlab="Cut-off points", ylab="Percent", family="A",
     main="a) dropping learners", cex.main=1)
par(new=TRUE)   # prevent R from opening a new figure
plot(sim1$summary[,"cutoff"], sim1$summary[,"within_95%_CI"],   # plot estimates of s falling within the 95% CI into the same plot
     type="b", xaxt="n", yaxt="n", xlab="", ylab="", col="red", pch=2,
     ylim=c(0,100), family="A")   # set limits of y-axis to match your minimum and maximum estimates
par(family="serif")
legend(x=1.5, y=20, pch=c(1,2),
       legend=c("Social models supported", "s within 95% C.I."),   # add a legend
       lty=c(1,4), col=c("black","red"), bty="n", cex=0.9)

#########################################################################################################
## plot results of sim2
par(mar=c(5.1,2,3,3))   # set figure margins
plot(sim2$summary[,"cutoff"], sim2$summary[,"perc_social_learning_supported"],   # plot the proportion of social models supported against the cut-off points
     type="b", ylim=c(0,100), xlab="Cut-off points", ylab="",
     main="b) retaining learners", cex.main=1)
par(new=TRUE)   # prevent R from opening a new figure
plot(sim2$summary[,"cutoff"], sim2$summary[,"within_95%_CI"],   # plot estimates of s falling within the 95% CI into the same plot
     type="b", xaxt="n", yaxt="n", xlab="", ylab="", col="red", pch=2,
     ylim=c(0,100))   # set limits of y-axis to match your minimum and maximum estimates
par(family="serif")
legend(x=1.5, y=20, pch=c(1,2),
       legend=c("Social models supported", "s within 95% C.I."),   # add a legend
       lty=c(1,4), col=c("black","red"), bty="n", cex=0.9)

#########################################################################################################
## plot results of sim3
windowsFonts(A = windowsFont("Times New Roman"))
par(mfrow=c(1,2))
par(mar=c(5.1,4,3,1))   # set figure margins
plot(sim3$summary[,"cutoff"], sim3$summary[,"perc_social_learning_supported"],   # plot the proportion of social models supported against the cut-off points
     type="b", ylim=c(0,100), xlab="Cut-off points", ylab="Percent", family="A",
     main="a) dropping learners", cex.main=1)
par(new=TRUE)   # prevent R from opening a new figure
plot(sim3$summary[,"cutoff"], sim3$summary[,"within_95%_CI"],   # plot estimates of s falling within the 95% CI into the same plot
     type="b", xaxt="n", yaxt="n", xlab="", ylab="", col="red", pch=2,
     ylim=c(0,100), family="A")   # set limits of y-axis to match your minimum and maximum estimates
par(family="serif")
legend(x=1.5, y=50, pch=c(1,2),
       legend=c("Social models supported", "s within 95% C.I."),   # add a legend
       lty=c(1,4), col=c("black","red"), bty="n", cex=0.9)

#########################################################################################################
## plot results of sim4
par(mar=c(5.1,2,3,3))   # set figure margins
plot(sim4$summary[,"cutoff"], sim4$summary[,"perc_social_learning_supported"],   # plot the proportion of social models supported against the cut-off points
     type="b", ylim=c(0,100), xlab="Cut-off points", ylab="Percent", family="A",
     main="b) retaining learners", cex.main=1)
par(new=TRUE)   # prevent R from opening a new figure
plot(sim4$summary[,"cutoff"], sim4$summary[,"within_95%_CI"],   # plot estimates of s falling within the 95% CI into the same plot
     type="b", xaxt="n", yaxt="n", xlab="", ylab="", col="red", pch=2,
     ylim=c(0,100), family="A")   # set limits of y-axis to match your minimum and maximum estimates
par(family="serif")
legend(x=1.5, y=50, pch=c(1,2),
       legend=c("Social models supported", "s within 95% C.I."),   # add a legend
       lty=c(1,4), col=c("black","red"), bty="n", cex=0.9)
#########################################################################################################
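# Optional: export the currently displayed figure to a file. This is a minimal sketch and
# not part of the original workflow; the file name and dimensions are arbitrary choices.
# Note that windowsFonts() and family="A" above only work on Windows; on other platforms
# drop the family arguments or use family="serif" directly.
# dev.copy(png, filename="sensitivity_NBDA_s0_panels.png", width=2000, height=1000, res=300)   # copy the displayed panels (here sim3/sim4) to a PNG device
# dev.off()                                                                                    # close the PNG device to write the file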