Skip to content

Commit

Permalink
Merge branch 'main' of github.com:Rstats-courses/CursoR-AEET-2025
Browse files Browse the repository at this point in the history
  • Loading branch information
Pakillo committed Jan 16, 2025
2 parents b24ee68 + e0b67f8 commit b32a7ac
Show file tree
Hide file tree
Showing 4 changed files with 86 additions and 61 deletions.
7 changes: 3 additions & 4 deletions materiales.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@ Datasets: [zip file](data/datasets.zip) [individual datasets](https://github.com

- Common-sense stats: [slides](slides/Common_Sense_Stats/CommonSense.html)

- Data management: [slides](slides/data_management.html), [slides_pdf](slides/data_management.pdf)
- Data management: [slides](slides/data_management.html), [slides_pdf](slides/data_management.pdf), [script](scripts/data_management.R)

- Data visualisation: [slides](slides/data_visualization.html), [slides_pdf](slides/data_visualization.pdf)
- Data visualisation: [slides](slides/data_visualization.html), [slides_pdf](slides/data_visualization.pdf), [script](scripts/data_visualization.R)


### MARTES
Expand All @@ -23,12 +23,11 @@ Datasets: [zip file](data/datasets.zip) [individual datasets](https://github.com
- Git & GitHub: [slides](slides/github.html)



### MIÉRCOLES

- GLMM: [slides](slides/GLMs.pdf), [scripts](https://github.com/Rstats-courses/CursoR-AEET-2025/tree/main/scripts)

- Reproducible workflows: [slides](slides/ReproducibleScience.pdf), [Rmd slides](slides/Rmarkdown_intro.html), [examples](scripts/Rmarkdown_examples.zip)
- Reproducible workflows: [slides](slides/ReproducibleScience.pdf), [Rmd slides](slides/Rmarkdown_intro.html), [Rmd_examples](scripts/Rmd_examples.zip)


### JUEVES
Expand Down
136 changes: 81 additions & 55 deletions scripts/Functional_programming.R
Original file line number Diff line number Diff line change
@@ -1,54 +1,12 @@
#This script plays with null models as a way to learn functions and
#practice loops.


#Let's start by optimizing the code for simple plotting

data(iris)
#head(iris)

setosa <- subset(iris, Species == "setosa")
plot(setosa$Sepal.Length ~ setosa$Petal.Length, las = 1,
xlab = "Sepal length", ylab = "Petal length")
versicolor <- subset(iris, Species == "versicolor")
plot(versicolor$Sepal.Length ~ versicolor$Petal.Length, las = 1,
xlab = "Sepal length", ylab = "Petal length")
virginica <- subset(iris, Species == "virginica")
plot(virginica$Sepal.Length ~ virginica$Petal.Length, las = 1,
xlab = "Sepal length", ylab = "Petal length")

#Better
species <- c("setosa", "versicolor", "virginica")
#species <- unique(iris$Species)

for (i in species) {
temp <- subset(iris, Species == i)
plot(temp$Sepal.Length ~ temp$Petal.Length, las = 1,
xlab = "Sepal length", ylab = "Petal length")
}

#Even better
# Plot Sepal.Length (y axis) against Petal.Length (x axis) for one species.
#
# sp:   character scalar, species name matched against the `Species` column.
# data: data frame with columns `Species`, `Sepal.Length`, `Petal.Length`
#       (e.g. the built-in iris dataset).
#
# Called for its plotting side effect; returns NULL invisibly.
plot_species <- function(sp, data) {
  temp <- subset(data, Species == sp)
  # Formula is y ~ x: Sepal.Length on the y axis, Petal.Length on the x
  # axis. Labels must follow that orientation (the original had them
  # swapped: xlab said "Sepal length" for the Petal.Length axis).
  plot(temp$Sepal.Length ~ temp$Petal.Length, las = 1,
       xlab = "Petal length", ylab = "Sepal length")
}
lapply(species, plot_species, data = iris)
#or
purrr::map(species, plot_species, data = iris)

#Or if data is heavy
library(future.apply)
plan(multisession)
future_lapply(species, plot_species, data = iris)


#A simple null model used to test correlations----

#The data
abundance <- c(1,3,4,7,8,13)
body_size <- c(9,6,3,3,1,1)
plot(abundance ~ body_size)
abundance <- sort(runif(n = 6, min = 1, max = 13))
body_size <- sort(runif(n = 6, min = 1, max = 9), decreasing = TRUE)
plot(abundance ~ body_size, las = 1)
(corr <- cor(abundance,body_size))
cor.test(abundance,body_size)

Expand All @@ -62,10 +20,14 @@ cor.test(abundance,body_size)
cor(sample(body_size, size = length(body_size), replace = FALSE), abundance)

#we do it 1000 times
cor_dis <- c()
init <- Sys.time()
#cor_dis <- c()
cor_dis <- rep(NA, 1000)
for (k in 1:1000){
cor_dis[k] <- cor(sample(body_size, size = length(body_size), replace = FALSE), abundance)
}
end <- Sys.time()
# Elapsed time is end - init; the original printed init - end,
# i.e. a negative duration.
print(end - init)

#plot
hist(cor_dis)
Expand All @@ -78,7 +40,7 @@ length(cor_dis[which(cor_dis < corr)])/1000

#Another example----
#We observe a pattern: How uneven are abundance distributions?
abundance <- c(1,3,4,7,8,13)
abundance <- round(runif(n = 6, min = 1, max = 13))
#calculate Pielou's evenness (Shannon / logarithm of number of species)
#Shannon The proportion of species i relative to the total number of species
#(pi) is calculated, and then multiplied by the natural logarithm of
Expand All @@ -100,25 +62,31 @@ abundance2 <- table(rand)
#Calculate J of the simulated community
J2 <- J(abundance2)
# now make it 1000 times
out <- c()
for(i in 1:100){
init <- Sys.time()
#out <- c()
out <- rep(NA, 1000)
for(i in 1:1000){
rand <- sample(c(1:6), sum(abundance), replace = TRUE)
abundance2 <- hist(rand, breaks = seq(0,6,1))$counts
abundance2 <- table(rand)
J2 <- J(abundance2)
out <- c(out, J2)
}
end <- Sys.time()
# Elapsed time is end - init; the original printed init - end,
# i.e. a negative duration.
print(end - init)

#calculate p-value
hist(out)
lines(c(eve, eve), c(0,20), col = "red")
lines(c(eve, eve), c(0,1000), col = "red")
(p <- pnorm(eve, mean = mean(out, na.rm = TRUE), sd = sd(out, na.rm = TRUE)))
#Or using the real distribution directly
length(out[which(out < eve)])/1000

#functionalize it
null_function <- function(x, iterations = 1000){
# make a loop
#calculate observed values
n <- length(x)
eve <- J(x)
eve <- J(x) #observed eveness
# make a loop
out_bs <- c()
for(i in 1:iterations){
rand <- sample(c(1:n), sum(x), replace = TRUE)
Expand All @@ -133,11 +101,69 @@ null_function <- function(x, iterations = 1000){
round(out, digits = 2)))
}

null_function(c(1,2,3,4,5,6,7,8,9))
null_function(round(runif(100, 30,100)))
null_function(c(3,4,4,4,5,6,6,8,8))
null_function(c(1,2,2,3,4,6,8,9,9))
null_function(c(1,1,1,1,1,1,1,1,100))

#make a package!

#What if the loop is too slow... we can use lapply
# Time the loop-based version. Elapsed time is end - init
# (the original printed init - end, a negative duration).
init <- Sys.time()
null_function(round(runif(1000, 30, 35)))
end <- Sys.time()
print(end - init)

#functionalize using lapply
# Null-model test of community evenness, using lapply instead of a for loop.
#
# x:          numeric vector of species abundances.
# iterations: number of null communities to simulate (default 1000).
#
# For each iteration, sum(x) individuals are assigned uniformly at random
# to length(x) species and the simulated community's Pielou evenness J()
# is computed; the observed evenness is then compared against that null
# distribution.
#
# Prints (and returns, via print) a string with the observed evenness and
# the proportion of null communities with lower evenness.
# NOTE(review): relies on J() being defined earlier in this script.
null_function_apply <- function(x, iterations = 1000){
  # Calculate observed values
  n <- length(x)
  eve <- J(x) # observed evenness
  # Use lapply for the loop
  out_bs <- lapply(seq_len(iterations), function(i) {
    rand <- sample(seq_len(n), sum(x), replace = TRUE)
    x2 <- table(rand)
    J(x2)
  })
  # Convert the list to a vector
  out_bs <- unlist(out_bs)
  # And test significance: proportion of null values below the observed one
  out <- sum(out_bs < eve) / iterations
  print(paste("The evenness value observed is",
              round(eve, digits = 2),
              "and the probability of occurring by chance is",
              round(out, digits = 2)))
}

# Time the lapply-based version. Elapsed time is end - init
# (the original printed init - end, a negative duration).
init <- Sys.time()
null_function_apply(round(runif(1000, 30, 35)))
end <- Sys.time()
print(end - init)

#even faster
#functionalize the part inside of the loop only
# Null-model test of community evenness, parallelized with
# future.apply::future_lapply (requires a plan(), e.g. plan(multisession)).
#
# x:          numeric vector of species abundances.
# iterations: number of null communities to simulate (default 1000).
#
# Same contract as null_function_apply: prints a string with the observed
# Pielou evenness and the proportion of null communities with lower
# evenness.
# NOTE(review): relies on J() being defined earlier in this script.
null_function_fapply <- function(x, iterations = 1000){
  # Calculate observed values
  n <- length(x)
  eve <- J(x) # observed evenness
  # Parallel loop. future.seed = TRUE gives statistically sound,
  # reproducible parallel RNG streams; the body uses sample(), so
  # future.seed = NULL (the original) would warn about unsafe RNG use.
  out_bs <- future_lapply(seq_len(iterations), function(i) {
    rand <- sample(seq_len(n), sum(x), replace = TRUE)
    x2 <- table(rand)
    J(x2)
  }, future.seed = TRUE)
  # Convert the list to a vector
  out_bs <- unlist(out_bs)
  # And test significance: proportion of null values below the observed one
  out <- sum(out_bs < eve) / iterations
  print(paste("The evenness value observed is",
              round(eve, digits = 2),
              "and the probability of occurring by chance is",
              round(out, digits = 2)))
}
library(future.apply)
plan(multisession)
# Time the parallel version. Elapsed time is end - init
# (the original printed init - end, a negative duration).
init <- Sys.time()
null_function_fapply(round(runif(1000, 30, 35)))
end <- Sys.time()
print(end - init)

#null model 2.
#We want to test now if body size is driving the evenness patterns.
Expand Down
Binary file added scripts/Rmd_examples.zip
Binary file not shown.
4 changes: 2 additions & 2 deletions scripts/multivariante_notes.R
Original file line number Diff line number Diff line change
Expand Up @@ -113,8 +113,8 @@ orditorp(Herb_community.mds,display="species",col="red",air=0.01)

#Testing hypothesis PERMANOVA----
#First test: Are centroids different?
adonis(Herb_community ~ Habitat, method = "bray")
adonis(Herb_community ~ DayNight, method = "bray")
adonis2(Herb_community ~ Habitat, method = "bray")
adonis2(Herb_community ~ DayNight, method = "bray")
#Second test: Is the spread different?
b <- betadisper(vegdist(Herb_community, method = "bray"), group = Habitat)
anova(b)
Expand Down

0 comments on commit b32a7ac

Please sign in to comment.