`ro warning=FALSE, message=FALSE, comment=NA, tidy=FALSE, cache=TRUE, verbose=TRUE, cache.path="may/" or`
## Code for the May Model
\begin{equation}
X_{t+1} = X_t \exp\left( r \left(1 - \frac{X_t}{K}\right) - \frac{a X_t^{Q-1}}{X_t^Q + H^Q} \right)
\end{equation}
We will use parameters r = 0.75, K = 10, a = 1.55, H = 1, Q = 3. In this model a is the parameter that, when increased, forces the system through a bifurcation point at a = 2.
For each of the warning signal statistics in question,
we need to generate the distribution over all replicates
and then over replicates which have been selected conditional
on having experienced a crash.
Load the required libraries
``` {r libraries}
library(earlywarning)
library(reshape2) # data manipulation
library(data.table) # data manipulation
library(ggplot2) # graphics
library(snowfall) # parallel
````
This just sets our plotting preferences
```{r plotting-theme}
# Publication plotting theme: black-and-white base at 12 pt, with the
# legend key background, major/minor grid lines, plot background and
# legend title all removed.
# NOTE: element_blank() replaces theme_blank(), which was deprecated in
# ggplot2 0.9.1 and removed in 0.9.2 -- the original code errors on any
# modern ggplot2.
theme_publish <- theme_set(theme_bw(12))
theme_publish <-
  theme_update(legend.key = element_blank(),
               panel.grid.major = element_blank(),
               panel.grid.minor = element_blank(),
               plot.background = element_blank(),
               legend.title = element_blank())
```
### Conditional distribution
Then we fix a set of parameters we will use for the simulation function. Since we will simulate 20,000 replicates with 5,000 points apiece, we can save memory by performing the conditional selection on the ones that crash as we go along and discard the others.
``` {r sim_may}
# Population level below which a replicate is considered to have crashed.
threshold <- 1.5

# Simulate 1000 replicate time series of the May model and return only
# the replicates whose final state lies below `threshold` (i.e. those
# that crashed), discarding the rest to save memory.
#
# Args:
#   n: nominal simulation length; the actual number of time steps is
#      n / 10 (this model doesn't need runs as long as the
#      individual-based one).
# Returns:
#   A matrix with n/10 rows (time) and one column per crashed replicate
#   (possibly zero columns if nothing crashed).
select_crashes <- function(n){
  n <- n / 10 # doesn't need as long as the individual-based
  sn <-
    sapply(seq_len(1000), function(rep){
      x <- vector(mode = "double", length = n)
      x[1] <- 8 # positive equilibrium
      z <- rlnorm(n, 0, .1) # multiplicative log-normal noise
      r <- .75; k <- 10; a <- 1.55; H <- 1; Q <- 3
      # Iterate the May map. Loop only to n-1 so x keeps its preallocated
      # length n: the original looped 1:n and wrote x[n+1], silently
      # growing the vector by one element every replicate.
      for(t in seq_len(n - 1)){
        x[t + 1] <- z[t] * x[t] *
          exp(r * (1 - x[t] / k) - a * x[t] ^ (Q - 1) / (x[t] ^ Q + H ^ Q))
      }
      x
    })
  # Keep only the replicates whose final value is below the threshold.
  # drop = FALSE so a single crashed replicate still returns a matrix.
  crashed <- which(sn[n, ] < threshold)
  sn[, crashed, drop = FALSE]
}
```
Initialize the parallel environment
``` {r parallel, cache=FALSE}
sfInit(parallel=FALSE)
```
and run the simulations:
``` {r simdat}
# Export the global environment (threshold, select_crashes, ...) to the
# snowfall workers, then run 20 batches of 1000 replicates each,
# keeping only the crashed replicates from each batch.
sfExportAll()
examples <- sfLapply(1:20, function(i) select_crashes(50000))
# Bind the batch matrices column-wise and melt into one long data frame
# with columns (time, reps, value); check.names=FALSE keeps one column
# per replicate rather than mangling duplicate names.
dat <- melt(as.matrix(as.data.frame(examples, check.names=FALSE)))
names(dat) <- c("time", "reps", "value")
levels(dat$reps) <- seq_along(levels(dat$reps)) # use numbers for reps
````
``` {r testing}
# Sanity check: plot the first nine crashed replicates, one panel per
# replicate, with free axis scales so each trajectory is visible.
ggplot(subset(dat, reps %in% levels(dat$reps)[1:9])) +
geom_line(aes(time, value)) +
facet_wrap(~reps, scales="free")
````
Zoom in on the relevant area of data near the crash
``` {r subsetdata, dependson="simdat" }
require(plyr)
# For each replicate, find the first time the population falls below the
# crash threshold and keep only the data leading up to that point.
# NOTE(review): the original chunk was garbled ("X$valuethreshold" with
# the comparison operator missing, no return value from the ddply body,
# and `dt` never defined); reconstructed by symmetry with the null
# analysis in the nullzoom/nullmelt chunks below -- verify against the
# original source.
zoom <- ddply(dat, "reps", function(X){
  tip <- min(which(X$value < threshold)) # first time point below threshold
  data.frame(time = X$time[1:tip], value = X$value[1:tip])
})
dt <- data.table(zoom)
# Kendall's tau of the trend in each warning-signal statistic over the
# pre-crash window (warningtrend/window_* from the earlywarning package).
var <- dt[, warningtrend(data.frame(time=time, value=value), window_var), by=reps]$V1
acor <- dt[, warningtrend(data.frame(time=time, value=value), window_autocorr), by=reps]$V1
dat <- melt(data.frame(Variance=var, Autocorrelation=acor))
````
### Null distribution
To compare against the expected distribution of these statistics, we create another set of simulations without conditioning on having experienced a chance transition, on which we perform the identical analysis.
``` {r may_null}
# Null model: simulate 1000 replicates with the SAME May-model dynamics
# as the conditional case, but return the first 201 time points of EVERY
# replicate -- no selection on whether the population crashed. (Despite
# the name, nothing is selected; the name is kept so this definition
# shadows the conditional one when the chunks run in sequence.)
#
# Args:
#   n: nominal simulation length; actual steps = n / 10, which must be
#      at least 2010 so that 201 time points exist.
# Returns:
#   A 201 x 1000 matrix, one column per replicate.
select_crashes <- function(n){
  n <- n / 10 # doesn't need as long as the individual-based
  sn <-
    sapply(seq_len(1000), function(rep){
      x <- vector(mode = "double", length = n)
      x[1] <- 8 # positive equilibrium
      z <- rlnorm(n, 0, .1) # multiplicative log-normal noise
      r <- .75; k <- 10; a <- 1.55; H <- 1; Q <- 3
      # Loop only to n-1 so x keeps its preallocated length n: the
      # original looped 1:n and wrote x[n+1], silently growing the
      # vector by one element.
      for(t in seq_len(n - 1)){
        x[t + 1] <- z[t] * x[t] *
          exp(r * (1 - x[t] / k) - a * x[t] ^ (Q - 1) / (x[t] ^ Q + H ^ Q))
      }
      x
    })
  # Truncate all replicates to a common 201-step window.
  sn[1:201, ]
}
````
``` {r simdat_null, dependson="may_null"}
# NOTE(review): populationdynamics does not appear to be used in this
# chunk (the model is coded inline above) -- presumably carried over from
# the individual-based script; confirm before removing.
sfLibrary(populationdynamics)
sfExportAll()
examples <- sfLapply(1:10, function(i) select_crashes(50000))
# Melt into one long data frame with columns (time, reps, value).
# The original then overwrote this with melt(examples), whose list-melt
# output has four columns, so the three-name assignment below misfired;
# that redundant line is removed.
nulldat <- melt(as.matrix(as.data.frame(examples, check.names=FALSE)))
names(nulldat) <- c("time", "reps", "value")
# Use numbers for reps (the original indexed levels(dat$reps), i.e. the
# conditional data set, by mistake).
levels(nulldat$reps) <- seq_along(levels(nulldat$reps))
````
Zoom in on the relevant area of data near the crash
``` {r nullzoom, dependson="simdat_null"}
require(plyr)
# The null series share a common length, so no truncation at a crash is
# needed; rebuild a (time, value) frame per replicate so the null data
# flow mirrors the conditional analysis exactly.
nullzoom <- ddply(nulldat, "reps", function(d) {
  data.frame(time = d$time, value = d$value)
})
````
Compute the warning signals on the null distribution for comparison:
``` {r nullmelt, dependson="nullzoom"}
# Convert to data.table for fast per-replicate computation.
nulldt <- data.table(nullzoom)
# Trend statistic of each warning signal within each replicate.
# warningtrend/window_var/window_autocorr come from the earlywarning
# package loaded above; presumably warningtrend returns Kendall's tau of
# the windowed statistic against time (the x-axis label in the final
# figure) -- confirm against the package documentation.
nullvar <- nulldt[, warningtrend(data.frame(time=time, value=value), window_var), by=reps]$V1
nullacor <- nulldt[, warningtrend(data.frame(time=time, value=value), window_autocorr), by=reps]$V1
# Long format: one row per (replicate, statistic) with columns
# (variable, value) for faceting in the figure.
nulldat <- melt(data.frame(Variance=nullvar, Autocorrelation=nullacor))
````
Plot the final figure:
``` {r fig3, dependson="nullmelt"}
# Figure 3: histograms of the trend statistics for the crashed
# (conditioned) replicates, overlaid with a smoothed kernel density of
# the null distribution, faceted by statistic.
ggplot(dat) +
geom_histogram(aes(value, y=..density..), binwidth=0.3, alpha=.5) +
facet_wrap(~variable) + xlim(c(-1, 1)) +
geom_density(data=nulldat, aes(value), adjust=3) +
xlab("Kendall's tau") + theme_bw()
````
``` {r Figure3, dev="CairoPS", fig.ext="eps", fig.width=6, fig.height=5, include=FALSE}
# Same plot as the fig3 chunk above, re-emitted through the CairoPS
# device (see chunk options) to produce the EPS version for publication.
ggplot(dat) + geom_histogram(aes(value, y=..density..), binwidth=0.3, alpha=.5) +
facet_wrap(~variable) + xlim(c(-1, 1)) +
geom_density(data=nulldat, aes(value), adjust=3) + xlab("Kendall's tau") + theme_bw()
````
``` {r save_final_data}
# Persist the conditioned and null trend-statistic distributions that
# make up Figure 3.
write.csv(dat, file="Figure3_dat.csv")
write.csv(nulldat, file="Figure3_nulldat.csv")
```