# timeList_sorting_for_verts.R

library(paleotree)

# Get ages in paleotree timeList format
timeList <- dget("dinos_timeList_02-28-14.txt")

# Drop the outgroup before analysis
timeList[[2]] <- timeList[[2]][-which(rownames(timeList[[2]]) == "OUTGROUP"), ]

# Resolve the timeList into sets of non-overlapping (sequential) intervals,
# without and with weighting interval choice by sampling
res <- seqTimeList(timeList)
res1 <- seqTimeList(timeList, weightSampling = TRUE)

# Compare how many intervals and taxa are retained under each scheme
layout(1:2)
boxplot(res$nIntervals, res1$nIntervals)
boxplot(res$nTaxa, res1$nTaxa)

# Fit the discrete-time duration-frequency model to every resolved timeList
# to get per-interval extinction and sampling parameter estimates
sampPars <- t(sapply(res$timeLists, function(x)
    optimPaleo(make_durationFreqDisc(x))$par))
sampPars1 <- t(sapply(res1$timeLists, function(x)
    optimPaleo(make_durationFreqDisc(x))$par))

# Mean interval length (in Myr) for each resolved timeList
meanIntLen <- sapply(res$timeLists, function(x) mean(-apply(x[[1]], 1, diff)))
meanIntLen1 <- sapply(res1$timeLists, function(x) mean(-apply(x[[1]], 1, diff)))

# Convert per-interval sampling probabilities to instantaneous rates per Lmy
sampRate <- sapply(1:length(meanIntLen), function(x)
    sProb2sRate(R = sampPars[x, 2], int.length = meanIntLen[x]))
sampRate1 <- sapply(1:length(meanIntLen1), function(x)
    sProb2sRate(R = sampPars1[x, 2], int.length = meanIntLen1[x]))

# Drop all infinite sampling rate values as erroneous
sRateClean <- sampRate[!is.infinite(sampRate)]
sRateClean1 <- sampRate1[!is.infinite(sampRate1)]
#
nTaxaClean <- res$nTaxa[!is.infinite(sampRate)]
nTaxaClean1 <- res1$nTaxa[!is.infinite(sampRate1)]
#
nIntClean <- res$nIntervals[!is.infinite(sampRate)]
nIntClean1 <- res1$nIntervals[!is.infinite(sampRate1)]
#
meanIntLenClean <- meanIntLen[!is.infinite(sampRate)]
meanIntLenClean1 <- meanIntLen1[!is.infinite(sampRate1)]

layout(1)
boxplot(sRateClean, sRateClean1, names = c("Unweighted", "Weighted"),
    ylab = "Est. Sampling Rate per Lmy (Inf Results Dropped)")
mtext(text = paste("Median:", round(median(sRateClean), 3),
    " Median:", round(median(sRateClean1), 3)), 1, 2)

# How does the estimated sampling rate relate to the structure of the data?
layout(1:3)
plot(sRateClean, nTaxaClean, xlab = "Est. Sampling Rate (Inf Dropped)",
    ylab = "Number of Taxa Saved", main = "Unweighted Analyses")
plot(sRateClean, nIntClean, xlab = "Est. Sampling Rate (Inf Dropped)",
    ylab = "Number of Intervals Saved")
plot(sRateClean, meanIntLenClean, xlab = "Est. Sampling Rate (Inf Dropped)",
    ylab = "Mean Interval Length")

layout(1:3)
plot(nTaxaClean, meanIntLenClean, xlab = "Number of Taxa Saved",
    ylab = "Mean Interval Length", main = "Unweighted Analyses")
plot(nIntClean, meanIntLenClean, xlab = "Number of Intervals Saved",
    ylab = "Mean Interval Length")
plot(nTaxaClean, nIntClean, xlab = "Number of Taxa Saved",
    ylab = "Number of Intervals Saved")

############
# Fit distributions to the sampling rate estimates

library(MASS)
library(paleoTS)

# Compare different models for fit: gamma, exponential, log-normal
barplot(akaike.wts(c(
    IC(fitdistr(sRateClean, "gamma")$loglik, 2, n = length(sRateClean)),
    IC(fitdistr(sRateClean, "exponential")$loglik, 1, n = length(sRateClean)),
    IC(fitdistr(sRateClean, "log-normal")$loglik, 2, n = length(sRateClean))
    )), names = c("gamma", "exponential", "log-normal"), ylab = "Akaike Weights")

# Log-normal QQ plots
layout(1:2)
modelFit <- fitdistr(sRateClean, "log-normal")$estimate
qqplot(sRateClean, qlnorm(ppoints(length(sRateClean)), modelFit[1], modelFit[2]),
    xlab = "Data Quantiles", ylab = "log-normal Model Quantiles", main = "Unweighted")
abline(0, 1)
modelFit1 <- fitdistr(sRateClean1, "log-normal")$estimate
qqplot(sRateClean1, qlnorm(ppoints(length(sRateClean1)), modelFit1[1], modelFit1[2]),
    xlab = "Data Quantiles", ylab = "log-normal Model Quantiles", main = "Weighted")
abline(0, 1)

# Gamma QQ plots
layout(1:2)
modelFit <- fitdistr(sRateClean, "gamma")$estimate
qqplot(sRateClean, qgamma(ppoints(length(sRateClean)), shape = modelFit[1], rate = modelFit[2]),
    xlab = "Data Quantiles", ylab = "Gamma Model Quantiles", main = "Unweighted")
abline(0, 1)
modelFit1 <- fitdistr(sRateClean1, "gamma")$estimate
qqplot(sRateClean1, qgamma(ppoints(length(sRateClean1)), shape = modelFit1[1], rate = modelFit1[2]),
    xlab = "Data Quantiles", ylab = "Gamma Model Quantiles", main = "Weighted")
abline(0, 1)

# Parameter estimates for the log-normal fit
fitdistr(sRateClean, "log-normal")$estimate
# hist(rlnorm(100, meanlog = -4.01, sdlog = 1.79))

#############################
# Now extinction rate: convert to per Lmy
extRate <- sampPars[, 1] / meanIntLen
extRate1 <- sampPars1[, 1] / meanIntLen1

# Drop the same analyses that produced infinite sampling rates
extClean <- extRate[!is.infinite(sampRate)]
extClean1 <- extRate1[!is.infinite(sampRate1)]

boxplot(extClean, extClean1)
median(extClean)
median(extClean1)

layout(1)
boxplot(extClean, extClean1, names = c("Unweighted", "Weighted"),
    ylab = "Est. Extinction Rate per Lmy (Inf Samp. Rate Results Dropped)")
mtext(text = paste("Median:", round(median(extClean), 3),
    " Median:", round(median(extClean1), 3)), 1, 2)

# Compare different models for fit: gamma, exponential, log-normal
barplot(akaike.wts(c(
    IC(fitdistr(extClean, "gamma")$loglik, 2, n = length(extClean)),
    IC(fitdistr(extClean, "exponential")$loglik, 1, n = length(extClean)),
    IC(fitdistr(extClean, "log-normal")$loglik, 2, n = length(extClean))
    )), names = c("gamma", "exponential", "log-normal"), ylab = "Akaike Weights")

# Gamma QQ plots
layout(1:2)
modelFit <- fitdistr(extClean, "gamma")$estimate
qqplot(extClean, qgamma(ppoints(length(extClean)), shape = modelFit[1], rate = modelFit[2]),
    xlab = "Data Quantiles", ylab = "Gamma Model Quantiles", main = "Unweighted")
abline(0, 1)
modelFit1 <- fitdistr(extClean1, "gamma")$estimate
qqplot(extClean1, qgamma(ppoints(length(extClean1)), shape = modelFit1[1], rate = modelFit1[2]),
    xlab = "Data Quantiles", ylab = "Gamma Model Quantiles", main = "Weighted")
abline(0, 1)

# Parameter estimates for the gamma fit
fitdistr(extClean, "gamma")$estimate
# hist(rgamma(100, shape = 0.248, rate = 0.265))

########################
# Empirical versus simulated joint distribution of the two rates
layout(1:2)
plot(sRateClean, extClean, xlab = "Sampling Rate", ylab = "Extinction Rate")
sampRateSim <- rlnorm(100, meanlog = -4.01, sdlog = 1.79)
extRateSim <- rgamma(100, shape = 0.248, rate = 0.265)
plot(sampRateSim, extRateSim, xlab = "Simulated Sampling Rate",
    ylab = "Simulated Extinction Rate")
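
########################
# Optional downstream-use sketch (not part of the original analysis above):
# the fitted log-normal sampling-rate and gamma extinction-rate distributions
# could parameterize a birth-death-sampling simulation in paleotree.
# simFossilRecord() and fossilRecord2fossilRanges() are not used elsewhere in
# this script, their argument defaults may differ across paleotree versions,
# and setting speciation equal to extinction is an illustrative assumption only.
set.seed(444)
rTypical <- qlnorm(0.5, meanlog = -4.01, sdlog = 1.79)  # median sampling rate per Lmy (~0.018)
qTypical <- 0.248 / 0.265                               # mean extinction rate per Lmy (~0.94)
simRecord <- simFossilRecord(p = qTypical, q = qTypical, r = rTypical,
    nruns = 1, nTotalTaxa = c(10, 100))
simRanges <- fossilRecord2fossilRanges(simRecord)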