Michigan Deer Antler Diameter

Spatio-temporal modeling of areal data with space-time interaction

Gary Roloff
Sean Sultaire
John Humphreys
Contact: John (humph173@msu.edu)

date() 
## [1] "Mon Aug 26 15:45:00 2019"

Options

library(knitr)
library(rgl)
opts_knit$set(verbose = FALSE)
knit_hooks$set(webgl = hook_webgl)

Set Directory

setwd("~/Michigan_State/SLP_Beam_Diam")

Load Packages:

suppressMessages(library(INLA))
suppressMessages(library(rgdal))
suppressMessages(library(reshape2))
suppressMessages(library(raster))
suppressMessages(library(fields))
suppressMessages(library(FactoMineR))
suppressMessages(library(missMDA))
suppressMessages(library(rnoaa))
suppressMessages(library(climdex.pcic))
suppressMessages(library(seas))
suppressMessages(library(factoextra))
suppressMessages(library(corrplot))
suppressMessages(library(rasterVis))
suppressMessages(library(rgeos))
suppressMessages(library(maptools))
suppressMessages(library(mapproj))
suppressMessages(library(spdep))
suppressMessages(library(ggplot2))
suppressMessages(library(ggthemes))
suppressMessages(library(GISTools))
suppressMessages(library(lattice))
suppressMessages(library(gridExtra))
suppressMessages(library(xtable))
suppressMessages(library(Rmisc))
suppressMessages(library(dplyr))
suppressMessages(library(dismo))
suppressMessages(library(tseries))
suppressMessages(library(data.table))
suppressMessages(library(readr))
suppressMessages(library(stringr))
suppressMessages(library(kableExtra))
suppressMessages(library(lettercase))
suppressMessages(library(lubridate))
suppressMessages(library(astsa))
source("RScripts.R") 

Load and Clean Antler Data (1987-2013)

Beamdata = read.csv("./beam_records_all_051118.csv", stringsAsFactors=FALSE) #1987-2013

SexR13 = Beamdata %>% 
            filter(cseason == "Early Firearm" | 
                   cseason == "Regular Firearm" |
                   cseason == "Late Firearm") %>%
             select(county, year, sex, age)

SexR13$sex = ifelse(SexR13$sex == 1, "M", "F")

MyBeam13 = Beamdata %>%
           mutate(LBeam = lbeam,
                  RBeam = rbeam,
                  Points = points,
                  Date = dmy(date)) %>%
           select(county, year, Date, LBeam, RBeam, Points, season, sex, age) %>%
           filter(county != 99,
                  age == 1) #selecting yearling

Table to ID Regions

Regs.df = as.data.frame(
                    Beamdata %>%
                      group_by(county) %>%
                      summarize(Reg = median(region)))[1:83,]

Load and Clean Antler Data (2014-2016)

Beamdata16 = read.csv("./deeCheckStationData2014_2016.csv", stringsAsFactors=FALSE) #2014-2016

DupCriteria = c("Harvest_Year", "Harvest_Date", "Harvest_Section", "Harvest_TRS", "Gender", 
                "Age_Abbr", "LBeam_Diameter_Number", "RBeam_Diameter_Number", 
                "Total_Points_Number", "Data_Collector", "Station_Number")

Beamdata16$Duplicate = duplicated(Beamdata16[, DupCriteria]) | duplicated(Beamdata16[, DupCriteria], fromLast = TRUE)

length(which(Beamdata16$Duplicate == TRUE))
## [1] 2503
Beamdata16$Dups = duplicated(Beamdata16[, DupCriteria])
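
A toy illustration of the two flags computed above (made-up values): Duplicate marks every member of a duplicated set, while Dups marks only the repeats after the first occurrence.

x = c("a", "b", "a", "a")
duplicated(x)                                   #FALSE FALSE TRUE TRUE (repeats after the first)
duplicated(x) | duplicated(x, fromLast = TRUE)  #TRUE FALSE TRUE TRUE (all copies flagged)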

SexR16 = Beamdata16 %>% 
            filter(Dups == FALSE,
                   Harvest_Season == "Early Firearm       " | 
                   Harvest_Season == "Regular Firearm     " |
                   Harvest_Season == "Late Firearm        ") %>%
            mutate(sex = Gender,
                   age = Age_Abbr,
                   year = Harvest_Year,
                   County = trimws(Harvest_County)) %>%
            select(County, year, sex, age)

MyBeam16 = Beamdata16 %>%
           mutate(Date = as.Date(Harvest_Date, "%m/%d/%Y"),
                  County = trimws(Harvest_County),
                  Points_Number = Total_Points_Number,
                  year = Harvest_Year,
                  season = Harvest_Season,
                  sex = Gender, 
                  age = Age_Abbr) %>%
           select(County, year, season, Date, sex, age, LBeam_Diameter, RBeam_Diameter, Dups, Points_Number) %>%
           filter(age == 1,
                  Dups == FALSE,
                  LBeam_Diameter != "B ",
                  LBeam_Diameter != "U ",
                  LBeam_Diameter != "",
                  is.na(LBeam_Diameter)==FALSE,
                  RBeam_Diameter != "B ",
                  RBeam_Diameter != "U ",
                  RBeam_Diameter != "",
                  is.na(RBeam_Diameter)==FALSE)

MyBeam16 =  MyBeam16 %>%
            mutate(LBeam = as.numeric(LBeam_Diameter),
                   RBeam = as.numeric(RBeam_Diameter),
                   Points = Points_Number) %>%
            select(-c(Points_Number, LBeam_Diameter, RBeam_Diameter, Dups))

Load and Clean Antler Data (2017)

Beamdata17 = read.csv("./deeCheckStationData2017.csv", stringsAsFactors=FALSE) #2017

Beamdata17$Entered_DateTime = as.POSIXct(Beamdata17$Entered_DateTime, format = "%d/%m/%Y %I:%M:%S %p")


DupCriteria = c("Season_Year", "Harvest_Date", "Corrected_Section_Taken", "Beam1_Number", "Beam1_Number", 
                "Recorder_Name", "Registration_Sex", "Points_Number", "Corrected_Unit", "Station_Number", "Entered_DateTime")

Beamdata17$Duplicate = duplicated(Beamdata17[, DupCriteria]) | duplicated(Beamdata17[, DupCriteria], fromLast = TRUE)

length(which(Beamdata17$Duplicate == TRUE))
## [1] 10985
Beamdata17$Dups = duplicated(Beamdata17[, DupCriteria])

SexR17 = Beamdata17 %>% 
            filter(Dups == FALSE,
                   Harvest_Season == "Early Firearm" | 
                   Harvest_Season == "Regular Firearm" |
                   Harvest_Season == "Late Firearm") %>%
            mutate(sex = Registration_Sex,
                   age = Registration_Age,
                   year = Season_Year,
                   county = Calculated_County_Number_Taken) %>%
            select(county, year, sex, age)

MyBeam17 = Beamdata17 %>%
           mutate(Date = as.Date(Harvest_Date, "%m/%d/%Y"),
                  County = trimws(Corrected_County_Name_Taken),
                  County = gsub("Saint", "St.", County),
                  year = Season_Year,
                  season = Harvest_Season,
                  sex = Registration_Sex, 
                  age = Registration_Age) %>%
           select(County, year, season, Date, sex, age, Beam1_Number, Beam2_Number, Dups, Points_Number) %>%
           filter(age == 1,
                  Dups == FALSE,
                  is.na(Beam1_Number)==FALSE,
                  is.na(Beam2_Number)==FALSE)

MyBeam17 =  MyBeam17 %>%
            mutate(LBeam = as.numeric(Beam1_Number),
                   RBeam = as.numeric(Beam2_Number),
                   Points = Points_Number) %>%
            select(-c(Beam1_Number, Beam2_Number, Points_Number, Dups))

Get County Boundaries

Source: http://gis-michigan.opendata.arcgis.com/datasets/counties-v17a

Counties = readOGR(dsn = "./Counties_v17a", 
                   layer = "Counties_v17a", 
                   stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\Michigan_State\SLP_Beam_Diam\Counties_v17a", layer: "Counties_v17a"
## with 83 features
## It has 15 fields
## Integer64 fields read as strings:  OBJECTID FIPSNUM

Add County Names and Codes

LookUp.tab = Counties@data[, c("OBJECTID", "NAME")]

MyBeam13$County = with(LookUp.tab,
                     NAME[match(
                       MyBeam13$county,
                            OBJECTID)])

MyBeam16$county = with(LookUp.tab,
                       OBJECTID[match(
                         MyBeam16$County,
                              NAME)])

MyBeam17$county = with(LookUp.tab,
                       OBJECTID[match(
                         MyBeam17$County,
                                  NAME)])

Combine All Years

MyBeam = rbind(MyBeam13, MyBeam16, MyBeam17)

Sex Ratio

SexR16$county = with(LookUp.tab,
                       OBJECTID[match(
                         SexR16$County,
                                  NAME)])

SexR16 = SexR16 %>% select(-County)

SR.df = rbind(SexR13, SexR16, SexR17) %>% 
         mutate(county = as.integer(county)) %>%
               filter(county >=1 & county <= 83)

SR.df$Age = ifelse(SR.df$age == "0", "Fawn", 
              ifelse(SR.df$age == "1", "Yearling", "Adult"))
  

Fawn1 = as.data.frame(
              SR.df %>%
                 filter(Age == "Fawn") %>%
                 group_by(county, year) %>%
                 summarize(Fwn = length(age)))

YB1 = as.data.frame(
              SR.df %>%
                 filter(Age == "Yearling",
                        sex == "M") %>%
                 group_by(county, year) %>%
                 summarize(YB = length(age)))

YD1 = as.data.frame(
              SR.df %>%
                 filter(Age == "Yearling",
                        sex == "F") %>%
                 group_by(county, year) %>%
                 summarize(YD = length(age)))

B1 = as.data.frame(
              SR.df %>%
                 filter(Age == "Adult",
                        sex == "M") %>%
                 group_by(county, year) %>%
                 summarize(B = length(age)))

D1 = as.data.frame(
              SR.df %>%
                 filter(Age == "Adult",
                        sex == "F") %>%
                 group_by(county, year) %>%
                 summarize(D = length(age)))

Yr.lvl = levels(factor(SR.df$year))

SR_87_13.df = as.data.frame(
                    cbind(
                      county = rep(1:83, each = length(Yr.lvl)),
                      year = rep(Yr.lvl, times = 83)))

SR_87_13.df = join(SR_87_13.df,  Fawn1, by=c("county", "year"), type="left")
SR_87_13.df = join(SR_87_13.df,  YB1, by=c("county", "year"), type="left")
SR_87_13.df = join(SR_87_13.df,  YD1, by=c("county", "year"), type="left")
SR_87_13.df = join(SR_87_13.df,  B1, by=c("county", "year"), type="left")
SR_87_13.df = join(SR_87_13.df,  D1, by=c("county", "year"), type="left")

SR_87_13.df = SR_87_13.df %>%
               mutate(N = B + D,
                      SR = B/D,
                      pYB = YB/(YB + B),
                      pYD = YD/(YD + D),
                      FD.ratio = Fwn/D,
                      DB.ratio = pYD/pYB,
                      B.mort = pYB*0.9,
                      Avl.B = (B+YB)/0.9,
                      Avl.D = Avl.B*DB.ratio,
                      Avl.F = Avl.D*FD.ratio,
                      N2 = Avl.B + Avl.D + Avl.F)
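
Quick numeric trace of the ratio formulas above using made-up harvest counts (illustration only, not real data):

ex = data.frame(Fwn = 30, YB = 20, YD = 15, B = 50, D = 40) #made-up counts
with(ex, {
     pYB = YB/(YB + B)    #yearling share of harvested bucks
     pYD = YD/(YD + D)    #yearling share of harvested does
     c(SR = B/D,          #adult buck:doe ratio
       FD.ratio = Fwn/D,  #fawns per adult doe
       DB.ratio = pYD/pYB)})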

View Counties

The region outlined in red shows the 12 counties subject to antler point restrictions (APRs).

MapCent = gCentroid(Counties, byid=T)
cnt.df = as.data.frame(MapCent@coords)
cnt.df$id = rownames(cnt.df)
cnt.df$Name = Counties$NAME

APR.cnty = gUnaryUnion(
            subset(Counties, 
                 NAME == "Antrim"  | 
                 NAME == "Benzie"  | 
                 NAME == "Charlevoix"  | 
                 NAME == "Emmet"  | 
                 NAME == "Grand Traverse"  | 
                 NAME == "Kalkaska"  | 
                 NAME == "Lake"  | 
                 NAME == "Manistee"  |
                 NAME == "Mason"  | 
                 NAME == "Missaukee"  | 
                 NAME == "Osceola"  | 
                 NAME == "Wexford"))

wmap_df = fortify(Counties)
## Regions defined for each Polygons
apr.cnty = fortify(APR.cnty)


ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "tan", col="gray21") +
        geom_polygon(data=apr.cnty, 
                   aes(long,lat, group=group),
                   fill = "gray", col="red", alpha=0.5) +
        geom_point(data=cnt.df, 
                   aes(x, y, 
                       group=cnt.df$id), size = 0) +
        geom_text(data=cnt.df, aes(label = cnt.df$Name, 
                                   x = x, y = y, 
                                   group=cnt.df$Name),
                                   cex=2,  col = "black") +
        xlab("Longitude") +
        ylab("Latitude") +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              plot.background = element_rect(fill = "gray80", linetype="solid"),
              panel.border = element_blank(),
              legend.title = element_text(size=16, face="bold"),
              legend.key = element_rect(fill = "gray80", linetype=0),
              legend.background = element_rect(fill = "gray80", linetype=0),
              axis.title.x = element_text(size=18, face="bold"),
              axis.title.y = element_text(size=18, face="bold"),
              axis.text.x = element_text(size=10, face="bold"),
              axis.text.y = element_text(size=10, face="bold"),
              plot.title = element_blank())

Quick peek at the response variables

hist(MyBeam$LBeam)

hist(MyBeam$RBeam)

hist(MyBeam$Points)

Check Data

dim(MyBeam)
## [1] 423888     10
head(MyBeam)
##   county year       Date LBeam RBeam Points season sex age      County
## 1      4 2012 2012-11-29    NA    NA     NA      1   2   1      Alpena
## 2      4 2012 2012-11-29    NA    NA     NA      1   2   1      Alpena
## 3      4 2012 2012-11-29     6     5      2      1   1   1      Alpena
## 4      4 2012 2012-11-29    NA    NA     NA      1   2   1      Alpena
## 5     60 2012 2012-11-29    NA    NA     NA      1   2   1 Montmorency
## 6      4 2012 2012-11-29    15    15      4      1   1   1      Alpena
Co.names = levels(factor(MyBeam$County))
Co.names
##  [1] "Alcona"         "Alger"          "Allegan"        "Alpena"        
##  [5] "Antrim"         "Arenac"         "Baraga"         "Barry"         
##  [9] "Bay"            "Benzie"         "Berrien"        "Branch"        
## [13] "Calhoun"        "Cass"           "Charlevoix"     "Cheboygan"     
## [17] "Chippewa"       "Clare"          "Clinton"        "Crawford"      
## [21] "Delta"          "Dickinson"      "Eaton"          "Emmet"         
## [25] "Genesee"        "Gladwin"        "Gogebic"        "Grand Traverse"
## [29] "Gratiot"        "Hillsdale"      "Houghton"       "Huron"         
## [33] "Ingham"         "Ionia"          "Iosco"          "Iron"          
## [37] "Isabella"       "Jackson"        "Kalamazoo"      "Kalkaska"      
## [41] "Kent"           "Keweenaw"       "Lake"           "Lapeer"        
## [45] "Leelanau"       "Lenawee"        "Livingston"     "Luce"          
## [49] "Mackinac"       "Macomb"         "Manistee"       "Marquette"     
## [53] "Mason"          "Mecosta"        "Menominee"      "Midland"       
## [57] "Missaukee"      "Monroe"         "Montcalm"       "Montmorency"   
## [61] "Muskegon"       "Newaygo"        "Oakland"        "Oceana"        
## [65] "Ogemaw"         "Ontonagon"      "Osceola"        "Oscoda"        
## [69] "Otsego"         "Ottawa"         "Presque Isle"   "Roscommon"     
## [73] "Saginaw"        "Sanilac"        "Schoolcraft"    "Shiawassee"    
## [77] "St. Clair"      "St. Joseph"     "Tuscola"        "Van Buren"     
## [81] "Washtenaw"      "Wayne"          "Wexford"
Years.s = levels(factor(MyBeam$year))
Years.s
##  [1] "1987" "1988" "1989" "1990" "1991" "1992" "1993" "1994" "1995" "1996"
## [11] "1997" "1998" "1999" "2000" "2001" "2002" "2003" "2004" "2005" "2006"
## [21] "2007" "2008" "2009" "2010" "2011" "2012" "2013" "2014" "2015" "2016"
## [31] "2017"

Load Crop Data

Source: https://www.nass.usda.gov/Statistics_by_State/Michigan/Publications/County_Estimates/index.php

Crop.df = read.csv("./MI_Crops_1987_2017.csv", stringsAsFactors=FALSE)

Crop.df = Crop.df %>%
          select(Year, County, Commodity, Data.Item, Value) 

head(Crop.df)
##   Year County Commodity                                  Data.Item   Value
## 1 2017  CLARE      CORN                       CORN - ACRES PLANTED   5,900
## 2 2017  CLARE      CORN              CORN, GRAIN - ACRES HARVESTED   4,000
## 3 2017  CLARE      CORN   CORN, GRAIN - PRODUCTION, MEASURED IN BU 580,000
## 4 2017  CLARE      CORN CORN, GRAIN - YIELD, MEASURED IN BU / ACRE     145
## 5 2017  CLARE      OATS                     OATS - ACRES HARVESTED     580
## 6 2017  CLARE      OATS                       OATS - ACRES PLANTED     600
Keep = c("HARVESTED|PRODUCTION|PLANTED|YIELD")
Crop.df$Item = str_extract(Crop.df$Data.Item, Keep)
Crop.df = Crop.df[complete.cases(Crop.df), ]

Crop.df = Crop.df %>% 
          filter(County != "OTHER (COMBINED) COUNTIES") %>%
          mutate(Year = factor(as.character(Year)),
                 Variable = paste(Commodity, Item, sep="_"),
                 Value = gsub("[[:punct:]]", "", Value),
                 County = str_title_case(str_lowercase(County)),
                 County = gsub("St", "St.", County),
                 County = factor(as.character(County)),
                 Variable = factor(as.character(Variable)),
                 Value = as.numeric(Value)) %>%
          select(-c(Commodity, Data.Item, Item)) 
          

Crop.df = dcast(Crop.df, 
                formula = County+Year ~ Variable, 
                fill = 0, value.var = "Value", 
                fun.aggregate = sum, drop = FALSE)

Crop.df[Crop.df == 0] = NA

head(Crop.df)
##   County Year BEANS_HARVESTED BEANS_PLANTED BEANS_PRODUCTION BEANS_YIELD
## 1 Alcona 1987             380           400             3800        2000
## 2 Alcona 1988             200           300             2000        2000
## 3 Alcona 1989             180           200             2200        1220
## 4 Alcona 1990             250           300             4000        1600
## 5 Alcona 1991              NA            NA               NA          NA
## 6 Alcona 1992             420           500             7000        1670
##   CORN_HARVESTED CORN_PLANTED CORN_PRODUCTION CORN_YIELD OATS_HARVESTED
## 1           1900         2100          158500       1140            800
## 2           1870         2000           47000        845            900
## 3           2150         2200           64000        549           1100
## 4           1900         2000           78000        671            780
## 5           2000         2000          106500        680            400
## 6           1800         3000           74500        658            330
##   OATS_PLANTED OATS_PRODUCTION OATS_YIELD SOYBEANS_HARVESTED
## 1         1000           41300        516                 50
## 2         1300           21500        239                 50
## 3         1200           65000        591                 80
## 4          900           45000        577                 NA
## 5          500           17000        425                 NA
## 6          400           18000        545                 70
##   SOYBEANS_PLANTED SOYBEANS_PRODUCTION SOYBEANS_YIELD SUGARBEETS_HARVESTED
## 1               50                1350             27                   NA
## 2               50                1100             22                   NA
## 3               80                2100            263                   NA
## 4               NA                  NA             NA                   NA
## 5               NA                  NA             NA                   NA
## 6              100                 850            121                   NA
##   SUGARBEETS_PLANTED SUGARBEETS_PRODUCTION SUGARBEETS_YIELD
## 1                 NA                    NA               NA
## 2                 NA                    NA               NA
## 3                 NA                    NA               NA
## 4                 NA                    NA               NA
## 5                 NA                    NA               NA
## 6                 NA                    NA               NA
##   WHEAT_HARVESTED WHEAT_PLANTED WHEAT_PRODUCTION WHEAT_YIELD
## 1            1000          1200            44000          88
## 2            1800          1900            65000         722
## 3            1700          1800            80000         942
## 4            1380          1400            68000         986
## 5             940          1000            36000         766
## 6            1340          1400            54000         806

Reduce Crop Data

Crop.imp = as.data.frame(imputePCA(Crop.df[,3:26], 
                         ncp=22, maxiter = 999)$completeObs)

Crop.pca = PCA(Crop.imp, 
               scale.unit = TRUE, 
               ncp = 5, 
               graph = FALSE)

SVD.df = as.data.frame(Crop.pca$svd$U)
names(SVD.df) = paste("SVD", paste(1:5), sep="")

apply(SVD.df,2,range)
##           SVD1      SVD2      SVD3      SVD4      SVD5
## [1,] -1.665620 -4.179704 -4.569588 -8.145038 -6.353382
## [2,]  7.136421  7.903224 14.350771  4.975552  4.533176
Crop.df = cbind(Crop.df, SVD.df)

CorTab = cor(Crop.df[,3:31], use="pairwise.complete")
corrplot(CorTab)
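
To gauge how much of the crop variation the retained components capture, the PCA eigenvalues and scree plot can be inspected (a quick sketch using FactoMineR/factoextra, both loaded above):

head(Crop.pca$eig)                   #eigenvalues, % of variance, cumulative %
fviz_eig(Crop.pca, addlabels = TRUE) #scree plot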

Clean Crop Data

Crop covariates are also joined to the antler data, matching by county within each year.

Yr.lv = levels(factor(Crop.df$Year))

for(i in 1:length(Yr.lv)){
  
            tLU.tab = Crop.df %>%
                      filter(Year ==  Yr.lv[i])
            
            tBm.df = MyBeam %>%
                     filter(year ==  Yr.lv[i])
            
            kp.df = join(tBm.df, tLU.tab, by="County", type="left")
            
         if(i == 1){MyBeam2 = kp.df} 
          else{MyBeam2 = rbind(MyBeam2, kp.df)}
}

MyBeam2$Year = MyBeam2$year

Scale and Center Crops

Cov.df = MyBeam2[,12:35]

for(i in 1:ncol(Cov.df)){
      Cov.df[,i] = as.numeric(scale(Cov.df[,i], scale = T, center = T))
}

names(Cov.df) = paste("s", names(Cov.df), sep="")

apply(Cov.df, 2, range, na.rm=T)
##      sBEANS_HARVESTED sBEANS_PLANTED sBEANS_PRODUCTION sBEANS_YIELD
## [1,]        -0.516943     -0.5266704        -0.4802203    -1.395671
## [2,]         6.631812      6.3816079         9.0315488     3.315625
##      sCORN_HARVESTED sCORN_PLANTED sCORN_PRODUCTION sCORN_YIELD
## [1,]       -1.015392     -1.032370       -0.9000729   -1.008246
## [2,]        3.701332      3.547101        3.2989534    2.967560
##      sOATS_HARVESTED sOATS_PLANTED sOATS_PRODUCTION sOATS_YIELD
## [1,]      -0.7240608    -0.7678715       -0.6243832  -0.8351188
## [2,]      15.6707788    14.1562125       19.0740135   3.4519994
##      sSOYBEANS_HARVESTED sSOYBEANS_PLANTED sSOYBEANS_PRODUCTION
## [1,]           -1.005589         -1.006778            -0.960451
## [2,]            2.979522          2.979297             3.682223
##      sSOYBEANS_YIELD sSUGARBEETS_HARVESTED sSUGARBEETS_PLANTED
## [1,]      -0.9376649            -0.8963345          -0.9034098
## [2,]       2.2643967             3.2545439           3.2181718
##      sSUGARBEETS_PRODUCTION sSUGARBEETS_YIELD sWHEAT_HARVESTED
## [1,]             -0.8348587         -1.861187       -0.7790677
## [2,]              4.5159958          1.980259        4.8728372
##      sWHEAT_PLANTED sWHEAT_PRODUCTION sWHEAT_YIELD
## [1,]     -0.7862516        -0.6990415   -0.9665892
## [2,]      5.0319243         5.6534157    3.0431404
MyBeam2 = cbind(MyBeam2, Cov.df)
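
The per-column loop above is equivalent to calling scale() on the covariate block at once; a compact alternative sketch (same columns 12:35, NAs handled column-wise by scale()):

Cov.sc = as.data.frame(scale(MyBeam2[, 12:35])) #center and scale every crop covariate
names(Cov.sc) = paste("s", names(Cov.sc), sep="")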

Load and Clean PRISM Monthly Climate Data

First = read.csv("./PRISM_series1_198701_200112.csv", stringsAsFactors=FALSE)
Second = read.csv("./PRISM_series2_200201_201312.csv", stringsAsFactors=FALSE)
Third = read.csv("./PRISM_series3_201401_201712.csv", stringsAsFactors=FALSE)

PRISM = rbind(First, Second, Third)

PRISM = PRISM %>%
        mutate(Idx = 1:nrow(PRISM),
               Year = as.integer(substr(Date, 1, 4)),
               Month = as.integer(substr(Date, 6, 7)))

PRISM$Season = ifelse(PRISM$Month == 12 | PRISM$Month <= 2, "Winter",
                      ifelse(PRISM$Month >= 3 & PRISM$Month <= 5, "Spring",
                             ifelse(PRISM$Month >= 6 & PRISM$Month <= 8, "Summer",
                                    ifelse(PRISM$Month >= 9 & PRISM$Month <= 11, "Fall",
                                           NA))))

Calculate Climatic Variables.

Cnty_lvs = levels(factor(PRISM$Name))
Yr_lvs = levels(factor(PRISM$Year))
Seas_lvs = levels(factor(PRISM$Season))

for(i in 1:length(Cnty_lvs)){
  
    tmp.df = PRISM %>% filter(Name == Cnty_lvs[i])
    
    for(j in 1:length(Yr_lvs)){
      
      tmp.df2 = tmp.df %>% filter(Year == Yr_lvs[j])
      
      Clim.df = as.data.frame(
                  tmp.df2 %>%
                     group_by(Season) %>%
                     summarise(PPTMax = max(ppt_in),
                               PPTMin = min(ppt_in),
                               PPTAvg = mean(ppt_in),
                               TempMax = max(tmax_f),
                               TempMin = min(tmin_f),
                               TempAvg = mean(tmean_f),
                               DewAvg = mean(tdmean_f),
                               VPMax = max(vpdmax_hPa),
                               VPMin = min(vpdmin_hPa)))
      
      Clim.F = Clim.df[1,2:10]
      names(Clim.F) = paste("F_", names(Clim.df[2:10]), sep="")
      
      Clim.Sp = Clim.df[2,2:10]
      names(Clim.Sp) = paste("Sp_", names(Clim.df[2:10]), sep="")

      Clim.Su = Clim.df[3,2:10]
      names(Clim.Su) = paste("Su_", names(Clim.df[2:10]), sep="")
      
      Clim.W = Clim.df[4,2:10]
      names(Clim.W) = paste("W_", names(Clim.df[2:10]), sep="")
      
      BioClim.df =  as.data.frame(
                      biovars(tmp.df2$ppt_in, 
                              tmp.df2$tmin_f, 
                              tmp.df2$tmax_f))

      ClimVar = cbind(Clim.F, Clim.Sp, Clim.Su, Clim.W, BioClim.df)
      
      ClimVar$Year2 =  Yr_lvs[j]
      ClimVar$county2 =  Cnty_lvs[i]
      
      if(j == 1){Yr.df = ClimVar}
          else{Yr.df = rbind(Yr.df, ClimVar)}
    }
      
if(i == 1){FClim.df = Yr.df}
          else{FClim.df = rbind(FClim.df, Yr.df)}
      
}

FClim.df$County = with(LookUp.tab,
                     NAME[match(
                       FClim.df$county2,
                            OBJECTID)])

Scale and Center

Cov.df = FClim.df[,1:55]

for(i in 1:ncol(Cov.df)){
      Cov.df[,i] = as.numeric(scale(Cov.df[,i], scale = T, center = T))
}

names(Cov.df) = paste("s", names(Cov.df), sep="")

apply(Cov.df, 2, range, na.rm=T)
##      sF_PPTMax sF_PPTMin sF_PPTAvg sF_TempMax sF_TempMin sF_TempAvg
## [1,] -2.153105 -2.065611 -2.249690  -3.181243  -4.146686  -3.770235
## [2,]  5.351474  4.667946  4.251536   2.443820   2.379860   2.415509
##      sF_DewAvg  sF_VPMax  sF_VPMin sSp_PPTMax sSp_PPTMin sSp_PPTAvg
## [1,] -3.920180 -2.206683 -1.612481  -2.103558  -1.902878  -2.407476
## [2,]  2.976639  3.299410  5.183839   4.994053   4.047536   3.926692
##      sSp_TempMax sSp_TempMin sSp_TempAvg sSp_DewAvg sSp_VPMax sSp_VPMin
## [1,]   -3.258150   -4.113935   -3.661339  -3.190592 -2.741134 -2.168546
## [2,]    2.266514    2.823693    3.113358   2.679437  2.578725  5.000602
##      sSu_PPTMax sSu_PPTMin sSu_PPTAvg sSu_TempMax sSu_TempMin sSu_TempAvg
## [1,]  -2.215521  -2.306875  -2.565763   -3.443568   -2.731916   -2.772590
## [2,]   5.862130   3.949674   3.877409    2.983498    2.682124    2.667825
##      sSu_DewAvg sSu_VPMax sSu_VPMin sW_PPTMax sW_PPTMin sW_PPTAvg
## [1,]  -4.021431 -2.793244 -1.444566 -2.196807 -1.924136 -2.166591
## [2,]   2.776874  4.118927  5.503902  4.484602  5.221264  5.073001
##      sW_TempMax sW_TempMin sW_TempAvg sW_DewAvg sW_VPMax  sW_VPMin
## [1,]  -3.375214  -3.480738  -3.392307 -3.234305 -2.84207 -2.184457
## [2,]   2.923848   2.424367   2.321205  2.801817  4.70639  3.867811
##          sbio1     sbio2     sbio3     sbio4     sbio5     sbio6     sbio7
## [1,] -3.441152 -3.115318 -3.205317 -2.593757 -3.446926 -3.480215 -2.828364
## [2,]  2.416824  3.213514  3.609070  3.593606  2.985402  2.431332  3.515457
##          sbio8     sbio9    sbio10    sbio11    sbio12    sbio13    sbio14
## [1,] -3.997229 -1.810934 -3.224679 -3.499509 -2.870396 -2.153374 -2.018767
## [2,]  1.656631  2.941832  2.460256  2.511412  4.128713  5.156541  4.005414
##         sbio15    sbio16    sbio17    sbio18    sbio19
## [1,] -2.914072 -2.523214 -2.557151 -2.503395 -2.058034
## [2,]  4.070966  3.963281  3.806903  3.871831  5.040367
FClim.df = cbind(FClim.df, Cov.df)

Reduce Climate Data

Clims.pca = PCA(FClim.df[,95:113], 
               scale.unit = TRUE, 
               ncp = 5, 
               graph = FALSE)

SVD2.df = as.data.frame(Clims.pca$svd$U)
names(SVD2.df) = paste("SVD2", paste(1:5), sep="")

FClim.df = cbind(FClim.df, SVD2.df)

Combine Climate Data with Antler Data

Yr.lv = levels(factor(FClim.df$Year))

for(i in 1:length(Yr.lv)){
  
            tLU.tab = FClim.df %>%
                      filter(Year2 ==  Yr.lv[i])
            
            tBm.df = MyBeam2 %>%
                     filter(year ==  Yr.lv[i])
            
            kp.df = join(tBm.df, tLU.tab, by="County", type="left")
            
         if(i == 1){MyBeam3 = kp.df} 
          else{MyBeam3 = rbind(MyBeam3, kp.df)}
}

MyBeam3 = MyBeam3 %>%
          select(-c(county2, Year2))

Regional Climate Analysis

Station Locations

Stat.loc = read.csv("./MI_WI_11-6B.csv", stringsAsFactors=FALSE) 

Stat.loc = Stat.loc[!duplicated(Stat.loc$STATION), ] %>%
              filter(LONGITUDE != "unknown",
                     LATITUDE != "unknown") %>%
              mutate(LONGITUDE = as.numeric(LONGITUDE),
                     LATITUDE = as.numeric(LATITUDE))


Locs = cbind(Stat.loc$LONGITUDE, Stat.loc$LATITUDE)

Stat.pnts = SpatialPointsDataFrame(Locs , Stat.loc)
proj4string(Stat.pnts) = proj4string(Counties)

plot(Counties)
plot(Stat.pnts, col = "red", add=T)

Get NOAA Historical Climate Data (GHCN)

Previously run; time intensive. Results were saved to NOAA_Clim.csv and are reloaded below.

#Stations = substr(unique(read.csv("./MI_WI_11-6B.csv", stringsAsFactors=FALSE)$STATION), 7, 17)

#Clim.df = meteo_pull_monitors(Stations,
#                                date_min = "1987-01-01",
#                                date_max = "2017-12-31")

#Clim.df = as.data.frame(Clim.df) %>%
#            select(id, date, prcp, snow, snwd, tavg, tmax, tmin, dapr, mdpr, mdsf)

#write.csv(Clim.df, "./NOAA_Clim.csv")

#token "VmLVwVYuKJixCseSiebNvcoObqeMirtI"

Climate Time Series Indices

Snow.df = read.csv("./NOAA_Clim.csv", stringsAsFactors=FALSE) %>% #preprocessed above
            mutate(date = as.Date(date, "%Y-%m-%d"),
                   Date = as.PCICt(as.character(date), 
                                   cal = "gregorian", 
                                   format = "%Y-%m-%d"),
                   Year = year(date),
                   Month = month(date),
                   Day = day(date),
                   DOY = yday(date),
                   precip = prcp/10, #to mm
                   t_max = tmax/10, #to C 
                   t_min = tmin/10) #to C 

Snow.df[is.na(Snow.df)] = 0

Snow.df$t_avg = (Snow.df$t_max + Snow.df$t_min)/2

Yr.lvs = levels(factor(Snow.df$Year))

loc.lvs = levels(factor(unique(Snow.df$id)))


for(i in 1:length(loc.lvs)){
  
   snw2.tmp = Snow.df %>% filter(id == loc.lvs[i])

   Srt.yr = as.numeric(min(levels(factor(snw2.tmp$Year))))
   End.yr = as.numeric(max(levels(factor(snw2.tmp$Year))))
   
   #ClimDex Extremes
   ci = climdexInput.raw(snw2.tmp$t_max,
                         snw2.tmp$t_min,
                         snw2.tmp$precip,
                         snw2.tmp$Date, 
                         snw2.tmp$Date,  
                         snw2.tmp$Date,  
                         base.range=c(Srt.yr, End.yr))
   
   Frm1.df = as.data.frame(
                  cbind(
                    GrowSeas = climdex.gsl(ci, gsl.mode = "GSL"), #Grow season length
                    PrecEx10mm = climdex.r10mm(ci), #precip exceeding 10mm per day
                    PrecEx20mm = climdex.r20mm(ci), #precip exceeding 20mm per day
                    DSubZero = climdex.id(ci), #count of days maximum temp below 0
                    Prec95pct = climdex.r95ptot(ci), #precip over 95th %
                    MonMxDT = climdex.txx(ci, freq = "annual"), #Annual Max Daily temp
                    MMx1DPrec = climdex.rx1day(ci, freq = "annual"), #Ann Maximum 1-day Precipitation
                    AMinT = climdex.tnn(ci, freq = "annual"), #Annual Minimum Of Daily Minimum Temperature
                    AMxT = climdex.txn(ci, freq = "annual"), #Annual Minimum Of Daily Maximum Temperature
                    SumDur = climdex.su(ci), #Summer duration by year
                    FrstDays = climdex.fd(ci))) #Frost days
   
   Frm1.df$Year = row.names(Frm1.df)
   
   if(range(snw2.tmp$precip)[1] == 0 & range(snw2.tmp$precip)[2] == 0){
          Frm1.df$PrecDep = NA
          Frm1.df$SnwDep = NA}
   
      else{
         #Precip Departure
         dat.ss = seas.sum(snw2.tmp, var = "precip", width="month")
         dat.sn = seas.norm(dat.ss, var = "precip", fun = "median", norm = "days")
         dat.dep = precip.dep(snw2.tmp, dat.sn, var="precip" )
         PrecDep = as.data.frame(
                      dat.dep %>%
                      group_by(Year) %>%
                      summarize(Dep = mean(dep, na.rm=T)))
         
         Frm1.df$PrecDep = with(PrecDep,
                             Dep[match(
                                Frm1.df$Year,
                                    Year)])
      
         #Snow Departure
         dat.ss = seas.sum(snw2.tmp, var = "snow", width="month")
         dat.sn = seas.norm(dat.ss, var = "snow", fun = "median", norm = "days")
         dat.dep = precip.dep(snw2.tmp, dat.sn, var="snow" )
         SnwDep = as.data.frame(
                      dat.dep %>%
                      group_by(Year) %>%
                      summarize(Dep = mean(dep, na.rm=T)))
         
         Frm1.df$SnwDep = with(SnwDep,
                             Dep[match(
                              Frm1.df$Year,
                                    Year)])
            }

   Frm1.df$Station = paste(loc.lvs[i])
    
      if(i == 1){ClimChn = Frm1.df}
          else{ClimChn = rbind(ClimChn, Frm1.df)}
    
}

Combine Climate Indices with Antler Data

Cnty.pnts = SpatialPointsDataFrame(cnt.df[,1:2], cnt.df)
proj4string(Cnty.pnts) = proj4string(Stat.pnts)

suppressMessages(library(spatstat))

nProj = "+proj=utm +zone=10 +datum=NAD83 +ellps=GRS80 +towgs84=0,0,0"

Cnty.pp = spTransform(Cnty.pnts, nProj)
Stat.pp = spTransform(Stat.pnts, nProj)

Cnty.pp = as.ppp(Cnty.pp)
Cnty.pp$Name = cnt.df$Name

Stat.pp = as.ppp(Stat.pp)
Stat.pp$Name = substr(Stat.pnts$STATION, 7, 17)

Dist.df = as.data.frame(crossdist(Cnty.pp, Stat.pp))
colnames(Dist.df) = Stat.pp$Name
row.names(Dist.df) = Cnty.pp$Name

Cnty.lvs = levels(factor(Cnty.pp$Name))

for(i in 1:length(Yr.lv)){
  
    tLU.tab = ClimChn %>%
              filter(Year ==  Yr.lv[i])
    
    tBm.df = MyBeam3 %>%
             filter(year ==  Yr.lv[i])
    
    for(j in 1:length(Cnty.lvs)){
      
        tBm2.df = tBm.df %>%
             filter(County ==  Cnty.lvs[j])
        
        if(dim(tBm2.df)[1] == 0){next}
            else{
              prox = arrange(melt(Dist.df %>%
                          mutate(Cnty = row.names(Dist.df)) %>%
                          filter(Cnty == Cnty.lvs[j]), "Cnty"), value)
              
              tLU.tab$Wt = with(prox,
                                 value[match(
                                  tLU.tab$Station,
                                        variable)])
              
              tLU.tab = arrange(tLU.tab[complete.cases(tLU.tab),], Wt)[1:4,]
       
              tLU.tab$WtPro = 1-Scale(tLU.tab$Wt)
              
              W.mean = function(x){weighted.mean(x, tLU.tab$WtPro, na.rm = T)}
              
              Vals = apply(tLU.tab[, c(1:11, 13:14)], 2,  W.mean)
              
              An.sm = as.data.frame(matrix(nrow = dim(tBm2.df)[1], ncol = 13))
              colnames(An.sm) = names(Vals)
              
              for(tx in 1:dim(tBm2.df)[1]){
              An.sm[tx,] = as.numeric(apply(tLU.tab[, c(1:11, 13:14)], 2,  W.mean))}
              
              tBm2.df = cbind(tBm2.df, An.sm)
              
            if(j == 1){Cnty.df = tBm2.df}
              else{Cnty.df = rbind(Cnty.df, tBm2.df)}
              
            }
    }    
        
if(i == 1){Yr.frm = Cnty.df}
    else{Yr.frm = rbind(Yr.frm, Cnty.df)}
      
    }
    
MyBeam3 = Yr.frm
        
Cov.df = Yr.frm[,180:192]

for(i in 1:ncol(Cov.df)){
      Cov.df[,i] = as.numeric(scale(Cov.df[,i], scale = T, center = T))
  }

names(Cov.df) = paste("s", names(Cov.df), sep="")

apply(Cov.df, 2, range, na.rm=T)

MyBeam3 = cbind(MyBeam3, Cov.df)

Add Sex Ratio

SR_87_13X = SR_87_13.df %>% 
              mutate(year = as.character(year),
                     county = as.character(county)) %>%
              select(county, year, DB.ratio, FD.ratio)

MyBeam3 = join(MyBeam3, SR_87_13X, by=c("county", "year"), type="left")

Average Beam Diameter

MyBeam3$AvgBeam = ifelse(is.na(MyBeam3$RBeam)==TRUE & is.na(MyBeam3$LBeam)==TRUE, NA,
                    ifelse(is.na(MyBeam3$RBeam)==TRUE & is.na(MyBeam3$LBeam)==FALSE, MyBeam3$LBeam,
                        ifelse(is.na(MyBeam3$RBeam)==FALSE & is.na(MyBeam3$LBeam)==TRUE, MyBeam3$RBeam,
                            ifelse(is.na(MyBeam3$RBeam)==FALSE & is.na(MyBeam3$LBeam)==FALSE, 
                                   (MyBeam3$LBeam + MyBeam3$RBeam)/2, NA))))
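
The nested ifelse above averages whichever beam measurements are present; an equivalent, more compact form is sketched below (rowMeans with na.rm gives the same values once NaN, the both-missing case, is mapped back to NA):

AvgBeam2 = rowMeans(MyBeam3[, c("LBeam", "RBeam")], na.rm = TRUE) #mean of available beams
AvgBeam2[is.nan(AvgBeam2)] = NA                                   #both beams missing
#all.equal(unname(AvgBeam2), MyBeam3$AvgBeam) should return TRUE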

Oceanic Nino Index

http://origin.cpc.ncep.noaa.gov/products/analysis_monitoring/ensostuff/ONI_v5.php

ONI.df = read.csv("C:/Users/humph173/Documents/NOAA_ONI/ONI.csv", stringsAsFactors=FALSE) %>%
         filter(Year >= 1987 & Year <= 2017)
 
MyBeam3$Month = month(MyBeam3$Date)

ONI.lookup = as.data.frame(
                  cbind(names(ONI.df)[2:13], 1:12))

names(ONI.lookup) = c("Seas", "Month")

MyBeam3$ONISeas = with(ONI.lookup,
                       Seas[match(
                         MyBeam3$Month,
                              Month)])

MyBeam3$ONISeas[is.na(MyBeam3$ONISeas)] = "OND"

 for(i in 1:length(Yr.lv)){
   
        tmp.frm = MyBeam3 %>% filter(Year == Yr.lv[i])
        ONI.tmp = melt(ONI.df %>% filter(Year == Yr.lv[i]), "Year")
        
        tmp.frm$ONI.indx = with(ONI.tmp,
                             value[match(
                               tmp.frm$ONISeas,
                                    variable)])
        
        if(i == 1){OutRes = tmp.frm}
            else{OutRes = rbind(OutRes, tmp.frm)}
 }

MyBeam3 = OutRes

Spatio-Temporal Model Construction

Temporal Index

INLA does not allow the same index variable to appear in more than one f() term, so the year index is copied into several identically valued columns below; the county index is duplicated the same way in the next step.

MyBeam3$Year1 = MyBeam3$Year2 = 
                MyBeam3$Year3 = 
                MyBeam3$Year4 =
                MyBeam3$Year5 =
                MyBeam3$Year6 =
                MyBeam3$Year.int = as.integer(as.factor(MyBeam3$year))

Spatial Index

Counties$Region = 1:nrow(Counties@data)

Region.tab = Counties@data[, c("NAME", "Region")]

MyBeam3$Region = MyBeam3$Region1 = 
                 MyBeam3$Region2 = 
                 MyBeam3$Region3 = 
                 MyBeam3$Region4 = 
                 MyBeam3$Region5 = 
                 MyBeam3$Region6 = 
                 MyBeam3$Region.int = 
                  with(Region.tab,
                     Region[match(
                       as.character(MyBeam3$County),
                            NAME)])

Space-Time Interaction Index

MyBeam3$Region.Yr = paste(MyBeam3$County, MyBeam3$year, sep="")

County Grouping Effect

Group counties into the Upper Peninsula, the Lower Peninsula, and the 12 northwest counties (NW12) subject to APRs starting in 2013.

##The NW 12
MyBeam3$APR = ifelse(MyBeam3$County == "Antrim"  | 
                 MyBeam3$County == "Benzie"  | 
                 MyBeam3$County == "Charlevoix"  | 
                 MyBeam3$County == "Emmet"  | 
                 MyBeam3$County == "Grand Traverse"  | 
                 MyBeam3$County == "Kalkaska"  | 
                 MyBeam3$County == "Lake"  | 
                 MyBeam3$County == "Manistee"  |
                 MyBeam3$County == "Mason"  | 
                 MyBeam3$County == "Missaukee"  | 
                 MyBeam3$County == "Osceola"  | 
                 MyBeam3$County == "Wexford", "NW12", "Other")

MyBeam3$Pen = with(Counties@data, #Upper vs. lower
                   PENINSULA[match(
                               MyBeam3$county,
                                       OBJECTID)])

MyBeam3$RegAPR = ifelse(MyBeam3$APR != "NW12", MyBeam3$Pen, "APR") #Upper, Lower, and NW12

Spatial Neighborhood Graph

nb = poly2nb(Counties, 
             queen = T)

nb2INLA("J", nb)
J = inla.read.graph("J")

summary(nb)
## Neighbour list object:
## Number of regions: 83 
## Number of nonzero links: 420 
## Percentage nonzero weights: 6.096676 
## Average number of links: 5.060241 
## Link number distribution:
## 
##  1  2  3  4  5  6  7  8 
##  1  6  8 21  8 17 19  3 
## 1 least connected region:
## 41 with 1 link
## 3 most connected regions:
## 19 61 67 with 8 links
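
The link counts can be tied back to county names with spdep's card(), e.g. to list the least and most connected counties (quick sketch):

Counties$NAME[card(nb) == min(card(nb))] #least connected county
Counties$NAME[card(nb) == max(card(nb))] #most connected counties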

View Spatial Neighborhood Graph

plot(Counties, col='gray', border='blue')
xy = coordinates(Counties)
plot(nb, xy, col='red', lwd=2, add=TRUE)

Check for Collinearity

library(perturb)
## 
## Attaching package: 'perturb'
## The following object is masked from 'package:raster':
## 
##     reclassify
Colin.df = MyBeam3 %>%
            select(sbio1, sbio2, sW_PPTMax,sCORN_PLANTED, sWHEAT_PLANTED, 
                   sSOYBEANS_PLANTED, sGrowSeas, sPrecEx10mm, sPrecEx20mm, sDSubZero, sPrec95pct, sMonMxDT,
                   sMMx1DPrec, sAMinT, sAMxT,  sSumDur, sFrstDays,  sPrecDep, sSnwDep)

Colin.df = Colin.df[complete.cases(Colin.df),]

CorCov = cor(Colin.df)

corrplot(CorCov)

CI = colldiag(CorCov)
CI
## Condition
## Index    Variance Decomposition Proportions
##            intercept sbio1 sbio2 sW_PPTMax sCORN_PLANTED sWHEAT_PLANTED
## 1    1.000 0.001     0.001 0.001 0.001     0.000         0.000         
## 2    1.386 0.000     0.000 0.001 0.001     0.000         0.000         
## 3    1.537 0.001     0.000 0.001 0.001     0.001         0.001         
## 4    1.928 0.006     0.000 0.001 0.000     0.000         0.000         
## 5    2.190 0.001     0.000 0.001 0.013     0.000         0.002         
## 6    2.507 0.048     0.000 0.001 0.018     0.000         0.001         
## 7    3.337 0.078     0.000 0.005 0.024     0.000         0.001         
## 8    3.550 0.020     0.000 0.006 0.082     0.000         0.001         
## 9    4.539 0.103     0.004 0.016 0.015     0.000         0.002         
## 10   4.826 0.005     0.000 0.014 0.000     0.002         0.007         
## 11   6.740 0.004     0.030 0.094 0.066     0.000         0.044         
## 12  10.735 0.022     0.031 0.318 0.011     0.012         0.015         
## 13  11.925 0.030     0.004 0.003 0.152     0.006         0.002         
## 14  13.618 0.021     0.127 0.129 0.136     0.004         0.123         
## 15  14.575 0.012     0.003 0.002 0.016     0.012         0.037         
## 16  18.778 0.237     0.103 0.017 0.193     0.025         0.012         
## 17  23.028 0.190     0.019 0.325 0.180     0.518         0.093         
## 18  25.098 0.001     0.014 0.024 0.073     0.116         0.007         
## 19  29.218 0.222     0.664 0.040 0.020     0.303         0.652         
##    sSOYBEANS_PLANTED sGrowSeas sPrecEx10mm sPrecEx20mm sDSubZero
## 1  0.000             0.000     0.000       0.000       0.001    
## 2  0.000             0.000     0.000       0.000       0.001    
## 3  0.001             0.001     0.002       0.001       0.000    
## 4  0.000             0.001     0.001       0.000       0.000    
## 5  0.000             0.003     0.000       0.000       0.002    
## 6  0.000             0.001     0.001       0.001       0.004    
## 7  0.000             0.002     0.003       0.000       0.000    
## 8  0.000             0.009     0.001       0.000       0.000    
## 9  0.000             0.001     0.004       0.000       0.000    
## 10 0.000             0.000     0.005       0.012       0.002    
## 11 0.000             0.009     0.008       0.001       0.016    
## 12 0.038             0.003     0.001       0.015       0.235    
## 13 0.001             0.003     0.221       0.008       0.052    
## 14 0.052             0.042     0.117       0.030       0.038    
## 15 0.159             0.004     0.296       0.034       0.124    
## 16 0.275             0.061     0.011       0.106       0.027    
## 17 0.143             0.061     0.166       0.142       0.428    
## 18 0.241             0.060     0.047       0.469       0.003    
## 19 0.087             0.741     0.115       0.180       0.067    
##    sPrec95pct sMonMxDT sMMx1DPrec sAMinT sAMxT sSumDur sFrstDays sPrecDep
## 1  0.000      0.000    0.000      0.001  0.000 0.000   0.001     0.000   
## 2  0.000      0.002    0.001      0.000  0.001 0.001   0.000     0.000   
## 3  0.002      0.000    0.001      0.000  0.000 0.000   0.000     0.000   
## 4  0.002      0.000    0.010      0.001  0.001 0.001   0.000     0.010   
## 5  0.000      0.004    0.000      0.000  0.001 0.001   0.000     0.032   
## 6  0.000      0.006    0.000      0.000  0.001 0.002   0.002     0.008   
## 7  0.007      0.005    0.005      0.001  0.006 0.001   0.000     0.029   
## 8  0.000      0.001    0.001      0.000  0.002 0.000   0.021     0.003   
## 9  0.001      0.048    0.012      0.003  0.010 0.003   0.001     0.001   
## 10 0.001      0.023    0.008      0.018  0.016 0.001   0.001     0.051   
## 11 0.006      0.002    0.041      0.026  0.001 0.003   0.017     0.017   
## 12 0.015      0.013    0.005      0.097  0.002 0.000   0.028     0.007   
## 13 0.051      0.008    0.083      0.085  0.151 0.035   0.197     0.001   
## 14 0.028      0.000    0.010      0.145  0.074 0.061   0.028     0.000   
## 15 0.098      0.007    0.166      0.082  0.160 0.001   0.068     0.008   
## 16 0.002      0.052    0.175      0.051  0.013 0.546   0.074     0.345   
## 17 0.069      0.002    0.120      0.110  0.011 0.045   0.171     0.430   
## 18 0.718      0.407    0.156      0.342  0.377 0.161   0.003     0.007   
## 19 0.000      0.419    0.206      0.038  0.174 0.137   0.388     0.051   
##    sSnwDep
## 1  0.000  
## 2  0.006  
## 3  0.001  
## 4  0.007  
## 5  0.004  
## 6  0.014  
## 7  0.009  
## 8  0.045  
## 9  0.070  
## 10 0.105  
## 11 0.006  
## 12 0.164  
## 13 0.056  
## 14 0.125  
## 15 0.153  
## 16 0.021  
## 17 0.027  
## 18 0.184  
## 19 0.003

Model Fitting Set

MyBeam3.tst = MyBeam3 %>% filter(is.na(AvgBeam)==FALSE)
MyBeam3.tst$ID.Region.Yr = as.integer(as.factor(MyBeam3.tst$Region.Yr))

Setup Matrix and Priors

Sr = length(unique(MyBeam3.tst$Region))
Tm = length(unique(MyBeam3.tst$Year))

g = inla.read.graph("J")

Q.xi = matrix(0, g$n, g$n) 
        for (i in 1:g$n){ 
         Q.xi[i,i]=g$nnbs[[i]] 
         Q.xi[i,g$nbs[[i]]]=-1}

Q.Leroux = diag(Sr)-Q.xi

lunif = "expression:
         a=1; b=1; 
         beta = exp(theta)/(1+exp(theta));
         logdens = lgamma(a+b)-lgamma(a)-lgamma(b)+(a-1)*beta+(b-1)*(1-beta);
         log_jacobian = log(beta*(1-beta));
         return(logdens+log_jacobian)"

sdunif = "expression: logdens = -log_precision/2; 
          return(logdens)"
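
As written, lunif corresponds to a Uniform(0,1) (i.e., Beta(1,1)) prior on the Leroux mixing parameter, expressed on INLA's internal logit scale with the matching Jacobian, and sdunif to a uniform prior on the standard deviation. A quick sanity check on the structure matrix built above (a sketch):

stopifnot(isSymmetric(Q.xi))               #queen contiguity gives a symmetric structure
stopifnot(all(abs(rowSums(Q.xi)) < 1e-12)) #rows sum to zero (ICAR structure)
summary(diag(Q.xi))                        #diagonal entries = neighbours per county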

Spatial Model Only

formula.sp = AvgBeam ~ f(Region, 
                         model="generic1",
                         Cmatrix=Q.Leroux, 
                         constr=TRUE, 
                         hyper=list(prec=list(prior=sdunif),
                                    beta=list(prior=lunif))) 


#theta1 = Model.sp$internal.summary.hyperpar$mean
theta1 = c(-2.686110, -2.946664, 3.201317)

Model.sp = inla(formula.sp, 
                family = "gaussian", 
                verbose = FALSE, 
                control.predictor = list(
                          compute = TRUE, 
                             link = 1), 
                control.mode = list(restart = TRUE, theta = theta1),
                control.inla = list(strategy = "gaussian", 
                                int.strategy = "eb"),
                control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                data = MyBeam3.tst)
dic.m1 = c(Model.sp$dic$dic, Model.sp$waic$waic)
summary(Model.sp)
## 
## Call:
## c("inla(formula = formula.sp, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          2.0736        481.8719         18.4135        502.3590 
## 
## Fixed effects:
##                mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## (Intercept) 12.0155 0.0064    12.0028  12.0155    12.0281 12.0155   0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## 
## Model hyperparameters:
##                                           mean   sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0681 0.00     0.0681   0.0681
## Precision for Region                    0.0525 0.00     0.0525   0.0525
## Beta for Region                         0.9609 0.00     0.9609   0.9609
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0682 0.0681
## Precision for Region                        0.0526 0.0525
## Beta for Region                             0.9609 0.9609
## 
## Expected number of effective parameters(std dev): 83.10(0.00)
## Number of equivalent replicates : 4192.22 
## 
## Deviance Information Criterion (DIC) ...............: 2793711.65
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 56.14
## 
## Watanabe-Akaike information criterion (WAIC) ...: 2793999.26
## Effective number of parameters .................: 343.38
## 
## Marginal log-Likelihood:  -2324285.42 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Plot Random Field

LU.tab = as.data.frame(Model.sp$summary.random$Region[, c(1:2,4,6)])
names(LU.tab) = c("ID", "Mean", "Q025", "Q975")

LU.tab$County = LookUp.tab$NAME

Counties2 = Counties

Domain.r = raster(res=0.01, 
                  ext = extent(Counties))

Counties2$RE = LU.tab$Mean
Region.mean.r = rasterize(Counties2,
                          Domain.r,
                          field = "RE",
                          background = NA)
              
Counties2$Q025 = LU.tab$Q025
Region.Q025.r = rasterize(Counties2,
                          Domain.r,
                          field = "Q025",
                          background = NA)


Counties2$Q975 = LU.tab$Q975 
Region.Q975.r = rasterize(Counties2,
                          Domain.r,
                          field = "Q975",
                          background = NA)


MRF.stk = stack(Region.Q025.r, Region.mean.r, Region.Q975.r)
names(MRF.stk) = c("A", "B", "C")

MRF.stk2 = MRF.stk

Space and Non-Parametric Time

formula.st = AvgBeam ~ f(Region, 
                         model="generic1",
                         Cmatrix=Q.Leroux, 
                         constr=TRUE, 
                         hyper=list(prec=list(prior=sdunif),
                                    beta=list(prior=lunif))) +
                               f(Year1, 
                         model="rw1", 
                         constr=TRUE,
                         hyper=list(prec=list(prior=sdunif))) 

#theta1 = Model.st$internal.summary.hyperpar$mean
theta1 = c(-2.421195, -1.292276, 3.282074, 1.992129)

Model.st = inla(formula.st, 
                family = "gaussian", 
                verbose = FALSE, 
                control.predictor = list(
                          compute = TRUE, 
                             link = 1), 
                control.mode = list(restart = TRUE, theta = theta1),
                control.inla = list(strategy = "gaussian", 
                                int.strategy = "eb"),
                control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                data = MyBeam3.tst)
dic.m2 = c(Model.st$dic$dic, Model.st$waic$waic)
summary(Model.st)
## 
## Call:
## c("inla(formula = formula.st, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          2.7041        744.8421         17.4182        764.9644 
## 
## Fixed effects:
##                mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## (Intercept) 19.5178 0.0078    19.5024  19.5178    19.5332 19.5178   0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## Year1   RW1 model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0888 0.0000     0.0888   0.0888
## Precision for Region                    0.2752 0.0177     0.2340   0.2782
## Beta for Region                         0.9637 0.0033     0.9534   0.9645
## Precision for Year1                     7.3807 0.8267     5.5107   7.5062
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0900 0.0888
## Precision for Region                        0.3004 0.2945
## Beta for Region                             0.9686 0.9650
## Precision for Year1                         8.5715 8.2319
## 
## Expected number of effective parameters(std dev): 112.15(0.00)
## Number of equivalent replicates : 3106.55 
## 
## Deviance Information Criterion (DIC) ...............: 1844406.33
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 112.15
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1844409.61
## Effective number of parameters .................: 115.38
## 
## Marginal log-Likelihood:  -922564.07 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Space, Non-Parametric Time, and Space-Time Interaction (Type I)

formula.T1 = AvgBeam ~ f(Region, 
                         model="generic1",
                         Cmatrix=Q.Leroux, 
                         constr=TRUE, 
                         hyper=list(prec=list(prior=sdunif),
                                    beta=list(prior=lunif))) +
                               f(Year1, 
                         model="rw1", 
                         constr=TRUE,
                         hyper=list(prec=list(prior=sdunif))) +
                       f(ID.Region.Yr, 
                         model="iid", 
                         constr=TRUE,
                         hyper=list(prec=list(prior=sdunif))) 


#theta1 = Model.T1$internal.summary.hyperpar$mean
theta1 = c(-2.430207, -1.077162, 4.345397, 1.679594, 1.157008)

Model.T1 = inla(formula.T1, 
                family = "gaussian", 
                verbose = FALSE, 
                control.predictor = list(
                          compute = TRUE, 
                             link = 1), 
                control.mode = list(restart = TRUE, theta = theta1),
                control.inla = list(strategy = "gaussian", 
                                int.strategy = "eb"),
                control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                data = MyBeam3.tst)
dic.m3 = c(Model.T1$dic$dic, Model.T1$waic$waic)
summary(Model.T1)
## 
## Call:
## c("inla(formula = formula1, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          2.4554        348.0650         19.4889        370.0093 
## 
## Fixed effects:
##                mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## (Intercept) 19.5522 0.0085    19.5354  19.5522    19.5689 19.5522   0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0880 0.0002     0.0877   0.0880
## Precision for Region                    0.3434 0.0452     0.2740   0.3364
## Beta for Region                         0.9864 0.0050     0.9741   0.9874
## Precision for Year1                     5.5393 1.4600     3.3732   5.2947
## Precision for ID.Region.Yr              3.1921 0.2807     2.8225   3.1340
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0883 0.0880
## Precision for Region                        0.4496 0.3183
## Beta for Region                             0.9933 0.9890
## Precision for Year1                         9.0624 4.8229
## Precision for ID.Region.Yr                  3.8744 2.9278
## 
## Expected number of effective parameters(std dev): 1853.94(0.00)
## Number of equivalent replicates : 187.92 
## 
## Deviance Information Criterion (DIC) ...............: 1837326.90
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1853.92
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1837325.75
## Effective number of parameters .................: 1839.31
## 
## Marginal log-Likelihood:  -919790.62 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Spatio-temporal with Regional Effect

Adds an IID effect for the Upper and Lower Peninsulas, replicated by year (Year2).

formula.T1r = AvgBeam ~ f(Region, 
                          model="generic1",
                          Cmatrix=Q.Leroux, 
                          constr=TRUE, 
                          hyper=list(prec=list(prior=sdunif),
                                     beta=list(prior=lunif))) +
                        f(Pen,
                          model="iid",
                          hyper=reg.prior,
                          replicate=Year2,
                          constr = TRUE) +
                                f(Year1, 
                          model="rw1", 
                          constr=TRUE,
                          hyper=list(prec=list(prior=sdunif))) +
                        f(ID.Region.Yr, 
                          model="iid", 
                          constr=TRUE,
                          hyper=list(prec=list(prior=sdunif))) 


#theta1 = Model.T1r$internal.summary.hyperpar$mean
theta1 = c(-2.400136, -1.269618, 3.594011, 4.210061, 1.394595, 1.690612)

Model.T1r = inla(formula.T1r, 
                family = "gaussian", 
                verbose = FALSE, 
                control.predictor = list(
                          compute = TRUE, 
                             link = 1), 
                control.mode = list(restart = TRUE, theta = theta1),
                control.inla = list(strategy = "gaussian", 
                                int.strategy = "eb"),
                control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                data = MyBeam3.tst)
dic.m4 = c(Model.T1r$dic$dic, Model.T1r$waic$waic)
summary(Model.T1r)
## 
## Call:
## c("inla(formula = formula1reg, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          2.7320        818.1981         20.2657        841.1958 
## 
## Fixed effects:
##                mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## (Intercept) 19.5551 0.0352    19.4861  19.5551    19.6241 19.5551   0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## Pen   IID model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0880 0.0002     0.0876   0.0880
## Precision for Region                    0.3064 0.0462     0.2329   0.3000
## Beta for Region                         0.9595 0.0240     0.8977   0.9651
## Precision for Pen                       5.4601 1.2249     3.3074   5.3863
## Precision for Year1                     7.9033 4.7639     3.1856   6.5259
## Precision for ID.Region.Yr              3.3953 0.1491     3.0841   3.4044
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0885 0.0879
## Precision for Region                        0.4133 0.2849
## Beta for Region                             0.9889 0.9746
## Precision for Pen                           8.0961 5.2614
## Precision for Year1                        20.5417 4.7107
## Precision for ID.Region.Yr                  3.6655 3.4418
## 
## Expected number of effective parameters(std dev): 1790.55(0.00)
## Number of equivalent replicates : 194.57 
## 
## Deviance Information Criterion (DIC) ...............: 1837305.16
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1790.60
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1837302.20
## Effective number of parameters .................: 1775.36
## 
## Marginal log-Likelihood:  -919743.22 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Spatio-temporal with Regional Effect and the NW12

Adds an IID effect for the Upper Peninsula, the Lower Peninsula, and the “Northwest 12” counties subject to antler point restrictions (APRs), replicated by year (Year2).

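Based on how this effect is summarized later in the script (three group labels, Northwest 12 / Lower Peninsula / Upper Peninsula, repeated across the study years), RegAPR appears to index three regional groups replicated by Year2. A quick sanity check under that assumption:

length(unique(MyBeam3.tst$RegAPR)) #expected to be 3 (NW12, rest of LP, UP)
length(unique(MyBeam3.tst$Year2))  #number of yearly replicates of the group effect
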
formula.T1apr = AvgBeam ~ f(Region, 
                            model="generic1",
                            Cmatrix=Q.Leroux, 
                            constr=TRUE, 
                            hyper=list(prec=list(prior=sdunif),
                                       beta=list(prior=lunif))) +
                          f(RegAPR,
                            model="iid",
                            hyper=reg.prior,
                            replicate=Year2,
                            constr = TRUE) +
                                    f(Year1, 
                            model="rw1", 
                            constr=TRUE,
                            hyper=list(prec=list(prior=sdunif))) +
                          f(ID.Region.Yr, 
                            model="iid", 
                            constr=TRUE,
                            hyper=list(prec=list(prior=sdunif))) 


#theta1 = Model.T1apr$internal.summary.hyperpar$mean
theta1 = c(-2.400136, -1.269618, 3.594011, 4.210061, 1.394595, 1.690612)

Model.T1apr = inla(formula.T1apr, 
                  family = "gaussian", 
                  verbose = FALSE, 
                  control.predictor = list(
                            compute = TRUE, 
                               link = 1), 
                  control.mode = list(restart = TRUE, theta = theta1),
                  control.inla = list(strategy = "gaussian", 
                                  int.strategy = "eb"),
                  control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                  data = MyBeam3.tst)
dic.m5 = c(Model.T1apr$dic$dic, Model.T1apr$waic$waic)
summary(Model.T1apr)
## 
## Call:
## c("inla(formula = formula1apr, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          2.7417        787.3496         18.0695        808.1608 
## 
## Fixed effects:
##                mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## (Intercept) 19.5752 0.0409    19.4948  19.5752    19.6555 19.5752   0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## RegAPR   IID model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0880 0.0002     0.0876   0.0880
## Precision for Region                    0.3866 0.0662     0.2579   0.3878
## Beta for Region                         0.9739 0.0264     0.9042   0.9825
## Precision for RegAPR                    3.0388 0.5967     1.9885   3.0013
## Precision for Year1                     5.0310 1.5435     2.6572   4.8129
## Precision for ID.Region.Yr              3.5836 0.1188     3.3918   3.5683
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0884 0.0881
## Precision for Region                        0.5138 0.3959
## Beta for Region                             0.9992 0.9985
## Precision for RegAPR                        4.3270 2.9374
## Precision for Year1                         8.6546 4.4054
## Precision for ID.Region.Yr                  3.8530 3.5140
## 
## Expected number of effective parameters(std dev): 1792.04(0.00)
## Number of equivalent replicates : 194.41 
## 
## Deviance Information Criterion (DIC) ...............: 1837161.01
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1792.04
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1837158.97
## Effective number of parameters .................: 1777.68
## 
## Marginal log-Likelihood:  -919603.74 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Regional Spatio-temporal with Covariates

Adding linear covariates to the previous model.

formula1 = AvgBeam ~ f(Region, 
                      model="generic1",
                      Cmatrix=Q.Leroux, 
                      constr=TRUE, 
                      hyper=list(prec=list(prior=sdunif),
                                 beta=list(prior=lunif))) +
                    f(RegAPR,
                      model="iid",
                      hyper=reg.prior,
                      replicate=Year2,
                      constr = TRUE) +
                            f(Year1, 
                      model="rw1", 
                      constr=TRUE,
                      hyper=list(prec=list(prior=sdunif))) +
                    f(ID.Region.Yr, 
                      model="iid", 
                      constr=TRUE,
                      hyper=list(prec=list(prior=sdunif))) +
                    sbio1 + sbio2 + sW_PPTMax + sCORN_PLANTED + sWHEAT_PLANTED + sSOYBEANS_PLANTED +
                        sGrowSeas + sPrecEx10mm + sPrecEx20mm +  sDSubZero + sPrec95pct + sMonMxDT +
                        sMMx1DPrec + sAMinT + sAMxT + sSumDur + sFrstDays + sPrecDep + sSnwDep


#theta1 = Model1cov$internal.summary.hyperpar$mean
theta1 = c(-2.400136, -1.269618, 3.594011, 4.210061, 1.394595, 1.690612)

Model1cov = inla(formula1, 
                  family = "gaussian", 
                  verbose = FALSE, 
                  control.predictor = list(
                            compute = TRUE, 
                               link = 1), 
                  control.mode = list(restart = TRUE, theta = theta1),
                  control.inla = list(strategy = "gaussian", 
                                  int.strategy = "eb"),
                  control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                  data = MyBeam3.tst)
dic.m6 = c(Model1cov$dic$dic, Model1cov$waic$waic)
summary(Model1cov)
## 
## Call:
## c("inla(formula = formula1, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), control.mode = list(restart = TRUE, ",  "        theta = theta1), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          5.1680       1326.3301         18.7236       1350.2217 
## 
## Fixed effects:
##                      mean     sd 0.025quant 0.5quant 0.975quant    mode
## (Intercept)       19.5676 0.0392    19.4906  19.5676    19.6444 19.5676
## sbio1              0.5064 0.0952     0.3194   0.5064     0.6933  0.5064
## sbio2             -0.0827 0.0401    -0.1613  -0.0827    -0.0041 -0.0827
## sW_PPTMax          0.0205 0.0233    -0.0253   0.0205     0.0663  0.0205
## sCORN_PLANTED      0.1345 0.0466     0.0431   0.1345     0.2258  0.1345
## sWHEAT_PLANTED     0.1357 0.0375     0.0621   0.1357     0.2093  0.1357
## sSOYBEANS_PLANTED -0.0890 0.0433    -0.1741  -0.0890    -0.0040 -0.0890
## sGrowSeas          0.0036 0.0405    -0.0759   0.0036     0.0830  0.0036
## sPrecEx10mm        0.0155 0.0330    -0.0494   0.0155     0.0803  0.0155
## sPrecEx20mm       -0.1360 0.0433    -0.2211  -0.1360    -0.0510 -0.1360
## sDSubZero         -0.0047 0.0561    -0.1148  -0.0047     0.1052 -0.0047
## sPrec95pct         0.3097 0.0469     0.2176   0.3097     0.4018  0.3097
## sMonMxDT          -0.0084 0.0501    -0.1068  -0.0084     0.0899 -0.0084
## sMMx1DPrec        -0.1981 0.0387    -0.2741  -0.1981    -0.1221 -0.1981
## sAMinT             0.1572 0.0515     0.0560   0.1572     0.2583  0.1572
## sAMxT             -0.1981 0.0640    -0.3238  -0.1981    -0.0725 -0.1981
## sSumDur           -0.0580 0.0575    -0.1708  -0.0580     0.0547 -0.0580
## sFrstDays          0.0448 0.0412    -0.0361   0.0448     0.1257  0.0448
## sPrecDep          -0.0432 0.0348    -0.1116  -0.0432     0.0251 -0.0432
## sSnwDep           -0.2056 0.0384    -0.2810  -0.2056    -0.1303 -0.2056
##                   kld
## (Intercept)         0
## sbio1               0
## sbio2               0
## sW_PPTMax           0
## sCORN_PLANTED       0
## sWHEAT_PLANTED      0
## sSOYBEANS_PLANTED   0
## sGrowSeas           0
## sPrecEx10mm         0
## sPrecEx20mm         0
## sDSubZero           0
## sPrec95pct          0
## sMonMxDT            0
## sMMx1DPrec          0
## sAMinT              0
## sAMxT               0
## sSumDur             0
## sFrstDays           0
## sPrecDep            0
## sSnwDep             0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## RegAPR   IID model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0880 0.0002     0.0876   0.0880
## Precision for Region                    0.4512 0.0647     0.3367   0.4468
## Beta for Region                         0.9387 0.0329     0.8629   0.9436
## Precision for RegAPR                    3.7026 0.8669     2.2577   3.6189
## Precision for Year1                     5.9745 2.0088     2.5204   5.8585
## Precision for ID.Region.Yr              4.5722 0.2172     4.1551   4.5695
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0884 0.0880
## Precision for Region                        0.5903 0.4386
## Beta for Region                             0.9863 0.9600
## Precision for RegAPR                        5.6550 3.4619
## Precision for Year1                        10.1814 5.5035
## Precision for ID.Region.Yr                  5.0083 4.5674
## 
## Expected number of effective parameters(std dev): 1676.13(0.00)
## Number of equivalent replicates : 207.86 
## 
## Deviance Information Criterion (DIC) ...............: 1837111.82
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1676.08
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1837109.79
## Effective number of parameters .................: 1663.67
## 
## Marginal log-Likelihood:  -919621.54 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Regional Spatio-temporal (Reduced Model Above)

Removing the weak linear effects from the model above.

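A sketch of one way to identify the dropped terms, assuming the criterion is a 95% credible interval that spans zero in Model1cov above:

fx.cov = Model1cov$summary.fixed
weak.demo = rownames(fx.cov)[fx.cov[, "0.025quant"] < 0 & fx.cov[, "0.975quant"] > 0]
weak.demo #candidates for removal; compare with the reduced formula below
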
formula1rcov = AvgBeam ~ f(Region, 
                          model="generic1",
                          Cmatrix=Q.Leroux, 
                          constr=TRUE, 
                          hyper=list(prec=list(prior=sdunif),
                                     beta=list(prior=lunif))) +
                        f(RegAPR,
                          model="iid",
                          hyper=reg.prior,
                          replicate=Year2,
                          constr = TRUE) +
                                f(Year1, 
                          model="rw1", 
                          constr=TRUE,
                          hyper=list(prec=list(prior=sdunif))) +
                        f(ID.Region.Yr, 
                          model="iid", 
                          constr=TRUE,
                          hyper=list(prec=list(prior=sdunif))) +
                        sbio1 + sbio2 + sCORN_PLANTED + sWHEAT_PLANTED + sSOYBEANS_PLANTED +
                                sPrecEx20mm + sPrec95pct + sMMx1DPrec + sAMinT + sAMxT + sSnwDep



#theta1 = Model.T1rcov$internal.summary.hyperpar$mean
theta1 = c(-2.4298949, -0.8306994, 3.7172451, 1.2343055, 1.9743757, 1.5678625)

Model.T1rcov = inla(formula1rcov, 
                    family = "gaussian", 
                    verbose = FALSE, 
                    control.predictor = list(
                              compute = TRUE, 
                                 link = 1), 
                    control.mode = list(restart = TRUE, theta = theta1),
                    control.inla = list(strategy = "gaussian", 
                                    int.strategy = "eb"),
                    control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                    data = MyBeam3.tst)
dic.m7 = c(Model.T1rcov$dic$dic, Model.T1rcov$waic$waic)
summary(Model.T1rcov)
## 
## Call:
## c("inla(formula = formula1rcov, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), control.mode = list(restart = TRUE, ",  "        theta = theta1), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          3.5779        602.6386         20.1282        626.3446 
## 
## Fixed effects:
##                      mean     sd 0.025quant 0.5quant 0.975quant    mode
## (Intercept)       19.5868 0.0396    19.5090  19.5868    19.6645 19.5868
## sbio1              0.4307 0.0866     0.2607   0.4307     0.6005  0.4307
## sbio2             -0.0927 0.0400    -0.1712  -0.0927    -0.0143 -0.0927
## sCORN_PLANTED      0.1345 0.0470     0.0422   0.1345     0.2268  0.1345
## sWHEAT_PLANTED     0.1411 0.0374     0.0676   0.1411     0.2145  0.1411
## sSOYBEANS_PLANTED -0.0885 0.0436    -0.1741  -0.0885    -0.0029 -0.0885
## sPrecEx20mm       -0.1173 0.0359    -0.1878  -0.1173    -0.0468 -0.1173
## sPrec95pct         0.3038 0.0464     0.2127   0.3038     0.3949  0.3038
## sMMx1DPrec        -0.2113 0.0367    -0.2833  -0.2113    -0.1393 -0.2113
## sAMinT             0.1465 0.0486     0.0511   0.1465     0.2418  0.1465
## sAMxT             -0.1880 0.0569    -0.2996  -0.1880    -0.0765 -0.1880
## sSnwDep           -0.2217 0.0263    -0.2734  -0.2217    -0.1700 -0.2217
##                   kld
## (Intercept)         0
## sbio1               0
## sbio2               0
## sCORN_PLANTED       0
## sWHEAT_PLANTED      0
## sSOYBEANS_PLANTED   0
## sPrecEx20mm         0
## sPrec95pct          0
## sMMx1DPrec          0
## sAMinT              0
## sAMxT               0
## sSnwDep             0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## RegAPR   IID model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                          mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.088 0.0002     0.0876   0.0880
## Precision for Region                    0.441 0.0689     0.3221   0.4353
## Beta for Region                         0.974 0.0127     0.9408   0.9773
## Precision for RegAPR                    3.486 0.6042     2.4861   3.4210
## Precision for Year1                     7.355 1.5006     4.6915   7.2768
## Precision for ID.Region.Yr              4.818 0.4694     4.1943   4.7216
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0885 0.0880
## Precision for Region                        0.5920 0.4238
## Beta for Region                             0.9883 0.9824
## Precision for RegAPR                        4.8476 3.2839
## Precision for Year1                        10.5344 7.1520
## Precision for ID.Region.Yr                  5.9591 4.3941
## 
## Expected number of effective parameters(std dev): 1693.24(0.00)
## Number of equivalent replicates : 205.76 
## 
## Deviance Information Criterion (DIC) ...............: 1837108.65
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1693.18
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1837106.55
## Effective number of parameters .................: 1680.43
## 
## Marginal log-Likelihood:  -919574.45 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Re-check for Collinearity

These checks are to avoid collinearity among the retained covariates; cross-correlation between the El Niño index and the response variable is assessed later in the script.

Colin.df = MyBeam3.tst %>%
            select(sbio1, sbio2, sCORN_PLANTED, sWHEAT_PLANTED, sSOYBEANS_PLANTED, 
                   sPrecEx20mm, sPrec95pct, sMMx1DPrec, sAMinT, sAMxT, sSnwDep, ONI.indx)

Colin.df = Colin.df[complete.cases(Colin.df),]

cor.mtest = function(mat, ...) {
    mat = as.matrix(mat)
    n = ncol(mat)
    p.mat= matrix(NA, n, n)
    diag(p.mat) = 0
    for (i in 1:(n - 1)) {
        for (j in (i + 1):n) {
            tmp = cor.test(mat[, i], mat[, j], ...)
            p.mat[i, j] = p.mat[j, i] = tmp$p.value
        }}
    
    colnames(p.mat) = rownames(p.mat) = colnames(mat)
    p.mat
}
p.mat = cor.mtest(Colin.df)

col = colorRampPalette(rev(c("#BB4444", "#EE9988", "#FFFFFF", "#77AADD", "#4477AA")))
corrplot(cor(Colin.df), method="color", col=col(200),  
         type="upper", order="hclust", 
         addCoef.col = "black",
         tl.col="black", tl.srt=45,
         p.mat = p.mat, sig.level = 0.01, insig = "blank", 
         diag=FALSE)

CorCov = cor(Colin.df)
CI = colldiag(CorCov)
CI
## Condition
## Index    Variance Decomposition Proportions
##            intercept sbio1 sbio2 sCORN_PLANTED sWHEAT_PLANTED
## 1    1.000 0.006     0.002 0.008 0.001         0.001         
## 2    1.454 0.008     0.000 0.002 0.000         0.000         
## 3    1.684 0.001     0.001 0.004 0.001         0.002         
## 4    2.161 0.018     0.001 0.000 0.000         0.000         
## 5    2.678 0.032     0.001 0.002 0.000         0.002         
## 6    3.204 0.314     0.008 0.136 0.000         0.003         
## 7    4.377 0.013     0.019 0.092 0.000         0.009         
## 8    5.560 0.126     0.249 0.073 0.000         0.029         
## 9    8.168 0.012     0.019 0.080 0.003         0.012         
## 10  14.341 0.037     0.057 0.004 0.088         0.104         
## 11  15.453 0.379     0.021 0.567 0.001         0.092         
## 12  19.589 0.054     0.622 0.034 0.906         0.746         
##    sSOYBEANS_PLANTED sPrecEx20mm sPrec95pct sMMx1DPrec sAMinT sAMxT
## 1  0.001             0.002       0.000      0.000      0.001  0.001
## 2  0.000             0.009       0.006      0.013      0.000  0.001
## 3  0.001             0.001       0.000      0.000      0.010  0.015
## 4  0.000             0.000       0.001      0.000      0.000  0.006
## 5  0.000             0.005       0.002      0.028      0.001  0.001
## 6  0.001             0.044       0.002      0.002      0.000  0.001
## 7  0.000             0.213       0.000      0.084      0.000  0.011
## 8  0.000             0.003       0.017      0.002      0.006  0.094
## 9  0.012             0.007       0.116      0.194      0.308  0.201
## 10 0.636             0.309       0.278      0.311      0.036  0.152
## 11 0.331             0.356       0.506      0.279      0.626  0.434
## 12 0.017             0.051       0.071      0.086      0.010  0.084
##    sSnwDep ONI.indx
## 1  0.002   0.001   
## 2  0.007   0.000   
## 3  0.004   0.028   
## 4  0.138   0.202   
## 5  0.221   0.159   
## 6  0.006   0.017   
## 7  0.133   0.052   
## 8  0.025   0.004   
## 9  0.092   0.174   
## 10 0.036   0.001   
## 11 0.069   0.350   
## 12 0.267   0.012
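
As a rule-of-thumb summary of the output above (assuming the usual cutoff of roughly 30 for concerning collinearity, and the condindx component returned by perturb::colldiag), the largest condition index here falls well below that cutoff:

max(CI$condindx) #largest condition index; values above ~30 would suggest problematic collinearity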

Adding the Oceanic Niño Index

The Oceanic Niño Index (ONI) is a measure of the El Niño-Southern Oscillation (ENSO); it enters the model below as a nonlinear (RW1) effect.

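For reference, ONI.indx is the monthly ONI value attached to each record earlier in the script; a minimal sketch of how such a join could look, using a hypothetical one-row lookup table in place of the real ONI series:

oni.demo = data.frame(Year = 1997, Month = 11, ONI.demo = 2.2) #placeholder values only
head(merge(MyBeam3.tst[, c("Year", "Month", "ONI.indx")], oni.demo,
           by = c("Year", "Month"), all.x = TRUE))
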
pcprior = list(prec = list(prior="pc.prec", param = c(1, 0.0001))) #PC prior on the precision: Prob(sd > 1) = 0.0001

formulaONI = AvgBeam ~ f(Region, 
                          model="generic1",
                          Cmatrix=Q.Leroux, 
                          constr=TRUE, 
                          hyper=list(prec=list(prior=sdunif),
                                     beta=list(prior=lunif))) +
                        f(RegAPR,
                          model="iid",
                          hyper=reg.prior,
                          replicate=Year2,
                          constr = TRUE) +
                        f(ONI.indx, 
                          model="rw1", 
                          constr=TRUE,
                          hyper=pcprior) +
                                f(Year1, 
                          model="rw1", 
                          constr=TRUE,
                          hyper=list(prec=list(prior=sdunif))) +
                        f(ID.Region.Yr, 
                          model="iid", 
                          constr=TRUE,
                          hyper=list(prec=list(prior=sdunif))) +
                        sbio1 + sbio2 + sCORN_PLANTED + sWHEAT_PLANTED + sSOYBEANS_PLANTED +
                                sPrecEx20mm + sPrec95pct + sMMx1DPrec + sAMinT + sAMxT + sSnwDep


#theta1 = Model.T1oni$internal.summary.hyperpar$mean
theta1 = c(-2.4548244, -0.9312593, 2.7321389, 1.4852687, 0.6983412, 1.5902152, 1.5257093)

Model.T1oni = inla(formulaONI, 
                    family = "gaussian", 
                    verbose = FALSE, 
                    control.predictor = list(
                              compute = TRUE, 
                                 link = 1), 
                    control.mode = list(restart = TRUE, theta = theta1),
                    control.inla = list(strategy = "gaussian", 
                                    int.strategy = "eb"),
                    control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                    data = MyBeam3.tst)
dic.m8 = c(Model.T1oni$dic$dic, Model.T1oni$waic$waic)
summary(Model.T1oni)
## 
## Call:
## c("inla(formula = formulaONI, family = \"gaussian\", data = MyBeam3.tst, ",  "    verbose = TRUE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"))")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          3.9137       2543.2267         22.1809       2569.3212 
## 
## Fixed effects:
##                      mean     sd 0.025quant 0.5quant 0.975quant    mode
## (Intercept)       19.5311 0.0383    19.4558  19.5311    19.6063 19.5311
## sbio1              0.4928 0.0919     0.3123   0.4928     0.6732  0.4928
## sbio2             -0.0876 0.0401    -0.1663  -0.0876    -0.0089 -0.0876
## sCORN_PLANTED      0.1337 0.0465     0.0424   0.1337     0.2250  0.1337
## sWHEAT_PLANTED     0.1473 0.0369     0.0749   0.1473     0.2196  0.1473
## sSOYBEANS_PLANTED -0.0905 0.0432    -0.1753  -0.0905    -0.0059 -0.0905
## sPrecEx20mm       -0.1198 0.0370    -0.1925  -0.1198    -0.0471 -0.1198
## sPrec95pct         0.3239 0.0473     0.2310   0.3239     0.4167  0.3239
## sMMx1DPrec        -0.2165 0.0374    -0.2899  -0.2165    -0.1432 -0.2165
## sAMinT             0.1540 0.0526     0.0508   0.1540     0.2571  0.1540
## sAMxT             -0.2276 0.0612    -0.3477  -0.2276    -0.1077 -0.2276
## sSnwDep           -0.2268 0.0259    -0.2777  -0.2268    -0.1759 -0.2268
##                   kld
## (Intercept)         0
## sbio1               0
## sbio2               0
## sCORN_PLANTED       0
## sWHEAT_PLANTED      0
## sSOYBEANS_PLANTED   0
## sPrecEx20mm         0
## sPrec95pct          0
## sMMx1DPrec          0
## sAMinT              0
## sAMxT               0
## sSnwDep             0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## RegAPR   IID model 
## ONI.indx   RW1 model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0859 0.0001     0.0857   0.0859
## Precision for Region                    0.3966 0.0456     0.3203   0.3917
## Beta for Region                         0.9352 0.0248     0.8719   0.9409
## Precision for RegAPR                    4.4683 0.6689     3.1325   4.4945
## Precision for ONI.indx                  2.0671 0.4673     1.1616   2.0796
## Precision for Year1                     5.0120 1.0167     3.0819   5.0132
## Precision for ID.Region.Yr              4.6012 0.1607     4.2570   4.6146
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0862 0.0858
## Precision for Region                        0.4992 0.3794
## Beta for Region                             0.9669 0.9511
## Precision for RegAPR                        5.7141 4.6289
## Precision for ONI.indx                      2.9346 2.1372
## Precision for Year1                         7.0019 5.0740
## Precision for ID.Region.Yr                  4.8838 4.6674
## 
## Expected number of effective parameters(std dev): 1678.67(0.00)
## Number of equivalent replicates : 207.54 
## 
## Deviance Information Criterion (DIC) ...............: 1836912.50
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1678.77
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1836867.31
## Effective number of parameters .................: 1623.53
## 
## Marginal log-Likelihood:  -919534.77 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Lag Data

Modifying the best model above (Model.T1oni) to use one- and two-year lags of the response.

#Copy data
MyBeam4.tst = MyBeam3.tst

#Need to lag by county-year (one county and year at a time)
CntyYr.lvls = levels(factor(MyBeam4.tst$Region.Yr))

for(i in 1:length(CntyYr.lvls)){
  
  CntyYr.tmp = MyBeam4.tst %>% filter(Region.Yr == CntyYr.lvls[i])
  
  CntyYr.tmp$Lag1.AvgBeam = lag(CntyYr.tmp$AvgBeam, 1) #1 year lag
  
  CntyYr.tmp$Lag2.AvgBeam = lag(CntyYr.tmp$AvgBeam, 2)#2 year lag
  
  if(i == 1){L2.MyBeam4.tst = CntyYr.tmp
  } else{L2.MyBeam4.tst = rbind(L2.MyBeam4.tst, CntyYr.tmp)}
  
  }

#save(list=c("L2.MyBeam4.tst", "Q.Leroux", "g", "sdunif", "lunif"), file="C:/Users/humph173/Documents/Michigan_State/SLP_Beam_Diam/HPCC/Aug2519/Lag2yr.RData")

#save(list=c("L2.MyBeam4.tst", "Q.Leroux", "g", "sdunif", "lunif"), file="C:/Users/humph173/Documents/Michigan_State/SLP_Beam_Diam/HPCC/Aug2519/Lag1_2yr.RData")
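
An equivalent, more compact construction of the same lags with dplyr (a sketch; it mirrors the loop above group by group, although row order may differ from the rbind() result):

L2.demo = MyBeam4.tst %>%
            group_by(Region.Yr) %>%
            mutate(Lag1.AvgBeam = lag(AvgBeam, 1), #1 year lag
                   Lag2.AvgBeam = lag(AvgBeam, 2)) %>% #2 year lag
            ungroup()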

One Year Lag

pcprior = list(prec = list(prior="pc.prec", param = c(1, 0.0001)))

formulaONI = Lag1.AvgBeam ~ f(Region, 
                              model="generic1",
                              Cmatrix=Q.Leroux, 
                              constr=TRUE, 
                              hyper=list(prec=list(prior=sdunif),
                                         beta=list(prior=lunif))) +
                            f(RegAPR,
                              model="iid",
                              hyper=reg.prior,
                              replicate=Year2,
                              constr = TRUE) +
                            f(ONI.indx, 
                              model="rw1", 
                              constr=TRUE,
                              hyper=pcprior) +
                                    f(Year1, 
                              model="rw1", 
                              constr=TRUE,
                              hyper=list(prec=list(prior=sdunif))) +
                            f(ID.Region.Yr, 
                              model="iid", 
                              constr=TRUE,
                              hyper=list(prec=list(prior=sdunif))) +
                            sbio1 + sbio2 + sCORN_PLANTED + sWHEAT_PLANTED + sSOYBEANS_PLANTED +
                                    sPrecEx20mm + sPrec95pct + sMMx1DPrec + sAMinT + sAMxT + sSnwDep


#theta1 = Model.T1oni$internal.summary.hyperpar$mean
theta1 = c(-2.4548244, -0.9312593, 2.7321389, 1.4852687, 0.6983412, 1.5902152, 1.5257093)

Model.T1oni.l1 = inla(formulaONI, 
                      family = "gaussian", 
                      verbose = FALSE, 
                      control.predictor = list(
                                compute = TRUE, 
                                   link = 1), 
                      control.mode = list(restart = TRUE, theta = theta1),
                      control.inla = list(strategy = "gaussian", 
                                      int.strategy = "eb"),
                      control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                      data = L2.MyBeam4.tst)
dic.m9 = c(Model.T1oni.l1$dic$dic, Model.T1oni.l1$waic$waic)
summary(Model.T1oni.l1)
## 
## Call:
## c("inla(formula = formulaONI, family = \"gaussian\", data = L2.MyBeam4.tst, ",  "    verbose = FALSE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          4.9514       2692.3701         27.2380       2724.5596 
## 
## Fixed effects:
##                      mean     sd 0.025quant 0.5quant 0.975quant    mode
## (Intercept)       19.5679 0.0469    19.4759  19.5679    19.6598 19.5679
## sbio1              0.4539 0.0946     0.2681   0.4539     0.6396  0.4539
## sbio2             -0.1017 0.0403    -0.1808  -0.1017    -0.0226 -0.1017
## sCORN_PLANTED      0.1421 0.0468     0.0502   0.1421     0.2340  0.1421
## sWHEAT_PLANTED     0.1463 0.0375     0.0727   0.1463     0.2199  0.1463
## sSOYBEANS_PLANTED -0.0853 0.0434    -0.1705  -0.0853    -0.0002 -0.0853
## sPrecEx20mm       -0.1194 0.0385    -0.1949  -0.1194    -0.0439 -0.1194
## sPrec95pct         0.3265 0.0490     0.2302   0.3265     0.4227  0.3265
## sMMx1DPrec        -0.2196 0.0388    -0.2958  -0.2196    -0.1435 -0.2196
## sAMinT             0.1579 0.0552     0.0496   0.1579     0.2662  0.1579
## sAMxT             -0.2185 0.0643    -0.3447  -0.2185    -0.0924 -0.2185
## sSnwDep           -0.2265 0.0267    -0.2789  -0.2265    -0.1741 -0.2265
##                   kld
## (Intercept)         0
## sbio1               0
## sbio2               0
## sCORN_PLANTED       0
## sWHEAT_PLANTED      0
## sSOYBEANS_PLANTED   0
## sPrecEx20mm         0
## sPrec95pct          0
## sMMx1DPrec          0
## sAMinT              0
## sAMxT               0
## sSnwDep             0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## RegAPR   IID model 
## ONI.indx   RW1 model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0881 0.0002     0.0878   0.0881
## Precision for Region                    0.5355 0.0628     0.4131   0.5360
## Beta for Region                         0.9933 0.0084     0.9714   0.9959
## Precision for RegAPR                    2.5870 0.5438     1.6783   2.5333
## Precision for ONI.indx                  2.7913 0.7397     1.5823   2.7119
## Precision for Year1                     4.9197 1.3616     2.8266   4.7212
## Precision for ID.Region.Yr              4.4262 0.1747     4.1194   4.4118
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0885 0.0881
## Precision for Region                        0.6583 0.5417
## Beta for Region                             0.9993 0.9982
## Precision for RegAPR                        3.8090 2.4305
## Precision for ONI.indx                      4.4758 2.5608
## Precision for Year1                         8.1324 4.3486
## Precision for ID.Region.Yr                  4.8033 4.3667
## 
## Expected number of effective parameters(std dev): 1714.83(0.00)
## Number of equivalent replicates : 201.67 
## 
## Deviance Information Criterion (DIC) ...............: 1823074.52
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1714.88
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1823072.72
## Effective number of parameters .................: 1702.08
## 
## Marginal log-Likelihood:  -912633.02 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Two Year Lag

pcprior = list(prec = list(prior="pc.prec", param = c(1, 0.0001)))

formulaONI = Lag2.AvgBeam ~ f(Region, 
                              model="generic1",
                              Cmatrix=Q.Leroux, 
                              constr=TRUE, 
                              hyper=list(prec=list(prior=sdunif),
                                         beta=list(prior=lunif))) +
                            f(RegAPR,
                              model="iid",
                              hyper=reg.prior,
                              replicate=Year2,
                              constr = TRUE) +
                            f(ONI.indx, 
                              model="rw1", 
                              constr=TRUE,
                              hyper=pcprior) +
                                    f(Year1, 
                              model="rw1", 
                              constr=TRUE,
                              hyper=list(prec=list(prior=sdunif))) +
                            f(ID.Region.Yr, 
                              model="iid", 
                              constr=TRUE,
                              hyper=list(prec=list(prior=sdunif))) +
                            sbio1 + sbio2 + sCORN_PLANTED + sWHEAT_PLANTED + sSOYBEANS_PLANTED +
                                    sPrecEx20mm + sPrec95pct + sMMx1DPrec + sAMinT + sAMxT + sSnwDep


#theta1 = Model.T1oni$internal.summary.hyperpar$mean
theta1 = c(-2.4548244, -0.9312593, 2.7321389, 1.4852687, 0.6983412, 1.5902152, 1.5257093)

Model.T1oni.l2 = inla(formulaONI, 
                      family = "gaussian", 
                      verbose = FALSE, 
                      control.predictor = list(
                                compute = TRUE, 
                                   link = 1), 
                      control.mode = list(restart = TRUE, theta = theta1),
                      control.inla = list(strategy = "gaussian", 
                                      int.strategy = "eb"),
                      control.compute = list(cpo = TRUE, waic=TRUE, dic=TRUE),
                      data = L2.MyBeam4.tst)
dic.m10 = c(Model.T1oni.l2$dic$dic, Model.T1oni.l2$waic$waic)
summary(Model.T1oni.l2)
## 
## Call:
## c("inla(formula = formulaONI, family = \"gaussian\", data = L2.MyBeam4.tst, ",  "    verbose = FALSE, control.compute = list(cpo = TRUE, waic = TRUE, ",  "        dic = TRUE), control.predictor = list(compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), num.threads = 6)")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          5.3643       2164.9179         27.7029       2197.9851 
## 
## Fixed effects:
##                      mean     sd 0.025quant 0.5quant 0.975quant    mode
## (Intercept)       19.5657 0.0432    19.4810  19.5657    19.6504 19.5657
## sbio1              0.4584 0.0971     0.2677   0.4584     0.6488  0.4584
## sbio2             -0.1049 0.0417    -0.1869  -0.1049    -0.0230 -0.1049
## sCORN_PLANTED      0.1399 0.0485     0.0446   0.1399     0.2351  0.1399
## sWHEAT_PLANTED     0.1458 0.0389     0.0695   0.1458     0.2221  0.1458
## sSOYBEANS_PLANTED -0.0844 0.0451    -0.1729  -0.0844     0.0039 -0.0844
## sPrecEx20mm       -0.1205 0.0396    -0.1982  -0.1205    -0.0428 -0.1205
## sPrec95pct         0.3313 0.0505     0.2321   0.3313     0.4304  0.3313
## sMMx1DPrec        -0.2201 0.0400    -0.2986  -0.2201    -0.1417 -0.2201
## sAMinT             0.1528 0.0566     0.0416   0.1528     0.2639  0.1528
## sAMxT             -0.2203 0.0659    -0.3497  -0.2203    -0.0910 -0.2203
## sSnwDep           -0.2318 0.0275    -0.2859  -0.2318    -0.1778 -0.2318
##                   kld
## (Intercept)         0
## sbio1               0
## sbio2               0
## sCORN_PLANTED       0
## sWHEAT_PLANTED      0
## sSOYBEANS_PLANTED   0
## sPrecEx20mm         0
## sPrec95pct          0
## sMMx1DPrec          0
## sAMinT              0
## sAMxT               0
## sSnwDep             0
## 
## Random effects:
## Name   Model
##  Region   Generic1 model 
## RegAPR   IID model 
## ONI.indx   RW1 model 
## Year1   RW1 model 
## ID.Region.Yr   IID model 
## 
## Model hyperparameters:
##                                           mean     sd 0.025quant 0.5quant
## Precision for the Gaussian observations 0.0882 0.0002     0.0878   0.0882
## Precision for Region                    0.4687 0.0782     0.3159   0.4704
## Beta for Region                         0.9302 0.0659     0.7463   0.9510
## Precision for RegAPR                    3.9946 1.2276     2.4176   3.7149
## Precision for ONI.indx                  3.5467 1.5271     1.1938   3.3626
## Precision for Year1                     5.8398 2.7476     2.6194   5.1457
## Precision for ID.Region.Yr              4.1113 0.1789     3.8374   4.0840
##                                         0.975quant   mode
## Precision for the Gaussian observations     0.0886 0.0882
## Precision for Region                        0.6180 0.4811
## Beta for Region                             0.9912 0.9772
## Precision for RegAPR                        7.0982 3.1752
## Precision for ONI.indx                      7.0242 2.8457
## Precision for Year1                        12.9807 4.0958
## Precision for ID.Region.Yr                  4.5252 3.9859
## 
## Expected number of effective parameters(std dev): 1745.59(0.00)
## Number of equivalent replicates : 196.64 
## 
## Deviance Information Criterion (DIC) ...............: 1809405.09
## Deviance Information Criterion (DIC, saturated) ....: NULL
## Effective number of parameters .....................: 1745.53
## 
## Watanabe-Akaike information criterion (WAIC) ...: 1809404.63
## Effective number of parameters .................: 1733.40
## 
## Marginal log-Likelihood:  -905783.93 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

Compare Models

Models = c("Model1", "Model2",
           "Model3", "Model4",
           "Model5", "Model6",
           "Model7", "Model8",
           "Model9", "Model10")

Type = c("Space Only",
         "Space + Time",
         "Space + Time + Interaction",
         "Space + Time + Interaction + Region",
         "Model above + NW12 (APR)",
         "Model above + Linear Covariates",
         "Model Above Reduced",
         "Model Above + El Nino",
         "Model Above + 1yr Lag",
         "Two Models Above + 2yr Lag")

#Deviance information criteria
DICs = c(dic.m1[1], dic.m2[1], dic.m3[1],
         dic.m4[1], dic.m5[1], dic.m6[1],
         dic.m7[1], dic.m8[1], dic.m9[1],
         dic.m10[1])

#Watanabe-Akaike information criteria
WAICs = c(dic.m1[2], dic.m2[2], dic.m3[2],
          dic.m4[2], dic.m5[2], dic.m6[2],
          dic.m7[2], dic.m8[2], dic.m9[2],
          dic.m10[2])

Model_mets = as.data.frame(cbind(Model = Models,
                                 DIC = round(DICs, 0),
                                 WAIC = round(WAICs, 0),
                                 Type = Type))
kable(Model_mets) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) 
Model DIC WAIC Type
Model1 2793712 2793999 Space Only
Model2 1844406 1844410 Space + Time
Model3 1837327 1837326 Space + Time + Interaction
Model4 1837305 1837302 Space + Time + Interaction + Region
Model5 1837161 1837159 Model above + NW12 (APR)
Model6 1837112 1837110 Model above + Linear Covariates
Model7 1837109 1837107 Model Above Reduced
Model8 1836912 1836867 Model Above + El Nino
Model9 1823075 1823073 Model Above + 1yr Lag
Model10 1809405 1809405 Two Models Above + 2yr Lag
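
As a convenience (not part of the original workflow), differences relative to the lowest-DIC and lowest-WAIC model make the table above easier to scan:

Model_mets$dDIC  = round(DICs - min(DICs), 0)
Model_mets$dWAIC = round(WAICs - min(WAICs), 0)
Model_mets[, c("Model", "dDIC", "dWAIC")]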

Statewide Temporal Effect

mic.d = as.data.frame(Model.T1oni.l2$summary.random$Year1)[,c(1:6)]
                       
names(mic.d) = c("ID", "Mean", "sd", "Q025", "Q50", "Q975")

mic.d$YLab = paste(levels(factor(MyBeam3$year)))
YLab = paste(mic.d$YLab)

ggplot(mic.d, aes(ID, Mean)) +
        geom_smooth(col = "black", 
                  linetype= "solid",
                  method = "loess",
                  span = 0.4,
                  se = FALSE,
                  lwd = 1) +
        geom_smooth(data = mic.d, aes(ID, Q025), 
                    col = "grey", 
                    method = "loess",
                    span = 0.4,
                    se = FALSE,
                    linetype= "dashed") +
        geom_smooth(data = mic.d, aes(ID, Q975), 
                    col = "grey", 
                    method = "loess",
                    span = 0.4,
                    se = FALSE,
                    linetype= "dashed") +
        geom_hline(yintercept = 0, 
                   linetype = "solid",
                   col = "darkgray",
                   size = 0.5) +  
        xlab(" ") +
        ylab("Statewide Temporal Effect") +  
        theme_classic() +
        scale_y_continuous(breaks = seq(-0.5, 1.75, 0.5)) +
        scale_x_continuous(breaks = 0:30,labels=paste0(mic.d$YLab)) +
        theme(axis.text=element_text(size=16),
              strip.text = element_text(face="bold", size = 20),
              axis.title.y = element_text(face="bold", size = 20),
              axis.text.x = element_text(face="bold", size=14, vjust=0.5, 
                                         hjust=1, angle=90),
              axis.title.x = element_text(face="bold", size = 20))

Oceanic Niño Index

mic.df = as.data.frame(Model.T1oni.l2$summary.random$ONI.indx)[,c(1:6)]
names(mic.df) = c("ID", "Mean", "sd", "Q025", "Q50", "Q975")

ONI.plt = MyBeam4.tst[,c("Year", "Month", "ONI.indx")]

ONI.plt$Mean = with(mic.df,
                    Mean[match(
                               ONI.plt$ONI.indx,
                                           ID)])

ONI.plt$Q025 = with(mic.df,
                    Q025[match(
                               ONI.plt$ONI.indx,
                                           ID)])

ONI.plt$Q975 = with(mic.df,
                    Q975[match(
                               ONI.plt$ONI.indx,
                                           ID)])

plt.df = as.data.frame(
                    ONI.plt %>%
                      group_by(Year, Month) %>%
                      summarise(Mean = mean(Mean),
                                MeanL = mean(Q025),
                                MeanH = mean(Q975)))

YLab = paste(levels(factor(MyBeam3$year)))

ggplot(plt.df, aes(Year, Mean, color = Mean)) +
        geom_smooth(aes(col = ..y..), 
                  linetype= "solid",
                  method = "loess",
                  span = 0.3,
                  se = FALSE,
                  lwd = 2) +
        scale_colour_gradient2(low = "blue", 
                               high = "red", 
                               midpoint=0) +
        geom_smooth(data = plt.df, aes(Year, MeanL), 
                    col = "grey", 
                    method = "loess",
                    span = 0.3,
                    se = FALSE,
                    linetype= "dashed") +
        geom_smooth(data = plt.df, aes(Year, MeanH), 
                    col = "grey", 
                    method = "loess",
                    span = 0.3,
                    se = FALSE,
                    linetype= "dashed") +
        geom_hline(yintercept = 0, 
                   linetype = "solid",
                   col = "darkgray",
                   size = 0.5) +  
        xlab(" ") +
        ylab("Oceanic Niño Index Effect") +  
        theme_classic() +
        scale_x_continuous(breaks = seq(1987, 2017, 1), labels=paste0(YLab)) +
        theme(axis.text=element_text(size=16),
              strip.text = element_text(face="bold", size = 20),
              axis.title.y = element_text(face="bold", size = 20),
              legend.position="none",
              axis.text.x = element_text(face="bold", size=14, vjust=0.5, 
                                         hjust=1, angle=90),
              axis.title.x = element_text(face="bold", size = 20))

Compare Temporal Effect with ONI

ggplot(plt.df, aes(Year, Mean, color = Mean)) +
        geom_smooth(aes(col = ..y..), 
                  linetype= "solid",
                  method = "loess",
                  span = 0.3,
                  se = FALSE,
                  lwd = 2) +
        scale_colour_gradient2(low = "blue", 
                               high = "red", 
                               midpoint=0) +
        geom_smooth(data = mic.d, aes(as.numeric(YLab), Mean),
                    col = "black", 
                    method = "loess",
                    span = 0.3,
                    se = FALSE,
                    linetype= "solid") +
        geom_hline(yintercept = 0, 
                   linetype = "solid",
                   col = "darkgray",
                   size = 0.5) +  
        xlab(" ") +
        ylab("Effect Size") +  
       theme_classic() +
        scale_y_continuous(breaks = seq(-0.5, 1.75, 0.5)) +
        scale_x_continuous(breaks = seq(1987, 2017, 1), labels=paste0(YLab)) +
        theme(axis.text=element_text(size=16),
              strip.text = element_text(face="bold", size = 20),
              axis.title.y = element_text(face="bold", size = 20),
              legend.position="none",
              axis.text.x = element_text(face="bold", size=14, vjust=0.5, 
                                         hjust=1, angle=90),
              axis.title.x = element_text(face="bold", size = 20))

Region Effect

Region.plt = as.data.frame(Model.T1oni.l2$summary.random$RegAPR)

names(Region.plt) = c("ID", "Mean", "sd", "Q025", "Q50", "Q975", "mode", "kld")

Region.plt$ID2 = factor(rep(c("Northwest 12", "Lower Peninsula", "Upper Peninsula"),31))
Region.plt$ID2 = factor(Region.plt$ID2, levels = c("Upper Peninsula", "Lower Peninsula", "Northwest 12"))
Region.plt$Year = as.integer(rep(Yr.lv, each=3))

ggplot(Region.plt, aes(x=Year, y=Mean)) + 
    geom_point(size=2, pch=19, col = "red") +
    geom_linerange(aes(ymin=Q025, ymax=Q975), colour="black") +
    geom_hline(yintercept = 0, 
                   linetype = "dotted",
                   colour = "darkgray",
                   size = 0.5) +
    scale_x_continuous(breaks = seq(1987, 2017, 1), labels=paste0(YLab)) +
    facet_wrap(~ID2, ncol=1) +
        theme_classic() +
               xlab(NULL) +
               ylab("Regional Grouping Effect ") +
               theme(axis.title.y = element_text(face="bold", size=18),
                     strip.text.x = element_text(face="bold", size = 14, colour = "black"),
                     axis.text.y = element_text(face="bold", size=12),
                     axis.text.x = element_text(face="bold", size=14, vjust=0.5, 
                                         hjust=1, angle=90))

Linear Effects

Fixed.df = Model.T1oni.l2$summary.fixed[,c(1:3,5)]
names(Fixed.df) = c("Mean", "SD", "Q025", "Q975")

Fixed.df = round(Fixed.df[,1:4], 3)
kable(Fixed.df) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Mean SD Q025 Q975
(Intercept) 19.566 0.043 19.481 19.650
sbio1 0.458 0.097 0.268 0.649
sbio2 -0.105 0.042 -0.187 -0.023
sCORN_PLANTED 0.140 0.049 0.045 0.235
sWHEAT_PLANTED 0.146 0.039 0.070 0.222
sSOYBEANS_PLANTED -0.084 0.045 -0.173 0.004
sPrecEx20mm -0.120 0.040 -0.198 -0.043
sPrec95pct 0.331 0.051 0.232 0.430
sMMx1DPrec -0.220 0.040 -0.299 -0.142
sAMinT 0.153 0.057 0.042 0.264
sAMxT -0.220 0.066 -0.350 -0.091
sSnwDep -0.232 0.028 -0.286 -0.178

Spatial Effect Random Field

LU.tab = as.data.frame(Model.T1oni.l2$summary.random$Region[, c(1:2,4,6)])
names(LU.tab) = c("ID", "Mean", "Q025", "Q975")

LU.tab$County = LookUp.tab$NAME

Counties2 = Counties

Domain.r = raster(res=0.01, 
                  ext = extent(Counties))

Counties2$RE = LU.tab$Mean
Region.mean.r = rasterize(Counties2,
                          Domain.r,
                          field = "RE",
                          background = NA)
              
Counties2$Q025 = LU.tab$Q025
Region.Q025.r = rasterize(Counties2,
                          Domain.r,
                          field = "Q025",
                          background = NA)


Counties2$Q975 = LU.tab$Q975 
Region.Q975.r = rasterize(Counties2,
                          Domain.r,
                          field = "Q975",
                          background = NA)


MRF.stk = stack(Region.Q025.r, Region.mean.r, Region.Q975.r)
names(MRF.stk) = c("A", "B", "C") #A = 2.5% quantile, B = posterior mean, C = 97.5% quantile

Plot Spatial Random Field

mCols = c("#A50026", "#D73027", "#F46D43", "#FDAE61", "#E0F3F8", "#ABD9E9", "#74ADD1", "#4575B4","#313695") 

rng = seq(-3.3, 3.3, 0.01)

cr = colorRampPalette(c(rev(mCols)),  
                        bias = 1, space = "rgb")

levelplot(MRF.stk, 
          margin = FALSE,
          maxpixels = 1e5, 
          layout=c(3, 1),
          xlab = " ", ylab = NULL, 
          names.attr= c("A","B","C"),
          col.regions = cr, at = rng,
          colorkey = list(space = "bottom"),
          par.settings = list(axis.line = list(col = 'transparent'), 
                             strip.background = list(col = 'transparent'), 
                             strip.border = list(col = 'transparent')),
                             par.strip.text = list(cex=1, fontface='bold', col="black"),
                             scales = list(draw=FALSE)) + 
         latticeExtra::layer(sp.polygons(Counties, col = "darkgray", lwd = 0.5)) 

Compare Model.sp and Model.T1oni.l2 (Spatial Random Fields)

names(MRF.stk) = c("D", "E", "F") #this model's panels become D-F in the combined figure

MRF.stk2 = stack(MRF.stk2, MRF.stk) #MRF.stk2 (panels A-C, from Model.sp) was built earlier in the script

rng = seq(-35, 5, 0.01)

cr = colorRampPalette(c(rev(mCols)),  
                        bias = 0.20, space = "rgb")

levelplot(MRF.stk2, 
          margin = FALSE,
          maxpixels = 1e5, 
          layout=c(3, 2),
          xlab = " ", ylab = NULL, 
          names.attr= c("A","B","C", "D", "E", "F"),
          col.regions = cr, at = rng,
          colorkey = list(labels=list(at=seq(-35, 5, 5),  
                         labels=c("-35","-30","-25","-20","-15","-10","-5","0","5"), 
                         cex=1.5),
                         labels=list(cex=12),
                         space = "bottom"), 
          par.settings = list(axis.line = list(col = 'transparent'), 
                             strip.background = list(col = 'transparent'), 
                             strip.border = list(col = 'transparent')),
                             par.strip.text = list(cex=1, fontface='bold', col="black"),
                             scales = list(draw=FALSE)) + 
         latticeExtra::layer(sp.polygons(Counties, col = "darkgray", lwd = 0.5)) 

Space-Time Interaction Mean Random Field

LU.tab = as.data.frame(Model.T1oni.l2$summary.random$ID.Region.Yr[, c(1:2,4,6)])
names(LU.tab) = c("ID", "Mean", "Q025", "Q975")

LU.tab$County = with(MyBeam4.tst,
                     County[match(
                       LU.tab$ID,
                          ID.Region.Yr)])

LU.tab$Year = with(MyBeam4.tst,
                     Year[match(
                       LU.tab$ID,
                          ID.Region.Yr)])

Counties2 = Counties

for(i in 1:length(Yr.lv)){
  
              tmp.lu = LU.tab %>%
                       filter(Year == Yr.lv[i])
              
              Counties2$RE = with(tmp.lu,
                                 Mean[match(
                                    Counties2$NAME,
                                           County)])
              
              Tmp.r = rasterize(Counties2,
                      Domain.r,
                      field = "RE",
                      background = NA)
              
              if(i == 1){Int.stk = Tmp.r}
                else{Int.stk = stack(Int.stk, Tmp.r)}
              }

names(Int.stk) = paste("Year.", Yr.lv, sep="")

Plot Interaction Fields

Rng.stk = range(values(Int.stk), na.rm=T)

rng = seq(Rng.stk[1]-0.02, Rng.stk[2]+0.02, 0.01)

mCols = c("#A50026", "#D73027", "#F46D43", "#FDAE61", "#E0F3F8", "#ABD9E9", "#74ADD1", "#4575B4","#313695") 
#brewer.pal(11, "RdBu")

cr = colorRampPalette(c(rev(mCols)),  
                        bias = 1, space = "rgb")

levelplot(Int.stk, 
          margin = FALSE,
          maxpixels = 1e5, 
          layout=c(5, 7),
          xlab = " ", ylab = NULL, 
          names.attr= Yr.lv,
          col.regions = cr, at = rng,
          colorkey = FALSE,
          par.settings = list(axis.line = list(col = 'transparent'), 
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          par.strip.text = list(cex = 1, fontface = 'bold', col = "black"),
          scales = list(draw = FALSE)) + 
         latticeExtra::layer(sp.polygons(Counties, col = "darkgray", lwd = 0.5)) 

Fitted Values

MyBeam4.tst$Fit = Model.T1oni.l2$summary.fitted.values$mean
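
A quick in-sample check of how closely the fitted values track the observations can accompany the maps. This is a hedged sketch: it assumes the observed response column in MyBeam4.tst is named LBeam, which may not match the data frame exactly; substitute the column actually used as the model response.

# Hedged sketch: in-sample agreement between observed and fitted beam diameters.
# "LBeam" is an assumed column name for the observed response; adjust as needed.
ok = complete.cases(MyBeam4.tst$LBeam, MyBeam4.tst$Fit)

cor(MyBeam4.tst$LBeam[ok], MyBeam4.tst$Fit[ok])              # linear correlation
sqrt(mean((MyBeam4.tst$LBeam[ok] - MyBeam4.tst$Fit[ok])^2))  # root mean squared error (mm)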


Counties2 = Counties

# Build one raster layer per year: attach each county's fitted value, then rasterize
for(i in 1:length(Yr.lv)){
  
  tmp.lu = MyBeam4.tst %>%
           filter(Year == Yr.lv[i])
  
  Counties2$Fit = with(tmp.lu,
                       Fit[match(Counties2$NAME, County)])
  
  Tmp.r = rasterize(Counties2,
                    Domain.r,
                    field = "Fit",
                    background = NA)
  
  if(i == 1){
    Fit.stk = Tmp.r
  } else {
    Fit.stk = stack(Fit.stk, Tmp.r)
  }
}

names(Fit.stk) = paste("Yr.", Yr.lv, sep="")

Plot Fitted Values

Rng.stk = range(values(Fit.stk), na.rm=T)

rng = seq(14, 24, 0.01)
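
Because the color breaks here are fixed at 14-24 mm rather than taken from Rng.stk, a quick check that the fitted surface actually falls inside that window avoids cells silently falling outside the color ramp. A minimal sketch:

# Hedged sketch: confirm the fixed 14-24 mm breaks cover the fitted surface.
Rng.stk  # observed range of Fit.stk values

any(values(Fit.stk) < 14 | values(Fit.stk) > 24,
    na.rm = TRUE)  # TRUE flags cells outside the color breaks (not drawn by levelplot)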

mCols = brewer.pal(9, "YlOrBr")
#brewer.pal(11, "YlOrBr")

cr = colorRampPalette(c(mCols),  
                        bias = 1, space = "rgb")

levelplot(Fit.stk, 
          margin = FALSE,
          maxpixels = 1e5, 
          layout=c(5, 7),
          xlab = " ", ylab = NULL, 
          names.attr= Yr.lv,
          col.regions = cr, at = rng,
          colorkey = list(labels = list(at = c(14, 16, 18, 20, 22, 24),  
                                        labels = c("14 mm", "16 mm", "18 mm", "20 mm", "22 mm", "24 mm"), 
                                        cex = 1.5),
                          space = "right"), 
          par.settings = list(axis.line = list(col = 'transparent'), 
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          par.strip.text = list(cex = 1, fontface = 'bold', col = "black"),
          scales = list(draw = FALSE)) + 
         latticeExtra::layer(sp.polygons(Counties, col = "darkgray", lwd = 0.5)) 
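
It can also be useful to pull a single county's fitted trajectory out of the test data rather than reading it off the map grid. A minimal sketch; "Alcona" is used only as an illustrative county name and any value of MyBeam4.tst$County works.

# Hedged sketch: fitted yearling beam diameter for one county across all years.
# "Alcona" is illustrative; substitute any county present in MyBeam4.tst.
one.cty = MyBeam4.tst %>%
          filter(County == "Alcona") %>%
          group_by(Year) %>%
          summarize(Fit = mean(Fit, na.rm = TRUE)) %>%
          arrange(Year)

plot(as.numeric(as.character(one.cty$Year)), one.cty$Fit, type = "b",
     xlab = "Year", ylab = "Fitted beam diameter (mm)")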

Session Info

sessionInfo()
## R version 3.5.3 (2019-03-11)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17134)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] grid      stats     graphics  grDevices utils     datasets  methods  
## [8] base     
## 
## other attached packages:
##  [1] perturb_2.10         astsa_1.8            lubridate_1.7.4     
##  [4] lettercase_0.13.1    kableExtra_1.1.0     stringr_1.4.0       
##  [7] readr_1.3.1          data.table_1.12.0    tseries_0.10-46     
## [10] dismo_1.1-4          dplyr_0.8.0.1        Rmisc_1.5           
## [13] plyr_1.8.4           xtable_1.8-3         gridExtra_2.3       
## [16] GISTools_0.7-4       ggthemes_4.1.0       spdep_1.0-2         
## [19] sf_0.7-3             spData_0.3.0         mapproj_1.2.6       
## [22] maptools_0.9-5       rgeos_0.4-2          rasterVis_0.45      
## [25] latticeExtra_0.6-28  RColorBrewer_1.1-2   lattice_0.20-38     
## [28] corrplot_0.84        factoextra_1.0.5     ggplot2_3.1.0       
## [31] seas_0.5-2           MASS_7.3-51.1        climdex.pcic_1.1-9.1
## [34] PCICt_0.5-4.1        rnoaa_0.8.4          missMDA_1.14        
## [37] FactoMineR_1.41      fields_9.6           maps_3.3.0          
## [40] spam_2.2-2           dotCall64_1.0-0      raster_2.8-19       
## [43] reshape2_1.4.3       rgdal_1.4-3          INLA_18.07.12       
## [46] sp_1.3-1             Matrix_1.2-17        rgl_0.100.19        
## [49] knitr_1.22          
## 
## loaded via a namespace (and not attached):
##  [1] backports_1.1.3         lazyeval_0.2.2         
##  [3] splines_3.5.3           crosstalk_1.0.0        
##  [5] digest_0.6.18           foreach_1.4.4          
##  [7] htmltools_0.3.6         gdata_2.18.0           
##  [9] magrittr_1.5            cluster_2.0.7-1        
## [11] doParallel_1.0.14       gmodels_2.18.1         
## [13] xts_0.11-2              colorspace_1.4-1       
## [15] rvest_0.3.2             rappdirs_0.3.1         
## [17] ggrepel_0.8.0           hoardr_0.5.2           
## [19] pan_1.6                 xfun_0.5               
## [21] crayon_1.3.4            jsonlite_1.6           
## [23] hexbin_1.27.2           lme4_1.1-21            
## [25] survival_2.43-3         zoo_1.8-5              
## [27] iterators_1.0.10        glue_1.3.1             
## [29] gtable_0.3.0            webshot_0.5.1          
## [31] quantmod_0.4-14         jomo_2.6-7             
## [33] scales_1.0.0            mvtnorm_1.0-10         
## [35] DBI_1.0.0               miniUI_0.1.1.1         
## [37] Rcpp_1.0.1              viridisLite_0.3.0      
## [39] units_0.6-2             flashClust_1.01-2      
## [41] foreign_0.8-71          httr_1.4.0             
## [43] htmlwidgets_1.3         mice_3.4.0             
## [45] pkgconfig_2.0.2         XML_3.98-1.19          
## [47] nnet_7.3-12             deldir_0.1-16          
## [49] crul_0.7.0              labeling_0.3           
## [51] tidyselect_0.2.5        rlang_0.3.2            
## [53] manipulateWidget_0.10.0 later_0.8.0            
## [55] munsell_0.5.0           tools_3.5.3            
## [57] generics_0.0.2          broom_0.5.1            
## [59] evaluate_0.13           yaml_2.2.0             
## [61] purrr_0.3.2             mitml_0.3-7            
## [63] nlme_3.1-137            mime_0.6               
## [65] leaps_3.0               xml2_1.2.0             
## [67] rstudioapi_0.10         compiler_3.5.3         
## [69] curl_3.3                e1071_1.7-1            
## [71] tibble_2.1.1            stringi_1.4.3          
## [73] highr_0.8               classInt_0.3-1         
## [75] nloptr_1.2.1            pillar_1.3.1           
## [77] LearnBayes_2.15.1       httpuv_1.5.0           
## [79] R6_2.4.0                promises_1.0.1         
## [81] codetools_0.2-16        boot_1.3-20            
## [83] gtools_3.8.1            assertthat_0.2.1       
## [85] withr_2.1.2             httpcode_0.2.0         
## [87] hms_0.4.2               expm_0.999-4           
## [89] parallel_3.5.3          quadprog_1.5-5         
## [91] rpart_4.1-13            tidyr_0.8.3            
## [93] coda_0.19-2             class_7.3-15           
## [95] minqa_1.2.4             rmarkdown_1.12         
## [97] TTR_0.23-4              scatterplot3d_0.3-41   
## [99] shiny_1.2.0