1 Data Prep

1.0.1 Load and Prep Observation Data

Michigan: Loading the raw Michigan data, dropping previously acquired environmental covariates, and summing trial successes.

MI.df = read.csv("./Data_102418/mi_data10-24-18.csv",  
                   header = TRUE, stringsAsFactors = FALSE, sep=",") 

MI.df = MI.df %>%
        mutate(Long = Lat, #these appear reversed in original data
               Lat = Lon,
               State = "Michigan", #transect state
               Spp = "Hare",
               OBS = Hare, #overall presence (at any trial)
               Counts = T.1 + T.2 + T.3 + T.4 + T.5 + T.6 + T.7 + T.8 + T.9, #Sum across trials
               Trials = 9, #no NAs in this dataset; all transects have 9 trials
               Data = "Fit",
               Site2 = paste("M", 1:nrow(MI.df), sep=".")) %>%
        select(Site2, Long, Lat, State, OBS, Counts, Trials, Data, Spp)

dim(MI.df) #Check data
## [1] 125   9
head(MI.df)
##   Site2      Long      Lat    State OBS Counts Trials Data  Spp
## 1   M.1 -88.24388 47.41681 Michigan   1      6      9  Fit Hare
## 2   M.2 -84.58309 45.53829 Michigan   1      6      9  Fit Hare
## 3   M.3 -84.14892 45.46854 Michigan   1      6      9  Fit Hare
## 4   M.4 -90.07851 46.35078 Michigan   1      1      9  Fit Hare
## 5   M.5 -86.56820 46.37911 Michigan   1      1      9  Fit Hare
## 6   M.6 -86.75846 46.25573 Michigan   1      9      9  Fit Hare
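
Because the raw file stores latitude and longitude reversed, a quick sanity check (a sketch, not part of the original script) can confirm the swap worked:

stopifnot(all(MI.df$Long < 0),                 #western hemisphere
          all(MI.df$Lat > 40 & MI.df$Lat < 49)) #plausible Michigan latitudes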

Wisconsin: Loading data as above, but the number of trials varies by transect, so successes and trials are summed for each transect individually.

WI.df = read.csv("./Data_102418/wi_data10-24-18.csv",  
                   header = TRUE, stringsAsFactors = FALSE, sep=",")

WI.Trial.df = WI.df[2:21] #pull out the trial columns

WI.Trial.df[WI.Trial.df >= 1] = 1 #convert track counts to detection (1)

WI.Trial.df$Counts = rowSums(WI.Trial.df, na.rm=T) #sum successes by transect

WI.Trial.df$Trials = rowSums(!is.na(WI.df[2:21])) #count non-NA trials per transect

WI.df = WI.df %>% #Basically the same as MI above
        mutate(Long = Lat, 
               Lat = Lon,
               State = "Wisconsin",
               OBS = Hare,
               Spp = "Hare",
               Data = "Fit",
               Counts = WI.Trial.df$Counts, #Sum across trials
               Trials = WI.Trial.df$Trials, #trials vary by transect
               Site2 = paste("W", 1:nrow(WI.df), sep=".")) %>% #Site identifier
        select(Site2, Long, Lat, State, OBS, Counts, Trials, Data, Spp)

dim(WI.df)
## [1] 195   9
head(WI.df)
##   Site2      Long      Lat     State OBS Counts Trials Data  Spp
## 1   W.1 -90.72816 44.36486 Wisconsin   1      0      8  Fit Hare
## 2   W.2 -90.84052 44.27223 Wisconsin   0      0      6  Fit Hare
## 3   W.3 -90.82058 44.45853 Wisconsin   1      5      8  Fit Hare
## 4   W.4 -92.11831 45.44736 Wisconsin   0      0      6  Fit Hare
## 5   W.5 -91.22783 45.02406 Wisconsin   0      0      6  Fit Hare
## 6   W.6 -90.01462 44.21396 Wisconsin   0      0      8  Fit Hare
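
As a hedged consistency check (not in the original script), successes should never exceed the number of trials:

stopifnot(all(WI.df$Counts <= WI.df$Trials))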

Combine & Convert to Points

Fit.df = rbind(MI.df, WI.df)

Fit.pnt = SpatialPointsDataFrame(Fit.df[, c("Long","Lat")], Fit.df) 
proj4string(Fit.pnt) = "+proj=longlat +datum=NAD83 +no_defs +ellps=GRS80 +towgs84=0,0,0"

1.1 Get Domain Boundaries

Defining the domain extent, downloading state boundaries, and then creating raster versions for later plotting. Note that the “Lakes” file is used to better identify lake boundaries. A copy of this shapefile is in the folder with this script; the path to the shapefile will need to be updated in this chunk.

World = map("world", 
            fill = TRUE, 
            plot = FALSE)

IDs = sapply(strsplit(World$names, ":"), function(x) x[1])

LL84 = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

WorldP = map2SpatialPolygons(World, IDs = IDs,
                             proj4string = CRS(projection(LL84)))

#Add a dataframe  
pid = sapply(slot(WorldP, "polygons"), 
             function(x) slot(x, "ID"))

p.df = data.frame( ID=1:length(WorldP), 
                   row.names = pid)

World = SpatialPolygonsDataFrame(WorldP, p.df)
World = spTransform(World, LL84)
World = gBuffer(World, width = 0, byid = F) #zero-width buffer repairs invalid geometries

#States
States = map("state", 
             fill = TRUE, 
             plot = FALSE)


IDs = sapply(strsplit(States$names, ":"), function(x) x[1])

LL84 = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

StatesP = map2SpatialPolygons(States, IDs = IDs,
                              proj4string = CRS(projection(LL84)))

#Add a dataframe  
pid = sapply(slot(StatesP, "polygons"), 
             function(x) slot(x, "ID"))

p.df = data.frame( ID=1:length(StatesP), 
                   row.names = pid)

States = SpatialPolygonsDataFrame(StatesP, p.df)
States = spTransform(States, LL84)



Lakes = readOGR(dsn = "C:/Users/humph173/Documents/Michigan_State/Marten/ArcWork/Lakes", 
                layer = "Lake_2ks", 
                stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\Michigan_State\Marten\ArcWork\Lakes", layer: "Lake_2ks"
## with 1 features
## It has 15 fields
## Integer64 fields read as strings:  OBJECTID Id area InPoly_FID SimPgnFlag
LakesLL = spTransform(Lakes, proj4string(States))


Ext = c(-93.173415, -81.932841, 41.217132, 47.740649)
Domain = crop(States, Ext)
Domain$Name = str_cap_words(rownames(Domain@data))

Water0 = as(extent(Domain), "SpatialPolygons") 
p.df = data.frame(ID=1:length(Water0))

Water0 = SpatialPolygonsDataFrame(Water0, p.df, match.ID = F)
proj4string(Water0) = proj4string(Domain)
Water1 = gDifference(Water0, spTransform(Lakes, proj4string(Water0)))

DomLLU = gUnaryUnion(Domain)

#Rasterized version  
Ras = raster(res = 0.02, ext = extent(DomLLU),
             crs = proj4string(DomLLU))

Domain.r = rasterize(DomLLU, Ras, 
                     field = 0, 
                     background = NA)

#Point grid version
Grd.pnt = rasterToPoints(Domain.r, spatial = TRUE)

Grd.pnt@data = Grd.pnt@data %>%
  mutate(Long = Grd.pnt@coords[,1],
         Lat = Grd.pnt@coords[,2],
         Spp = "Grid") %>%
  select(-layer)


#Dense for hi-res prediction
Zext = as(raster::extent(-92, -91, 45, 46), "SpatialPolygons")
proj4string(Zext) = proj4string(States)

Z_samp = disaggregate(crop(Domain.r, Zext), fact=6)
zPred.pnts = rasterToPoints(Z_samp, sp=T)

zPred.pnts@data = zPred.pnts@data %>%
  mutate(Long = zPred.pnts@coords[,1],
         Lat = zPred.pnts@coords[,2],
         Spp = "Zoom") %>%
  select(-layer)

Grd.pnt.df = rbind(Grd.pnt@data, zPred.pnts@data)

Grd.pnt = SpatialPointsDataFrame(Grd.pnt.df[,c("Long","Lat")], Grd.pnt.df)
proj4string(Grd.pnt) = proj4string(States)

Quick Plot to Check Data: plotting the locations used to fit the model.

rng = seq(0, 255, 1)

mCols  = brewer.pal(11, "RdYlBu")[-6] 
cr0 = rev(colorRampPalette((mCols))(n = 256))
cr = colorRampPalette(c("tan", cr0),  
                      bias = 1, space = "rgb")

MyMatrix = matrix(nrow=7, ncol=2)
rownames(MyMatrix) = rownames(coordinates(Domain))
MyMatrix[,1] = c(-89.07686, -86.11260, -91.91021, -83.82014, -91.31258, -83.46125, -89.51171)
MyMatrix[,2] = c(41.80000, 41.36514, 42.28710, 43.0, 47.5041, 41.61285, 43.63285)

Hare.LLpnt = spTransform(Fit.pnt, proj4string(States))

levelplot(Domain.r, 
          margin = FALSE,
          xlab = NULL, 
          ylab = NULL, 
          maxpixels = 1e5,
          col.regions = cr, at = rng,
          colorkey = FALSE, par.strip.text = list(fontface='bold', cex=1.5),
          par.settings = list(axis.line = list(col = "black"),
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          scales = list(cex = 1.25)) + 
  latticeExtra::layer(sp.polygons(Domain, col = "black", lwd = 0.5)) +
  latticeExtra::layer(sp.polygons(LakesLL, fill = "lightblue", col = "transparent", lwd = 0.5)) +
  latticeExtra::layer(sp.polygons(Hare.LLpnt , col = "red", pch=factor(Hare.LLpnt$OBS), cex = 1)) +
  latticeExtra::layer(sp.text(MyMatrix, txt = Domain$Name, 
                              pos =c(1,3,3,2,2,1,1), 
                              col="black",font=list(face="bold"), cex=1)) +
  latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                              offset = c(-83, 45.5),
                                              scale = 2)})

Hare.LLpnt@data

Project Everything to KM

nProj = "+proj=utm +zone=16 +datum=NAD83 +units=km +no_defs +ellps=GRS80 +towgs84=0,0,0"

LakesKM = spTransform(Lakes, nProj)
FocalKM = spTransform(Water1, nProj)
DomP = spTransform(Domain, nProj)
DomPU = gUnaryUnion(DomP)

Fit.pntP = spTransform(Fit.pnt, nProj)

Grd.pntP = spTransform(Grd.pnt, nProj)

2 Construct Mesh

These values are scaled to correspond to the geographic projection (kilometers).

max.edge = 8 #Make the outer edge length 8km
bound.outer = 75 #Outer extension can be 75km

Fit.pntPX = subset(Fit.pntP, Spp == "Hare" & Data == "Fit") #Only training data

bdry = inla.sp2segment(DomPU) #Formatting boundary for r-INLA

mesh = inla.mesh.2d(boundary = bdry, #Boundary
                    loc = Fit.pntPX, #Fit to point locations
                    max.edge = c(1, 5)*max.edge, #mesh size specifications
                    cutoff = 10, #minimum point separation (km); previously 8
                    min.angle = 25,
                    offset = c(max.edge, bound.outer))
mesh$n #number of nodes
## [1] 3792
plot(mesh, lwd=0.5)
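
To visually confirm the mesh resolves the sampling locations, the training points can be overlaid (a sketch, not in the original script):

plot(mesh, lwd = 0.5)
points(coordinates(Fit.pntPX), col = "red", pch = 20, cex = 0.5) #training locations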

3 Validation Data

Wisconsin

WIV.df = read.csv("./WI_HareValid2000-18.csv",  
                   header = TRUE, stringsAsFactors = FALSE, sep=",")

WIV.df = WIV.df %>%
         select(X, Y, HARESA_12_, HARESA_13_,  HARESA_14_)


WIV.df[is.na(WIV.df)] = 0

WIV.df = WIV.df %>%
         mutate(Site2 = paste("WIV_", 1:dim(WIV.df)[1]), 
                Long = X,
                Lat = Y,
                Source = "WI",
                Spp = "Valid",
                State = "Wisconsin",
                Data = "Valid",
                Counts = (HARESA_12_ + HARESA_13_ + HARESA_14_),
                OBS = ifelse(Counts >= 1, 1, 0),
                Trials = 1) %>% 
        select(Site2, Long, Lat, State, OBS, Counts, Trials, Data, Spp, Source)

dim(WIV.df)
## [1] 51 10
head(WIV.df)
##    Site2     Long      Lat     State OBS Counts Trials  Data   Spp Source
## 1 WIV_ 1 459786.6 640333.1 Wisconsin   0      0      1 Valid Valid     WI
## 2 WIV_ 2 459119.3 623022.5 Wisconsin   0      0      1 Valid Valid     WI
## 3 WIV_ 3 448003.0 645595.7 Wisconsin   1     20      1 Valid Valid     WI
## 4 WIV_ 4 544112.4 409977.9 Wisconsin   1     40      1 Valid Valid     WI
## 5 WIV_ 5 429676.8 692700.4 Wisconsin   1     16      1 Valid Valid     WI
## 6 WIV_ 6 323853.0 603638.8 Wisconsin   1     58      1 Valid Valid     WI
#NAD_1983_HARN_Wisconsin_TM
WIval.nproj = "+proj=tmerc +lat_0=0 +lon_0=-90 +k=0.9996 +x_0=520000 +y_0=-4480000 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs"

WIv.pnt = SpatialPointsDataFrame(WIV.df[, c("Long","Lat")], WIV.df) 
proj4string(WIv.pnt) = WIval.nproj

WIv.pnt = spTransform(WIv.pnt, nProj)
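
A quick containment check (a sketch; this over() usage is an assumption, not in the original script) verifies that the reprojected validation points fall inside the modeling domain:

table(is.na(over(WIv.pnt, DomPU))) #FALSE = inside the domain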

Hare (Sault Tribe)

library("readxl")

files = list.files(path="C:/Users/humph173/Documents/Michigan_State/Eric/Pellets", 
                   pattern="*.xlsx", full.names=T, recursive=FALSE)


for(i in 1:length(files)) {
  tmp.ex = as.data.frame(read_excel(files[i]))[,1:2]
  names(tmp.ex) = c("X", "Y")
  
  if(i == 1){Pellet.df = tmp.ex}
      else{Pellet.df = rbind(Pellet.df, tmp.ex)}
}
## New names:
## * `` -> ...3
## New names:
## * `` -> ...3
STV.df = Pellet.df %>%
               mutate(Site2 = paste("STV_", 1:dim(Pellet.df)[1]), 
                      Long = X,
                      Lat = Y,
                      Source = "ST",
                      Spp = "Valid",
                      State = "Michigan",
                      Data = "Valid",
                      Counts = 1,
                      OBS = 1,
                      Trials = 1) %>% 
        select(Site2, Long, Lat, State, OBS, Counts, Trials, Data, Spp, Source)

head(STV.df)
##    Site2     Long     Lat    State OBS Counts Trials  Data   Spp Source
## 1 STV_ 1 649079.4 5120467 Michigan   1      1      1 Valid Valid     ST
## 2 STV_ 2 661239.2 5135238 Michigan   1      1      1 Valid Valid     ST
## 3 STV_ 3 661243.7 5135231 Michigan   1      1      1 Valid Valid     ST
## 4 STV_ 4 661246.0 5135232 Michigan   1      1      1 Valid Valid     ST
## 5 STV_ 5 661253.6 5135214 Michigan   1      1      1 Valid Valid     ST
## 6 STV_ 6 661249.1 5135212 Michigan   1      1      1 Valid Valid     ST
dim(STV.df)
## [1] 847  10
MnProj = "+proj=utm +zone=16 +datum=NAD83 +units=m +no_defs +ellps=GRS80 +towgs84=0,0,0"

STV.pnt = SpatialPointsDataFrame(STV.df[, c("Long","Lat")], STV.df) 
proj4string(STV.pnt) = MnProj

STV.pnt = spTransform(STV.pnt, nProj)

Predator Prey Data

PPV.df = read.csv("C:/Users/humph173/Documents/Michigan_State/Marten/Data/PredPrey/Pred_prey_Comb_121218.csv",  
                   header = TRUE, sep=",") 

PPV.df = PPV.df %>%
          mutate(Site2 = paste("PPV_", 1:dim(PPV.df)[1]), 
                      Long = Easting,
                      Lat = Northing,
                      Source = "PP",
                      Spp = "Valid",
                      State = "Michigan",
                      Data = "Valid",
                      Counts = 1,
                      OBS = 1,
                      Trials = 1) %>% 
        select(Site2, Long, Lat, State, OBS, Counts, Trials, Data, Spp, Source)

head(PPV.df)
##    Site2   Long     Lat    State OBS Counts Trials  Data   Spp Source
## 1 PPV_ 1 399299 5117030 Michigan   1      1      1 Valid Valid     PP
## 2 PPV_ 2 399299 5117030 Michigan   1      1      1 Valid Valid     PP
## 3 PPV_ 3 411292 5117450 Michigan   1      1      1 Valid Valid     PP
## 4 PPV_ 4 400838 5119454 Michigan   1      1      1 Valid Valid     PP
## 5 PPV_ 5 400838 5119454 Michigan   1      1      1 Valid Valid     PP
## 6 PPV_ 6 396858 5127841 Michigan   1      1      1 Valid Valid     PP
dim(PPV.df)
## [1] 25939    10
PPV.pnt = SpatialPointsDataFrame(PPV.df[, c("Long","Lat")], PPV.df) 
proj4string(PPV.pnt) = MnProj

PPV.pnt = spTransform(PPV.pnt, nProj)

Lower Peninsula Survey (Pellets)

LPS.df = read.csv("./2019_pellet_sites.csv",  
                   header = TRUE, sep=",") 

LPS.df = LPS.df %>%
          mutate(Site2 = paste("PPV_", 1:dim(LPS.df)[1]), 
                      Long = Latitude, #Reversed in data
                      Lat = Longitude,
                      Source = "LPP",
                      Spp = "Valid",
                      State = "Michigan",
                      Data = "Valid",
                      Counts = 1,
                      OBS = CurrentOcc,
                      Trials = 1) %>% 
        filter(plot_ID != "main") %>% #Remove Original Surveys
        select(Site2, Long, Lat, State, OBS, Counts, Trials, Data, Spp, Source)

head(LPS.df)
##    Site2     Long      Lat    State OBS Counts Trials  Data   Spp Source
## 1 PPV_ 1 619035.8 519897.4 Michigan   1      1      1 Valid Valid    LPP
## 2 PPV_ 2 617098.3 528769.7 Michigan   1      1      1 Valid Valid    LPP
## 3 PPV_ 4 620157.4 513569.1 Michigan   1      1      1 Valid Valid    LPP
## 4 PPV_ 5 609769.3 522842.3 Michigan   0      1      1 Valid Valid    LPP
## 5 PPV_ 6 608475.4 518285.8 Michigan   1      1      1 Valid Valid    LPP
## 6 PPV_ 7 613170.1 511702.0 Michigan   0      1      1 Valid Valid    LPP
dim(LPS.df)
## [1] 31 10
#Long & Lat Reversed & not UTM
#NAD_1983_2011_Michigan_GeoRef_Meters
StateGeoref.Proj = "+proj=omerc +lat_0=45.30916666666666 +lonc=-86 +alpha=337.25556 +k=0.9996 +x_0=2546731.496 +y_0=-4354009.816 +no_uoff +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs"

LPS.pnt = SpatialPointsDataFrame(LPS.df[, c("Long","Lat")], LPS.df) 
proj4string(LPS.pnt) =StateGeoref.Proj 

LPS.pnt = spTransform(LPS.pnt, nProj)

Update Coordinates and Combine

Fit.pnt.tab = Fit.pntP@data %>%
              mutate(Source = "Fit",
                     Long = Fit.pntP@coords[,1],
                     Lat = Fit.pntP@coords[,2])

WIv.pnt.tab = WIv.pnt@data %>%
              mutate(Long = WIv.pnt@coords[,1],
                     Lat = WIv.pnt@coords[,2])


PPV.pnt.tab = PPV.pnt@data %>%
              mutate(Long = PPV.pnt@coords[,1],
                     Lat = PPV.pnt@coords[,2])

STV.pnt.tab = STV.pnt@data %>%
              mutate(Long = STV.pnt@coords[,1],
                     Lat = STV.pnt@coords[,2])

LPS.pnt.tab = LPS.pnt@data %>%
              mutate(Long = LPS.pnt@coords[,1],
                     Lat = LPS.pnt@coords[,2])

Combine All Data: joining all datasets into a common dataframe.

hare.df = rbind(Fit.pnt.tab, WIv.pnt.tab, STV.pnt.tab, PPV.pnt.tab, LPS.pnt.tab)

dim(hare.df)
## [1] 27188    10
hare.df %>%
  group_by(State, Data) %>%
  summarise(Count = length(Data))
## # A tibble: 4 x 3
## # Groups:   State [2]
##   State     Data  Count
##   <chr>     <chr> <int>
## 1 Michigan  Fit     125
## 2 Michigan  Valid 26817
## 3 Wisconsin Fit     195
## 4 Wisconsin Valid    51

Convert to Points

Hare.pnt = SpatialPointsDataFrame(hare.df[, c("Long","Lat")], hare.df) 
proj4string(Hare.pnt) = nProj

Quick Plot to Check Validation Data: plotting the locations used for model validation.

rng = seq(0, 255, 1)

mCols  = brewer.pal(11, "RdYlBu")[-6] 
cr0 = rev(colorRampPalette((mCols))(n = 256))
cr = colorRampPalette(c("tan", cr0),  
                      bias = 1, space = "rgb")

MyMatrix = matrix(nrow=7, ncol=2)
rownames(MyMatrix) = rownames(coordinates(Domain))
MyMatrix[,1] = c(-89.07686, -86.11260, -91.91021, -83.82014, -91.31258, -83.46125, -89.51171)
MyMatrix[,2] = c(41.80000, 41.36514, 42.28710, 43.0, 47.5041, 41.61285, 43.63285)

Hare.LLpnt = spTransform(Hare.pnt, proj4string(States))
Hare.LLpnt = subset(Hare.LLpnt, Data == "Valid")

levelplot(Domain.r, 
          margin = FALSE,
          xlab = NULL, 
          ylab = NULL, 
          maxpixels = 1e5,
          col.regions = cr, at = rng,
          colorkey = FALSE, par.strip.text = list(fontface='bold', cex=1.5),
          par.settings = list(axis.line = list(col = "black"),
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          scales = list(cex = 1.25)) + 
  latticeExtra::layer(sp.polygons(Domain, col = "black", lwd = 0.5)) +
  latticeExtra::layer(sp.polygons(LakesLL, fill = "lightblue", col = "transparent", lwd = 0.5)) +
  latticeExtra::layer(sp.polygons(Hare.LLpnt, col = "red", pch=factor(Hare.LLpnt$Data), cex = 1)) +
  latticeExtra::layer(sp.text(MyMatrix, txt = Domain$Name, 
                              pos =c(1,3,3,2,2,1,1), 
                              col="black",font=list(face="bold"), cex=1)) +
  latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                              offset = c(-83, 45.5),
                                              scale = 2)})

Combine Observations, Mesh Nodes, and Grid Points

#Node coordinates
dd = as.data.frame(cbind(mesh$loc[,1], 
                         mesh$loc[,2]))

names(dd) = c("Long", "Lat") #name coordinates
dd$OBS = 0 #no hare at these locations
dd$Site2 = paste("N", 1:nrow(dd), sep = ".") #to match with observation data
dd$State = "All" 
dd$Spp = "Mesh"
dd$Counts = 0
dd$Trials = 0
dd$Source = "Mesh"
dd$Data = "Fit"
#Hare Obs
hare.set = Hare.pnt@data %>%
           mutate(Long = Hare.pnt@coords[,1],
                  Lat = Hare.pnt@coords[,2])

#Grid
grid.set = Grd.pntP@data %>%
           mutate(Long = Grd.pntP@coords[,1],
                  Lat = Grd.pntP@coords[,2],
                  Spp = Spp,
                  OBS = 0,
                  State = "All",
                  Counts = 0,
                  Trials = 0,
                  Data = "grid",
                  Source = "Grid",
                  Site2 = paste("G", 1:nrow(Grd.pntP@data), sep = "."))


All.pnts = rbind(hare.set, dd, grid.set)
All.pnts = SpatialPointsDataFrame(All.pnts[, c("Long","Lat")], All.pnts)
proj4string(All.pnts) = nProj
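
A bookkeeping tally (a sketch, not in the original script) confirms how many points each source contributes:

table(All.pnts$Source)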

4 Extract Covariates

Forest1km = raster("C:/Users/humph173/Documents/Michigan_State/Sean/Loop_020819/Forest1km.grd")
mSnow5Yr = raster("./Hare1/Mean5yrSnow.tif")
mxTemp = raster("./Hare1/meanTMAX.tif")

All.pnts$mSnow5yrE = extract(mSnow5Yr, 
                                spTransform(All.pnts,
                                CRS(proj4string(mSnow5Yr))), 
                                method="simple") 

All.pnts$mSnow5yrE[is.na(All.pnts$mSnow5yrE)] = mean(All.pnts$mSnow5yrE, na.rm=T)

All.pnts$mxTempE = extract(mxTemp, 
                                spTransform(All.pnts,
                                CRS(proj4string(mxTemp))), 
                                method="simple") 

All.pnts$mxTempE[is.na(All.pnts$mxTempE)] = mean(All.pnts$mxTempE, na.rm=T)

All.pnts$Forest1kmE = extract(Forest1km, 
                                spTransform(All.pnts,
                                CRS(proj4string(Forest1km))), 
                                method="simple") 

All.pnts$Forest1kmE[is.na(All.pnts$Forest1kmE)] = mean(All.pnts$Forest1kmE, na.rm=T)

All.pnts$Forest1kmE = round(All.pnts$Forest1kmE/100, 1)
 
# 2019 Lower ###################
mSnow5Yr2019 = raster("./Mean5yrlp2019.tif")
mxTemp2019 = raster("./meanTMAXLP2019.tif")

All.pnts$mSnow5yrE2 = extract(mSnow5Yr2019, 
                                spTransform(All.pnts,
                                CRS(proj4string(mSnow5Yr2019))), 
                                method="simple") 

All.pnts$mSnow5yrE2[is.na(All.pnts$mSnow5yrE2)] = mean(All.pnts$mSnow5yrE2, na.rm=T)

All.pnts$mxTempE2 = extract(mxTemp2019, 
                                spTransform(All.pnts,
                                CRS(proj4string(mxTemp2019))), 
                                method="simple") 

All.pnts$mxTempE2[is.na(All.pnts$mxTempE2)] = mean(All.pnts$mxTempE2, na.rm=T)

#Select variable year: use the 2019 layers for Lower Peninsula (LPP) points
All.pnts$mSnow5yrE = ifelse(All.pnts$Source == "LPP", All.pnts$mSnow5yrE2, All.pnts$mSnow5yrE)

All.pnts$mxTempE = ifelse(All.pnts$Source == "LPP", All.pnts$mxTempE2, All.pnts$mxTempE)

#Clean up frame

All.pnts@data = All.pnts@data %>% select(-c(mSnow5yrE2, mxTempE2))

5 Earth Environment Variables

Land Cover, full consensus version: http://www.earthenv.org/landcover

EE.files = list.files(path="E:/EarthEnv", 
                      pattern="*.tif", full.names=T, recursive=T)

EE.stack = stack(EE.files)

EE.df = as.data.frame(
                  extract(EE.stack, 
                          spTransform(All.pnts,
                                      proj4string(EE.stack)),
                          method = "simple"))

names(EE.df) = paste("Type", 1:12, sep=".")

EE.dfs = cbind(All.pnts@data %>% select(mSnow5yrE, mxTempE), EE.df) 

for(i in 1:dim(EE.dfs)[2]){
  EE.dfs[,i] = scale(EE.dfs[,i], scale=T, center=T)
}
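#Note (equivalent sketch, not in the original): the loop above matches a single
#call, since scale() standardizes every column of a data frame at once:
#EE.dfs = as.data.frame(scale(EE.dfs, center = TRUE, scale = TRUE))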

names(EE.dfs) = paste("s", names(EE.dfs), sep=".")

All.pnts@data = cbind(All.pnts@data, EE.dfs)

#Labels
EE.names = read.csv("E:/EarthEnv/EE_lables.csv",  
                   header = TRUE, sep=",")

EE.names
##    Type                                 NAME
## 1     1 Evergreen/Deciduous Needleleaf Trees
## 2     2            Evergreen Broadleaf Trees
## 3     3            Deciduous Broadleaf Trees
## 4     4                    Mixed/Other Trees
## 5     5                               Shrubs
## 6     6                Herbaceous Vegetation
## 7     7    Cultivated and Managed Vegetation
## 8     8         Regularly Flooded Vegetation
## 9     9                       Urban/Built-up
## 10   10                             Snow/Ice
## 11   11                               Barren
## 12   12                           Open Water

Identify the UP: identifying locations in the U.P. based on well-defined county boundaries.

UP = readOGR(dsn = "C:/Users/humph173/Documents/Michigan_State/SLP_Beam_Diam/Counties_v17a", 
                layer = "MI_UP", 
                stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\Michigan_State\SLP_Beam_Diam\Counties_v17a", layer: "MI_UP"
## with 15 features
## It has 15 fields
## Integer64 fields read as strings:  OBJECTID FIPSNUM
XX = readOGR(dsn = "C:/Users/humph173/Documents/Michigan_State/Sean/Hare1/CoordTest", 
                layer = "Test7b", 
                stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\Michigan_State\Sean\Hare1\CoordTest", layer: "Test7b"
## with 35 features
## It has 13 fields
## Integer64 fields read as strings:  FID_1 Site CurrentOcc Site_ID BUFF_DIST
UPp = gUnaryUnion(spTransform(UP, proj4string(All.pnts)))

All.pnts$UP = is.na(over(All.pnts, UPp)) #TRUE = outside the U.P.

Update State Designation to Include UP: cleaning up labels.

All.pnts$StateUP = ifelse(All.pnts$UP == FALSE & All.pnts$Spp == "Hare", "Mich.UP", All.pnts$State)
All.pnts$StateUPW = ifelse(All.pnts$StateUP == "Mich.UP", "Wisconsin", All.pnts$StateUP)
All.pnts$Domain = over(All.pnts, DomPU)

levels(factor(All.pnts$StateUP))
## [1] "All"       "Mich.UP"   "Michigan"  "Wisconsin"
levels(factor(All.pnts$StateUPW))
## [1] "All"       "Michigan"  "Wisconsin"

5.1 Separate Datasets

Hare.mod = subset(All.pnts, Spp == "Hare") #Observations only

OneTier.mod = subset(All.pnts, Spp == "Hare" | (Spp == "Mesh" & is.na(Domain) == FALSE)) #observations plus in-domain mesh nodes

Mesh.mod = subset(All.pnts, Spp == "Mesh" & is.na(Domain) == FALSE) #Mesh locations excluding buffer extension

HareMesh.mod = subset(All.pnts, Data == "Fit") #Observations excluding Grid points for prediction/plotting
HareMesh.mod$ID = 1:nrow(HareMesh.mod@data)

Validation.set = subset(All.pnts, Data == "Valid")


Grd.pnts = subset(All.pnts, Spp == "Grid") #Grid locations for prediction/plotting
zGrd.pnts = subset(All.pnts, Spp == "Zoom") #zoomed locations for prediction/plotting

Add Background Points to Validation Set

Validation.set2 = spRbind(Validation.set, Mesh.mod)

Validation.set2@data %>%
  group_by(OBS) %>%
  summarise(Cnt =length(OBS))
## # A tibble: 2 x 2
##     OBS   Cnt
##   <dbl> <int>
## 1     0  3061
## 2     1 26857
Validation.set2$dups = duplicated(Validation.set2@data[,c("Long","Lat")])
Validation.set2 = subset(Validation.set2, dups == FALSE)
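
Recounting after removing duplicate coordinates (a sketch, not in the original script):

table(Validation.set2$OBS) #class balance after de-duplication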

6 Collinearity Check

The condition index needs to be below 30. After dropping Forest1kmE (second run below), the maximum index falls under this threshold, suggesting collinearity will not be an issue.

library(perturb)
## 
## Attaching package: 'perturb'
## The following object is masked from 'package:raster':
## 
##     reclassify
Colin.df = HareMesh.mod@data %>% #select covariates
            select(mSnow5yrE, mxTempE, Forest1kmE, paste("s.Type", 1:12, sep="."))

CorCov = cor(Colin.df) #calculate correlation

corrplot(CorCov) #view correlation table

CI = colldiag(CorCov) #Apply metric
CI
## Condition
## Index    Variance Decomposition Proportions
##            intercept mSnow5yrE mxTempE Forest1kmE s.Type.1 s.Type.2
## 1    1.000 0.000     0.000     0.001   0.000      0.005    0.000   
## 2    1.566 0.000     0.001     0.000   0.000      0.003    0.001   
## 3    1.746 0.071     0.000     0.000   0.000      0.011    0.059   
## 4    2.243 0.019     0.000     0.000   0.000      0.000    0.202   
## 5    2.361 0.007     0.000     0.000   0.000      0.019    0.024   
## 6    2.506 0.008     0.000     0.000   0.000      0.008    0.001   
## 7    2.632 0.001     0.000     0.000   0.000      0.005    0.001   
## 8    2.932 0.008     0.000     0.000   0.000      0.058    0.426   
## 9    3.242 0.008     0.000     0.000   0.000      0.262    0.097   
## 10   4.000 0.513     0.001     0.001   0.000      0.007    0.102   
## 11   5.896 0.025     0.001     0.017   0.000      0.370    0.008   
## 12   7.922 0.042     0.000     0.013   0.000      0.207    0.003   
## 13   8.843 0.067     0.023     0.106   0.003      0.032    0.037   
## 14  22.447 0.046     0.058     0.001   0.960      0.004    0.006   
## 15  31.244 0.185     0.915     0.861   0.036      0.011    0.033   
##    s.Type.3 s.Type.4 s.Type.5 s.Type.6 s.Type.7 s.Type.8 s.Type.9
## 1  0.002    0.002    0.000    0.002    0.002    0.000    0.000   
## 2  0.001    0.000    0.003    0.004    0.004    0.027    0.005   
## 3  0.000    0.000    0.043    0.001    0.000    0.014    0.042   
## 4  0.000    0.000    0.021    0.001    0.001    0.058    0.115   
## 5  0.001    0.000    0.204    0.001    0.000    0.020    0.006   
## 6  0.002    0.000    0.156    0.000    0.000    0.008    0.001   
## 7  0.001    0.000    0.096    0.000    0.000    0.588    0.000   
## 8  0.002    0.001    0.057    0.000    0.000    0.000    0.305   
## 9  0.024    0.006    0.193    0.003    0.001    0.001    0.140   
## 10 0.001    0.001    0.091    0.000    0.003    0.171    0.029   
## 11 0.106    0.115    0.002    0.118    0.016    0.002    0.012   
## 12 0.153    0.086    0.005    0.639    0.070    0.004    0.002   
## 13 0.046    0.253    0.064    0.062    0.122    0.002    0.031   
## 14 0.421    0.365    0.012    0.168    0.191    0.013    0.266   
## 15 0.239    0.171    0.052    0.002    0.591    0.092    0.045   
##    s.Type.10 s.Type.11 s.Type.12
## 1  0.000     0.000     0.000    
## 2  0.000     0.025     0.004    
## 3  0.012     0.009     0.001    
## 4  0.101     0.076     0.000    
## 5  0.430     0.004     0.000    
## 6  0.200     0.415     0.000    
## 7  0.005     0.186     0.000    
## 8  0.005     0.000     0.001    
## 9  0.001     0.001     0.000    
## 10 0.183     0.248     0.004    
## 11 0.014     0.002     0.001    
## 12 0.012     0.000     0.005    
## 13 0.010     0.003     0.006    
## 14 0.009     0.018     0.017    
## 15 0.018     0.012     0.961
#Remove Forest1kmE
Colin.df = HareMesh.mod@data %>% #select covariates
            select(mSnow5yrE, mxTempE, paste("s.Type", 1:12, sep="."))

CorCov = cor(Colin.df) #calculate correlation

corrplot(CorCov) #view correlation table

CI = colldiag(CorCov) #Apply metric
CI
## Condition
## Index    Variance Decomposition Proportions
##            intercept mSnow5yrE mxTempE s.Type.1 s.Type.2 s.Type.3 s.Type.4
## 1    1.000 0.000     0.000     0.001   0.008    0.000    0.005    0.006   
## 2    1.392 0.000     0.002     0.000   0.000    0.001    0.004    0.001   
## 3    1.532 0.074     0.000     0.000   0.013    0.054    0.001    0.000   
## 4    1.996 0.017     0.000     0.000   0.001    0.209    0.000    0.000   
## 5    2.103 0.009     0.000     0.000   0.021    0.017    0.004    0.000   
## 6    2.243 0.008     0.000     0.000   0.008    0.000    0.008    0.000   
## 7    2.349 0.001     0.000     0.000   0.006    0.001    0.002    0.000   
## 8    2.596 0.009     0.000     0.001   0.077    0.388    0.012    0.004   
## 9    2.812 0.005     0.000     0.000   0.198    0.145    0.060    0.011   
## 10   3.608 0.563     0.001     0.001   0.007    0.106    0.006    0.001   
## 11   4.928 0.035     0.001     0.016   0.428    0.011    0.221    0.187   
## 12   6.702 0.056     0.000     0.014   0.194    0.004    0.255    0.122   
## 13   7.452 0.052     0.022     0.103   0.032    0.035    0.140    0.502   
## 14  26.112 0.171     0.974     0.864   0.008    0.029    0.280    0.166   
##    s.Type.5 s.Type.6 s.Type.7 s.Type.8 s.Type.9 s.Type.10 s.Type.11
## 1  0.001    0.005    0.003    0.000    0.000    0.000     0.000    
## 2  0.001    0.003    0.003    0.024    0.005    0.000     0.024    
## 3  0.043    0.001    0.000    0.015    0.051    0.011     0.011    
## 4  0.030    0.001    0.001    0.056    0.157    0.087     0.068    
## 5  0.183    0.001    0.000    0.018    0.004    0.458     0.005    
## 6  0.144    0.000    0.000    0.006    0.001    0.178     0.456    
## 7  0.100    0.000    0.000    0.612    0.000    0.005     0.167    
## 8  0.084    0.000    0.000    0.001    0.328    0.006     0.000    
## 9  0.200    0.004    0.001    0.001    0.215    0.000     0.001    
## 10 0.104    0.000    0.004    0.175    0.047    0.197     0.254    
## 11 0.001    0.110    0.013    0.003    0.017    0.018     0.004    
## 12 0.004    0.816    0.070    0.006    0.005    0.015     0.001    
## 13 0.062    0.057    0.119    0.001    0.049    0.008     0.002    
## 14 0.044    0.001    0.786    0.082    0.120    0.016     0.009    
##    s.Type.12
## 1  0.000    
## 2  0.005    
## 3  0.001    
## 4  0.000    
## 5  0.000    
## 6  0.001    
## 7  0.000    
## 8  0.001    
## 9  0.000    
## 10 0.004    
## 11 0.001    
## 12 0.005    
## 13 0.008    
## 14 0.974

6.1 Spatial Priors and Indices

Relating observations to mesh locations through a projection matrix and defining flat spatial priors.

#Relate mesh for detection level
locs = cbind(Hare.mod@coords[,1], Hare.mod@coords[,2]) #point locations

A.det = inla.spde.make.A(mesh, #the mesh
                         alpha = 2, #default setting
                         loc=locs) #our locations



#Relate mesh for covariate level
HR.df = OneTier.mod@data
HR.df$Trials = ifelse(HR.df$Spp == "Mesh", 0, HR.df$Trials) #Zero trials for background points

locs = cbind(HR.df[,"Long"], HR.df[,"Lat"])

A.obs = inla.spde.make.A(mesh, #the mesh
                          alpha = 2,
                          loc=locs)

#These two include covariates
A.tem = inla.spde.make.A(mesh, 
                          alpha = 2,
                          loc=locs,
                          weights = HR.df[,"mxTempE"]) #covariate to model as SVC

A.sno = inla.spde.make.A(mesh, 
                          alpha = 2,
                          loc=locs,
                          weights = HR.df[,"mSnow5yrE"]) #covariate to model as SVC

spde = inla.spde2.pcmatern(mesh, 
                           prior.range = c(1500, 0.5), 
                           prior.sigma = c(5, 0.01))


#Create index to track locations of mesh nodes
field.env = inla.spde.make.index("field.env", spde$n.spde) #index for observations

field.tem = inla.spde.make.index("field.tem", spde$n.spde) #index for Temperature SVC

field.sno = inla.spde.make.index("field.sno", spde$n.spde) #index for snow SVC
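
As a hedged consistency check (not in the original script), the projector matrix should have one row per observation and one column per mesh node:

stopifnot(nrow(A.obs) == nrow(HR.df),
          ncol(A.obs) == mesh$n)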

7 Model Runs

Organize data

HR.lst = list(field.env, 
               field.tem, 
               field.sno,
               list(intercept1 = rep(1,dim(HR.df)[1]),
                       mxTempE = HR.df[,"s.mxTempE"],
                       mSnow5yrE = HR.df[,"s.mSnow5yrE"],
                       Type.1 = HR.df[,"s.Type.1"],
                       Type.2 = HR.df[,"s.Type.2"],
                       Type.3 = HR.df[,"s.Type.3"],
                       Type.4 = HR.df[,"s.Type.4"],
                       Type.5 = HR.df[,"s.Type.5"],
                       Type.6 = HR.df[,"s.Type.6"],
                       Type.7 = HR.df[,"s.Type.7"],
                       Type.8 = HR.df[,"s.Type.8"],
                       Type.9 = HR.df[,"s.Type.9"],
                       Type.10 = HR.df[,"s.Type.10"],
                       Type.11 = HR.df[,"s.Type.11"],
                       Type.12 = HR.df[,"s.Type.12"]))

Env.stk = inla.stack(data = list(Y = HR.df$OBS, 
             Field.trials = HR.df$Trials), 
                        A = list(A.obs, A.tem, A.sno, 1), #spatial matrices
                  effects = HR.lst,   
                      tag = "env.0")




#Set up for the non-SVC model
HR2.lst = list(field.env, 
              # field.tem, 
               #field.sno,
               list(intercept1 = rep(1,dim(HR.df)[1]),
                       mxTempE = HR.df[,"s.mxTempE"],
                       mSnow5yrE = HR.df[,"s.mSnow5yrE"],
                       Type.1 = HR.df[,"s.Type.1"],
                       Type.2 = HR.df[,"s.Type.2"],
                       Type.3 = HR.df[,"s.Type.3"],
                       Type.4 = HR.df[,"s.Type.4"],
                       Type.5 = HR.df[,"s.Type.5"],
                       Type.6 = HR.df[,"s.Type.6"],
                       Type.7 = HR.df[,"s.Type.7"],
                       Type.8 = HR.df[,"s.Type.8"],
                       Type.9 = HR.df[,"s.Type.9"],
                       Type.10 = HR.df[,"s.Type.10"],
                       Type.11 = HR.df[,"s.Type.11"],
                       Type.12 = HR.df[,"s.Type.12"]))

FE.stk = inla.stack(data = list(Y = HR.df$OBS, 
             Field.trials = HR.df$Trials), 
                        A = list(A.obs, 1), #spatial matrices
                  effects = HR2.lst,   
                      tag = "fe.0")

Constraints to prevent the spatial field from confounding the covariates.

n.data = dim(HR.df)[1]
n.covariates = 6

X = cbind(rep(1,n.data), 
        HR.df$s.mxTempE,
        HR.df$s.mSnow5yrE,
        HR.df$s.Type.1,
        HR.df$s.Type.3,
        HR.df$s.Type.4)

Q = qr.Q(qr(X))


spde = inla.spde2.pcmatern(mesh,
                           prior.range = c(1500, 0.5), 
                           prior.sigma = c(5, 0.01),  
                           extraconstr = list(A = as.matrix(t(Q)%*%A.obs), 
                                              e = rep(0, n.covariates))) 

spde2 = inla.spde2.pcmatern(mesh,
                           prior.range = c(1500, 0.5), 
                           prior.sigma = c(5, 0.01),  
                           extraconstr = list(A = as.matrix(t(Q)%*%A.tem), 
                                              e = rep(0, n.covariates))) 


spde3 = inla.spde2.pcmatern(mesh,
                           prior.range = c(1500, 0.5), 
                           prior.sigma = c(5, 0.01),  
                           extraconstr = list(A = as.matrix(t(Q)%*%A.sno), 
                                              e = rep(0, n.covariates))) 
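
A quick dimension check (a sketch, not in the original script): each constraint matrix should supply one row per covariate column in X:

dim(as.matrix(t(Q) %*% A.obs)) #expected: n.covariates x mesh$n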

8 Full Model

Site.prior = list(theta=list(prior = "normal", param=c(0, 3))) #Prior for site effect
Reg.prior = list(theta=list(prior = "normal", param=c(0, 3))) #prior for region

JFrm0 = Y ~ -1 + intercept1 + 
                f(field.env, 
                  model=spde) +
                f(field.tem, 
                  model=spde2) +
                f(field.sno, 
                  model=spde3) +
               mxTempE + mSnow5yrE +
               Type.1 + Type.3 + Type.6 

  
#thetJ = Test.mod$internal.summary.hyperpar$mean
thetaJ = c(7.4867414, -0.4521236, 8.4357741, -1.4313818, 8.4668390, -1.4568577)

SVC.mod = inla(JFrm0, #formula
                   data = inla.stack.data(Env.stk),
                   family = "binomial", 
                   verbose = TRUE, 
                   Ntrials = inla.stack.data(Env.stk)$Field.trials, #number trials 
                   control.fixed = list(prec = 0.001, 
                              prec.intercept = 0.0001), 
                   control.predictor = list(
                                   A = inla.stack.A(Env.stk), 
                             compute = TRUE, 
                                link = 1),
                  control.mode = list(restart = TRUE, theta = thetaJ), 
                  control.inla = list(strategy="gaussian", 
                                       int.strategy = "eb"),
                   control.results = list(return.marginals.random = TRUE,
                                          return.marginals.predictor = TRUE),
                   control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE)) 

#save(list=c("SVC.mod", "Env.stk"), file="./Results_112119/SVC_112119.RData") 
#Previously run
load("./Results_112119/SVC_112119.RData")
summary(SVC.mod)
## 
## Call:
## c("inla(formula = JFrm0, family = \"binomial\", data = inla.stack.data(Env.stk), ",  "    Ntrials = inla.stack.data(Env.stk)$Field.trials, verbose = TRUE, ",  "    control.compute = list(dic = TRUE, cpo = TRUE, waic = TRUE), ",  "    control.predictor = list(A = inla.stack.A(Env.stk), compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), control.results = list(return.marginals.random = TRUE, ",  "        return.marginals.predictor = TRUE), control.fixed = list(prec = 0.001, ",  "        prec.intercept = 1e-04), control.mode = list(restart = TRUE, ",  "        theta = thetaJ))")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##         21.0933        467.1675          1.3006        489.5615 
## 
## Fixed effects:
##               mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## intercept1 -3.7708 0.2025    -4.1684  -3.7708    -3.3735 -3.7708   0
## mxTempE    -0.5678 0.2769    -1.1115  -0.5678    -0.0246 -0.5678   0
## mSnow5yrE   0.7116 0.2946     0.1331   0.7115     1.2895  0.7116   0
## Type.1      0.1468 0.0641     0.0209   0.1468     0.2726  0.1468   0
## Type.3      0.1812 0.1165    -0.0476   0.1812     0.4098  0.1812   0
## Type.6     -0.0098 0.2117    -0.4254  -0.0098     0.4056 -0.0098   0
## 
## Random effects:
## Name   Model
##  field.env   SPDE2 model 
## field.tem   SPDE2 model 
## field.sno   SPDE2 model 
## 
## Model hyperparameters:
##                          mean        sd 0.025quant  0.5quant 0.975quant
## Range for field.env 2658.4249 3.177e+03   358.5679 1707.0276  10733.673
## Stdev for field.env    0.8843 7.856e-01     0.1135    0.6663      2.971
## Range for field.tem 9821.0590 1.853e+04   448.4431 4627.9834  51576.858
## Stdev for field.tem    0.5085 8.784e-01     0.0263    0.2553      2.575
## Range for field.sno 9530.1064 1.540e+04   560.6840 5021.1402  47042.417
## Stdev for field.sno    0.4055 6.213e-01     0.0240    0.2210      1.959
##                          mode
## Range for field.env  851.2918
## Stdev for field.env    0.3172
## Range for field.tem 1132.5221
## Stdev for field.tem    0.0673
## Range for field.sno 1449.3246
## Stdev for field.sno    0.0631
## 
## Expected number of effective parameters(std dev): 13.57(0.00)
## Number of equivalent replicates : 248.39 
## 
## Deviance Information Criterion (DIC) ...............: 453.95
## Deviance Information Criterion (DIC, saturated) ....: 185.49
## Effective number of parameters .....................: 13.86
## 
## Watanabe-Akaike information criterion (WAIC) ...: 447.40
## Effective number of parameters .................: 6.907
## 
## Marginal log-Likelihood:  -258.45 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed

8.1 Covariates

Cov.fe = SVC.mod$summary.fixed[,c(1:3,5)]
names(Cov.fe) = c("Mean", "sd", "Q0.025", "Q0.975")

kable(Cov.fe, caption = "Fixed Effects", digits=2) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Fixed Effects

             Mean   sd Q0.025 Q0.975
intercept1  -3.77 0.20  -4.17  -3.37
mxTempE     -0.57 0.28  -1.11  -0.02
mSnow5yrE    0.71 0.29   0.13   1.29
Type.1       0.15 0.06   0.02   0.27
Type.3       0.18 0.12  -0.05   0.41
Type.6      -0.01 0.21  -0.43   0.41

9 Mapped Spatial Predictions

ModResult = SVC.mod
Pred.pntsN = Grd.pnts

#Get RF
pLoc = cbind(Pred.pntsN@coords[,1], Pred.pntsN@coords[,2])
Ap = inla.spde.make.A(mesh, loc=pLoc)
Pred.pntsN$RF = drop(Ap %*% ModResult$summary.random$field.env$mean) 
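
The same projector can also give a rough uncertainty surface. Note that this sketch (not in the original script) projects the node-wise posterior sd linearly and ignores cross-node covariance, so it is only an approximation:

Pred.pntsN$RF.sd = drop(Ap %*% ModResult$summary.random$field.env$sd) #approximate field sd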

#Intercept
Fixed.df = ModResult$summary.fixed

Pred.pntsN$Sno = Pred.pntsN$s.mSnow5yrE*drop(Ap %*% ModResult$summary.random$field.sno$mean) #+  Fixed.df[3,1]
Pred.pntsN$Tem = Pred.pntsN$s.mxTempE*drop(Ap %*% ModResult$summary.random$field.tem$mean) #+  Fixed.df[2,1]

Pred.pntsN$SnoEf = drop(Ap %*% ModResult$summary.random$field.sno$mean) +  Fixed.df[3,1]
Pred.pntsN$TemEf = drop(Ap %*% ModResult$summary.random$field.tem$mean) +  Fixed.df[2,1]


Pred.pntsN@data = Pred.pntsN@data %>%
                       mutate(E.LP = RF + Sno + Tem + #Det +
                                     Fixed.df[1,1] +
                                     #Fixed.df[2,1] +
                                     s.mxTempE*Fixed.df[2,1] +
                                     s.mSnow5yrE*Fixed.df[3,1] +
                                     s.Type.1*Fixed.df[4,1] +
                                     #Type.2*Fixed.df[5,1] +
                                     s.Type.3*Fixed.df[5,1],
                                     #s.Type.4*Fixed.df[6,1],
                                     #Type.5*Fixed.df[8,1] +
                                     #Type.6*Fixed.df[7,1] +
                                     #Type.7*Fixed.df[10,1] +
                                     #Type.8*Fixed.df[8,1] +
                                     #Type.9*Fixed.df[7,1] +
                                     #Type.10*Fixed.df[13,1] +
                                     #Type.11*Fixed.df[14,1] +
                                     #Type.12*Fixed.df[8,1], 
                              P.LP = Sno + Tem + #Det +
                                     #Fixed.df[1,1] +
                                     #Fixed.df[2,1] +
                                     s.mxTempE*Fixed.df[2,1] +
                                     s.mSnow5yrE*Fixed.df[3,1] +
                                     s.Type.1*Fixed.df[4,1] +
                                     #Type.2*Fixed.df[5,1] +
                                     s.Type.3*Fixed.df[5,1], 
                                     #s.Type.4*Fixed.df[6,1],
                                     #Type.5*Fixed.df[8,1] +
                                     #Type.6*Fixed.df[7,1] +
                                     #Type.7*Fixed.df[10,1] +
                                     #Type.8*Fixed.df[8,1] +
                                     #Type.9*Fixed.df[7,1] +
                                     #Type.10*Fixed.df[13,1] +
                                     #Type.11*Fixed.df[14,1] +
                                     #Type.12*Fixed.df[8,1],  
                              Est = plogis(E.LP),
                              Pred = plogis(P.LP))



Test.pred = rasterize(spTransform(
                      Pred.pntsN,
                      CRS(proj4string(Domain.r))), 
                      Domain.r, 
                      "Pred", 
                      background = NA)

Test.est = rasterize(spTransform(
                      Pred.pntsN,
                      CRS(proj4string(Domain.r))), 
                      Domain.r, 
                      "Est", 
                      background = NA)

Test.sno= rasterize(spTransform(
                      Pred.pntsN,
                      CRS(proj4string(Domain.r))), 
                      Domain.r, 
                      "SnoEf", 
                      background = NA)

Test.tem= rasterize(spTransform(
                      Pred.pntsN,
                      CRS(proj4string(Domain.r))), 
                      Domain.r, 
                      "TemEf", 
                      background = NA)

Test.rf = rasterize(spTransform(
                      Pred.pntsN,
                      CRS(proj4string(Domain.r))), 
                      Domain.r, 
                      "RF", 
                      background = NA)

#Test.det = rasterize(spTransform(
#                      Pred.pntsN,
#                      CRS(proj4string(Domain.r))), 
#                      Domain.r, 
#                      "Det", 
#                      background = NA)
rng = seq(0, 1, 0.01)

mCols = brewer.pal(9, "YlOrBr")
cr = colorRampPalette(mCols)(n = 500)

cr = colorRampPalette(cr,  
          bias = 0.75, space = "rgb")

Domain2 = subset(Domain, ID != 21 & ID != 48)

rng = seq(0, 1, 0.01)
mCols  = rev(inferno(10))
#mCols[1] = "lightgray"
cr = colorRampPalette(c(mCols), 
         bias = 0.35)


levelplot(Test.pred, #Trial.comp,
          #layout = c(2,1),
          margin = FALSE,
          xlab = NULL, 
          ylab = NULL, 
          #main = "Occurrence Probability",
          #names.attr= c("A", "B", "C"),
          maxpixels = 1e6,
          col.regions = cr, at = rng,
          colorkey = list(labels=list(at=c(0.02, 0.25, 0.50, 0.75, 0.98),  
                                 labels=c("0.00", "0.25", "0.50", "0.75", "1.00"), 
                                 fontface='bold', cex=1.5),
                                 labels=list(cex=18),
                                 space = "bottom"), 
          par.strip.text = list(fontface='bold', cex=1.9),
          par.settings = list(axis.line = list(col = "black"),
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          scales = list(cex = 1.25)) + 
  latticeExtra::layer(sp.polygons(Domain2, col = "grey26", fill="lightgray", lwd = 1)) +
  latticeExtra::layer(sp.polygons(LakesLL , fill = "lightblue", alpha = 1, lwd=2, col = "grey29")) +
  latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                              offset = c(-83.5, 46.3),
                                              scale = c(1.9,1.3))})

#Save grids
#writeRaster(Pred.stk2[[3]], "C:/Users/humph173/Documents/Michigan_State/Sean/Present_092219/Prediction_high.tif")

10 Covariates

Cov.stk = stack(Test.sno, Test.tem)
names(Cov.stk) = c("Snow", "Temp")

Domain2 = subset(Domain, ID != 21 & ID != 48)

rng = seq(0.69, 0.73, 0.001)
mCols  = rev(viridis(100))
cr = colorRampPalette(c(mCols), 
         bias = 0.5)

#values(Test.pred) = Scale(values(Test.pred))
levelplot(Cov.stk[[1]], #Trial.comp,
          #layout = c(2,1),
          margin = FALSE,
          xlab = NULL, 
          ylab = NULL, 
          main = "Snow Weeks Effect Size \n(mean = 0.71)",
          #names.attr= c("Snow Weeks (mean = 0.55)"),
          maxpixels = 1e6,
          col.regions = cr, at = rng,
          colorkey = list(labels=list(at=c(seq(0.69, 0.73, 0.01)),  
                                 labels=c(paste(round(seq(0.69, 0.73, 0.01),2))), 
                                 fontface='bold', cex=1.5),
                                 labels=list(cex=18),
                                 space = "bottom"), 
          par.strip.text = list(fontface='bold', cex=1.9),
          par.settings = list(axis.line = list(col = "black"),
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          scales = list(cex = 1.25)) + 
  latticeExtra::layer(sp.polygons(Domain2, col = "grey26", fill="lightgray", lwd = 1)) +
  latticeExtra::layer(sp.polygons(LakesLL , fill = "lightblue", alpha = 1, lwd=2, col = "grey29")) +
  latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                              offset = c(-83.5, 46.3),
                                              scale = c(1.9,1.3))})

Cov.stk = stack(Test.sno, Test.tem)
names(Cov.stk) = c("Snow", "Temp")

Domain2 = subset(Domain, ID != 21 & ID != 48)

rng = seq(-0.53, -0.60, -0.001)
mCols  = rev(viridis(100))
cr = colorRampPalette(c(mCols), 
         bias = 0.5)

#values(Test.pred) = Scale(values(Test.pred))
levelplot(Cov.stk[[2]], #Trial.comp,
          #layout = c(2,1),
          margin = FALSE,
          xlab = NULL, 
          ylab = NULL, 
          main = "Temperature Effect Size \n(mean = -0.57)",
          #names.attr= c("Snow Weeks (mean = 0.55)"),
          maxpixels = 1e6,
          col.regions = cr, at = rng,
          colorkey = list(labels=list(at=c(seq(-0.53, -0.60, -0.01)),  
                                 labels=c(paste(seq(-0.53, -0.60, -0.01))), 
                                 fontface='bold', cex=1.5),
                                 labels=list(cex=18),
                                 space = "bottom"), 
          par.strip.text = list(fontface='bold', cex=1.9),
          par.settings = list(axis.line = list(col = "black"),
                              strip.background = list(col = 'transparent'), 
                              strip.border = list(col = 'transparent')),
          scales = list(cex = 1.25)) + 
  latticeExtra::layer(sp.polygons(Domain2, col = "grey26", fill="lightgray", lwd = 1)) +
  latticeExtra::layer(sp.polygons(LakesLL , fill = "lightblue", alpha = 1, lwd=2, col = "grey29")) +
  latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                              offset = c(-83.5, 46.3),
                                              scale = c(1.9,1.3))})

Validation

ModResult = SVC.mod
Pred.pntsN = Validation.set2

#Get RF
pLoc = cbind(Pred.pntsN@coords[,1], Pred.pntsN@coords[,2])
Ap = inla.spde.make.A(mesh, loc=pLoc)
Pred.pntsN$RF = drop(Ap %*% ModResult$summary.random$field.env$mean) 


#Intercept
Fixed.df = ModResult$summary.fixed

Pred.pntsN$Sno = Pred.pntsN$s.mSnow5yrE*drop(Ap %*% ModResult$summary.random$field.sno$mean) #+  Fixed.df[3,1]
Pred.pntsN$Tem = Pred.pntsN$s.mxTempE*drop(Ap %*% ModResult$summary.random$field.tem$mean) #+  Fixed.df[2,1]


Pred.pntsN@data = Pred.pntsN@data %>%
                       mutate(E.LP = RF + Sno + Tem + #Det +
                                     Fixed.df[1,1] +
                                     #Fixed.df[2,1] +
                                     s.mxTempE*Fixed.df[2,1] +
                                     s.mSnow5yrE*Fixed.df[3,1] +
                                     s.Type.1*Fixed.df[4,1] +
                                     #Type.2*Fixed.df[5,1] +
                                     s.Type.3*Fixed.df[5,1],
                                     #s.Type.4*Fixed.df[6,1],
                                     #Type.5*Fixed.df[8,1] +
                                     #Type.6*Fixed.df[7,1] +
                                     #Type.7*Fixed.df[10,1] +
                                     #Type.8*Fixed.df[8,1] +
                                     #Type.9*Fixed.df[7,1] +
                                     #Type.10*Fixed.df[13,1] +
                                     #Type.11*Fixed.df[14,1] +
                                     #Type.12*Fixed.df[8,1], 
                              P.LP = Sno + Tem + #Det +
                                     #Fixed.df[1,1] +
                                     #Fixed.df[2,1] +
                                     s.mxTempE*Fixed.df[2,1] +
                                     s.mSnow5yrE*Fixed.df[3,1] +
                                     s.Type.1*Fixed.df[4,1] +
                                     #Type.2*Fixed.df[5,1] +
                                     s.Type.3*Fixed.df[5,1], 
                                     #s.Type.4*Fixed.df[6,1],
                                     #Type.5*Fixed.df[8,1] +
                                     #Type.6*Fixed.df[7,1] +
                                     #Type.7*Fixed.df[10,1] +
                                     #Type.8*Fixed.df[8,1] +
                                     #Type.9*Fixed.df[7,1] +
                                     #Type.10*Fixed.df[13,1] +
                                     #Type.11*Fixed.df[14,1] +
                                     #Type.12*Fixed.df[8,1],  
                              Est = plogis(E.LP),
                              Pred = plogis(P.LP))
Reg2.PA = as.data.frame(cbind(1:dim(Pred.pntsN@data)[1], Pred.pntsN$OBS, Pred.pntsN$Pred))
names(Reg2.PA) = c("ID", "OBS", "Pred")

Thresh = optimal.thresholds(Reg2.PA, opt.methods = c("MaxSens+Spec", "PredPrev=Obs"))
Thresh
##         Method Pred
## 1 MaxSens+Spec 0.63
## 2 PredPrev=Obs 0.76
Reg2.PA.out1 = presence.absence.accuracy(Reg2.PA, threshold = Thresh[1,2])
Reg2.PA.out2 = presence.absence.accuracy(Reg2.PA, threshold = 0.5)
Reg2.PA.out = rbind(Reg2.PA.out1, Reg2.PA.out2)

#Calculate TSS
Reg2.PA.out$TSS = Reg2.PA.out$sensitivity + Reg2.PA.out$specificity - 1

Reg2.PA.out
##   model threshold       PCC sensitivity specificity     Kappa       AUC
## 1  Pred      0.63 0.8262784   0.9853801    0.772950 0.6214246 0.8521222
## 2  Pred      0.50 0.7678003   0.9980507    0.690624 0.5272494 0.8521222
##        PCC.sd sensitivity.sd specificity.sd   Kappa.sd      AUC.sd
## 1 0.005927081    0.003748970    0.007573136 0.01202777 0.005880771
## 2 0.006605501    0.001377703    0.008356096 0.01186339 0.005880771
##         TSS
## 1 0.7583301
## 2 0.6886747
Validation.tab = Reg2.PA.out %>% select(threshold, PCC, sensitivity, specificity, AUC, TSS)

kable(Validation.tab, caption = "Validation Metrics", digits=2) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Validation Metrics

 threshold  PCC sensitivity specificity  AUC  TSS
      0.63 0.83        0.99        0.77 0.85 0.76
      0.50 0.77        1.00        0.69 0.85 0.69
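
An ROC curve gives the same information graphically (a sketch using PresenceAbsence::auc.roc.plot(); optional and not in the original script):

auc.roc.plot(Reg2.PA, opt.thresholds = TRUE) #ROC with optimal thresholds marked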

11 Comparison Models

JFrm3 = Y ~ -1 + intercept1 + 
                f(field.env, 
                  model=spde) +
                mxTempE + mSnow5yrE +
                Type.1 + Type.3 + Type.6   

  
#thetJ = Fixed.mod$internal.summary.hyperpar$mean
thetaJ = c(7.4867414, -0.4521236, 8.4357741, -1.4313818, 8.4668390, -1.4568577)

Fixed.mod = inla(JFrm3, #formula
                   data = inla.stack.data(FE.stk),
                   family = "binomial", 
                   verbose = TRUE, 
                   Ntrials = inla.stack.data(FE.stk)$Field.trials, #number trials 
                   control.fixed = list(prec = 0.001, 
                              prec.intercept = 0.0001), 
                   control.predictor = list(
                                   A = inla.stack.A(FE.stk), 
                             compute = TRUE, 
                                link = 1),
                  #control.mode = list(restart = TRUE, theta = thetaJ), 
                  control.inla = list(strategy="gaussian", 
                                       int.strategy = "eb"),
                   control.results = list(return.marginals.random = TRUE,
                                          return.marginals.predictor = TRUE),
                   control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE)) 

#save(list=c("Fixed.mod", "FE.stk"), file="./Results_112119/FE_112119.RData") 
load("./Results_112119/FE_112119.RData")
summary(Fixed.mod)
## 
## Call:
## c("inla(formula = JFrm3, family = \"binomial\", data = inla.stack.data(FE.stk), ",  "    Ntrials = inla.stack.data(FE.stk)$Field.trials, verbose = TRUE, ",  "    control.compute = list(dic = TRUE, cpo = TRUE, waic = TRUE), ",  "    control.predictor = list(A = inla.stack.A(FE.stk), compute = TRUE, ",  "        link = 1), control.inla = list(strategy = \"gaussian\", ",  "        int.strategy = \"eb\"), control.results = list(return.marginals.random = TRUE, ",  "        return.marginals.predictor = TRUE), control.fixed = list(prec = 0.001, ",  "        prec.intercept = 1e-04))")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          7.6416         30.2166          0.4797         38.3379 
## 
## Fixed effects:
##               mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## intercept1 -3.7314 0.1906    -4.1055  -3.7314    -3.3576 -3.7314   0
## mxTempE    -0.5150 0.2674    -1.0400  -0.5150     0.0096 -0.5150   0
## mSnow5yrE   0.7021 0.2882     0.1363   0.7020     1.2674  0.7021   0
## Type.1      0.1472 0.0636     0.0223   0.1472     0.2720  0.1472   0
## Type.3      0.1760 0.1150    -0.0498   0.1760     0.4015  0.1760   0
## Type.6     -0.0067 0.2109    -0.4208  -0.0068     0.4069 -0.0067   0
## 
## Random effects:
## Name   Model
##  field.env   SPDE2 model 
## 
## Model hyperparameters:
##                         mean       sd 0.025quant 0.5quant 0.975quant
## Range for field.env 1666.014 1375.481   354.8601 1275.569   5318.937
## Stdev for field.env    1.463    1.014     0.3741    1.196      4.116
##                         mode
## Range for field.env 799.9178
## Stdev for field.env   0.8235
## 
## Expected number of effective parameters(std dev): 11.04(0.00)
## Number of equivalent replicates : 305.29 
## 
## Deviance Information Criterion (DIC) ...............: 452.87
## Deviance Information Criterion (DIC, saturated) ....: 209.82
## Effective number of parameters .....................: 11.24
## 
## Watanabe-Akaike information criterion (WAIC) ...: 447.61
## Effective number of parameters .................: 5.691
## 
## Marginal log-Likelihood:  -255.62 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed
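
Beyond the summary table, the full posterior marginals of the hyperparameters are stored on the fitted object; for example, quantiles of the spatial range (in the units of the mesh coordinates) can be pulled directly. A minimal sketch, assuming the marginal names match the summary labels above:

#Posterior quantiles of the spatial range for field.env
range.marg = Fixed.mod$marginals.hyperpar[["Range for field.env"]]
inla.qmarginal(c(0.025, 0.5, 0.975), range.marg)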

11.1 Covariates

Cov.fe = Fixed.mod$summary.fixed[,c(1:3,5)]
names(Cov.fe) = c("Mean", "sd", "Q0.025", "Q0.975")

kable(Cov.fe, caption = "Fixed Effects", digits=2) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Fixed Effects

             Mean    sd  Q0.025  Q0.975
intercept1  -3.73  0.19   -4.11   -3.36
mxTempE     -0.51  0.27   -1.04    0.01
mSnow5yrE    0.70  0.29    0.14    1.27
Type.1       0.15  0.06    0.02    0.27
Type.3       0.18  0.11   -0.05    0.40
Type.6      -0.01  0.21   -0.42    0.41
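
Among the covariates, only mSnow5yrE and Type.1 have 95% intervals that exclude zero; a posterior exceedance probability makes the strength of the snow effect explicit. A minimal sketch using the stored fixed-effect marginals:

#Posterior probability that the snow effect is positive
1 - inla.pmarginal(0, Fixed.mod$marginals.fixed$mSnow5yrE)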

Compare

Comparing overall fit of the SVC and fixed-effects models by DIC and WAIC.

SVC.mod$dic$dic
## [1] 453.946
Fixed.mod$dic$dic
## [1] 452.8703
SVC.mod$waic$waic
## [1] 447.4024
Fixed.mod$waic$waic
## [1] 447.6102

The fixed-effects model edges out the SVC model on DIC (452.9 vs. 453.9), while the SVC model edges it out on WAIC (447.40 vs. 447.61); by either criterion the difference is negligible.
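
For reporting, the four values can be collected into one small table; a convenience sketch, not part of the original script:

#Assemble fit criteria into a single comparison table
IC.tab = data.frame(Model = c("SVC", "Fixed"),
                    DIC = c(SVC.mod$dic$dic, Fixed.mod$dic$dic),
                    WAIC = c(SVC.mod$waic$waic, Fixed.mod$waic$waic))
IC.tab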

Validation

Out-of-sample validation of the fixed-effects model on the hold-out points (Validation.set2), mirroring the SVC validation above.

ModResult = Fixed.mod #model to validate
Pred.pntsN = Validation.set2 #hold-out points

#Project the posterior mean of the spatial random field to the validation locations
pLoc = cbind(Pred.pntsN@coords[,1], Pred.pntsN@coords[,2])
Ap = inla.spde.make.A(mesh, loc=pLoc)
Pred.pntsN$RF = drop(Ap %*% ModResult$summary.random$field.env$mean) 
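
Each row of Ap holds the barycentric weights of the mesh triangle containing the corresponding point, so rows should sum to 1 for points that fall inside the mesh. A quick sanity check, illustrative only:

#Check the projector: one row per validation point, weights summing to ~1
stopifnot(nrow(Ap) == nrow(pLoc))
summary(Matrix::rowSums(Ap)) #values well below 1 flag points outside the mesh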



#SVC-specific terms (detection field, site effects, spatially varying
#coefficient fields) do not apply to the fixed-effects model and are omitted.

#Fixed-effect posterior means (intercept and covariates)
Fixed.df = ModResult$summary.fixed


Pred.pntsN@data = Pred.pntsN@data %>%
                       mutate(E.LP = RF + #with-spatial-field predictor (in-sample estimate)
                                     Fixed.df[1,1] +
                                     s.mxTempE*Fixed.df[2,1] +
                                     s.mSnow5yrE*Fixed.df[3,1] +
                                     s.Type.1*Fixed.df[4,1] +
                                     s.Type.3*Fixed.df[5,1],
                              P.LP = #covariate-only predictor (intercept and spatial field omitted);
                                     #Type.6 (Fixed.df[6,1], posterior mean ~0) is also omitted
                                     s.mxTempE*Fixed.df[2,1] +
                                     s.mSnow5yrE*Fixed.df[3,1] +
                                     s.Type.1*Fixed.df[4,1] +
                                     s.Type.3*Fixed.df[5,1],
                              Est = plogis(E.LP), #inverse-logit to the probability scale
                              Pred = plogis(P.LP))
Reg2.PA = as.data.frame(cbind(1:dim(Pred.pntsN@data)[1], Pred.pntsN$OBS, Pred.pntsN$Pred))
names(Reg2.PA) = c("ID", "OBS", "Pred")

Thresh = optimal.thresholds(Reg2.PA, opt.methods = c("MaxSens+Spec", "PredPrev=Obs"))
Thresh
##         Method Pred
## 1 MaxSens+Spec 0.65
## 2 PredPrev=Obs 0.75
Reg2.PA.out1 = presence.absence.accuracy(Reg2.PA, threshold = Thresh[1,2])
Reg2.PA.out2 = presence.absence.accuracy(Reg2.PA, threshold = 0.5)
Reg2.PA.out = rbind(Reg2.PA.out1, Reg2.PA.out2)

#Calculate TSS
Reg2.PA.out$TSS = Reg2.PA.out$sensitivity + Reg2.PA.out$specificity - 1

Reg2.PA.out
##   model threshold       PCC sensitivity specificity     Kappa       AUC
## 1  Pred      0.65 0.8326401   0.9668616   0.7876511 0.6289838 0.8539137
## 2  Pred      0.50 0.7678003   0.9980507   0.6906240 0.5272494 0.8539137
##        PCC.sd sensitivity.sd specificity.sd   Kappa.sd      AUC.sd
## 1 0.005839897    0.005590955    0.007393179 0.01217976 0.005833895
## 2 0.006605501    0.001377703    0.008356096 0.01186339 0.005833895
##         TSS
## 1 0.7545127
## 2 0.6886747
Validation.tab = Reg2.PA.out %>% select(threshold, PCC, sensitivity, specificity, AUC, TSS)

kable(Validation.tab, caption = "Non SVC", digits=2) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Non SVC

 threshold   PCC  sensitivity  specificity   AUC   TSS
      0.65  0.83         0.97         0.79  0.85  0.75
      0.50  0.77         1.00         0.69  0.85  0.69

At their optimized thresholds the two models validate almost identically (AUC = 0.85 for both; TSS = 0.76 for the SVC model vs. 0.75 here), consistent with the DIC/WAIC comparison above.