Manuscript: The spatial and temporal relationship of blue-winged teal (Anas discors) to avian influenza H5 and H7 hemagglutinin subtypes across North America

1 Load and Prep Data

1.1 Telemetry Data

Filtering out “dead” locations (keeping Fate == "alive").
Selecting “plausible” records using the DAF_Filter (DAF_Filter == 0).
Formatting dates, adding local time (America/Chicago), and extracting hour, day, week, and month.
Assigning seasons based on date.

Argos_sp = read.csv("C:/Users/humph173/Documents/LabWork/Patuxent/Douglas_080218/BWTeal_2013-2017_Ramey_argos_tabular_diag_gis_filtered.csv",
                    stringsAsFactors=FALSE,
                    header = TRUE, sep=",")[,1:35] %>% 
                    filter(Fate == "alive",
                           DAF_Filter == 0) %>%
                    mutate(ID = Animal_ID,
                           ObsDate = as.POSIXct(Location_Datetime_GMT, 
                                                tz = "GMT", 
                                                format = "%Y/%m/%d %H:%M:%S"),
                           ObsDateD = as.Date(Location_Datetime_GMT),
                           LocT = ymd_hms(format(ObsDate, tz="America/Chicago", usetz=TRUE)),
                           LocHR = hour(LocT),
                           LocDY = day(LocT),
                           LocWK = week(LocT),
                           LocMN = month(LocT))

Argos_sp$Season = ifelse(Argos_sp$LocMN == 12 | Argos_sp$LocMN == 1, "Winter",
                      ifelse(Argos_sp$LocMN == 3, "Spring",
                             ifelse(Argos_sp$LocMN >= 4 & Argos_sp$LocMN <= 8, "Breeding",
                                    ifelse(Argos_sp$LocMN >= 9 & Argos_sp$LocMN <= 11, "Fall",
                                           ifelse(Argos_sp$LocMN == 2 & Argos_sp$LocDY <= 15, "Winter",
                                                  ifelse(Argos_sp$LocMN == 2 & Argos_sp$LocDY >= 16, "Spring",
                                                         NA))))))
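
For readability, the same season rules can be written with dplyr::case_when(); a minimal equivalent sketch (assuming the LocMN and LocDY columns created above):

#Equivalent season assignment with case_when() (sketch; not run above)
Argos_sp = Argos_sp %>%
           mutate(Season = case_when(
                    LocMN == 12 | LocMN == 1   ~ "Winter",
                    LocMN == 2  & LocDY <= 15  ~ "Winter",
                    LocMN == 2  & LocDY >= 16  ~ "Spring",
                    LocMN == 3                 ~ "Spring",
                    LocMN >= 4  & LocMN <= 8   ~ "Breeding",
                    LocMN >= 9  & LocMN <= 11  ~ "Fall",
                    TRUE                       ~ NA_character_))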

1.2 Boundaries of North America

Jurisdictional boundaries for North America. Loading the shapefile and creating raster and point versions for later mapping.

NAmer.reg = readOGR(dsn = "./Arc/NAmer/States", 
                  layer = "NA_regions", 
                  stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\Arc\NAmer\States", layer: "NA_regions"
## with 127 features
## It has 36 fields
## Integer64 fields read as strings:  FID_NA_reg FID_Cuba FID_ FID_NA_r_1 FID_0 MEXST0_ MEXST0_ID TOTPOP MALE FEMALE FID_NA_sta FID_Bahama OBJECTID TOTPOP_CY
LL84 = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

NAmer = gUnaryUnion(NAmer.reg)

#Great Lakes Boundaries
Lakes = readOGR(dsn = "./Arc/Lakes", 
                layer = "Lakes", 
                stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\Arc\Lakes", layer: "Lakes"
## with 1 features
## It has 11 fields
## Integer64 fields read as strings:  OBJECTID gid water sqmi sqkm
Lakes = spTransform(Lakes, proj4string(NAmer.reg))


tmp.ras = raster(res=0.2) #raster version
extent(tmp.ras) = extent(NAmer)

Domain.r = rasterize(NAmer,
                     tmp.ras,
                     field = 0,
                     background = NA)

Grd.pnt = rasterToPoints(Domain.r, sp = TRUE)

Grd.pnt@data = Grd.pnt@data %>%
               mutate(Long = Grd.pnt@coords[,1],
                      Lat = Grd.pnt@coords[,2]) %>%
               select(-layer)

GLakes = readOGR(dsn = "./Arc/RedRast/GLake", 
                  layer = "GLakes", 
                  stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\Arc\RedRast\GLake", layer: "GLakes"
## with 5 features
## It has 6 fields
## Integer64 fields read as strings:  OBJECTID_1 ObjectID

Convert to Points
Converting telemetry locations to spatial points.

Locs = cbind(Argos_sp$Longitude, Argos_sp$Latitude)

Sp.pnts = SpatialPointsDataFrame(Locs, Argos_sp)
proj4string(Sp.pnts) = proj4string(NAmer.reg)

2 Deployment Locations

RECORDS RETAINED FOR ANALYSIS:
PTT transmitter identifier (Tag)
Date of deployment (Deployment)
Last transmission date (Last)
Bird mortality type (Mortality)
Duration of record (Duration, days)
Median sampling interval between locations (Median, minutes)
Total locations (Sample)
Coordinates (Latitude, Longitude)
Geographic region of deployment (Region)

Deploy = read.csv("C:/Users/humph173/Documents/LabWork/Patuxent/Patux_level1/StopOver_out/Paper/Tags2.csv",
                    stringsAsFactors=FALSE,
                    header = TRUE, sep=",")[,1:10]

Deploy.pnts = SpatialPointsDataFrame(cbind(Deploy$deploy_on_longitude, Deploy$deploy_on_latitude), Deploy)
proj4string(Deploy.pnts) = proj4string(NAmer.reg)

plot(NAmer.reg)
plot(Deploy.pnts, add=T, col="red")

Deploy$Region = over(Deploy.pnts, NAmer.reg)[,"NAME"]

Deploy = Deploy %>% select(-Animal_ID)

names(Deploy) = c("Tag", "Deployment", "Last", "Mortality", "Duration", "Median", "Sample", "Latitude", "Longitude", "Region")

kable(Deploy, 
      caption = "Tag Deployment Table ") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Tag Deployment Table
Tag Deployment Last Mortality Duration Median Sample Latitude Longitude Region
131009 8/13/2013 10/26/2013 Hunting 74 29 558 52.04454 -107.0976 Saskatchewan
131010 8/13/2013 3/26/2014 Unknown 225 40 1142 52.04454 -107.0976 Saskatchewan
131011 8/14/2013 4/27/2015 Unknown 620 48 2730 50.48194 -112.1186 Alberta
131012 8/14/2013 11/5/2013 Unknown 83 41 464 50.48194 -112.1186 Alberta
131013 8/14/2013 10/24/2013 Hunting 71 37 453 52.04454 -107.0976 Saskatchewan
131014 8/11/2013 10/24/2013 Unknown 73 31 523 52.04454 -107.0976 Saskatchewan
131015 8/12/2013 4/8/2014 Unknown 239 43 1149 52.04454 -107.0976 Saskatchewan
131016 8/12/2013 1/25/2014 Unknown 165 42 803 52.04454 -107.0976 Saskatchewan
131017 8/12/2013 10/21/2013 Unknown 70 32 516 52.04454 -107.0976 Saskatchewan
131018 8/12/2013 10/17/2013 Unknown 66 32 468 52.04454 -107.0976 Saskatchewan
131019 8/15/2013 11/29/2013 Unknown 106 42 593 50.48194 -112.1186 Alberta
131020 8/15/2013 10/17/2013 Unknown 63 39 408 50.48194 -112.1186 Alberta
131009 3/17/2015 10/25/2015 Unknown 222 46 1087 30.00790 -94.1425 Texas
131013 3/19/2015 9/26/2015 Unknown 191 47 898 29.61340 -94.5342 Texas
135841 3/17/2015 12/9/2015 Hunting 267 49 1165 30.00790 -94.1425 Texas
135842 3/17/2015 11/12/2015 Unknown 240 57 816 30.00790 -94.1425 Texas
135843 3/17/2015 10/31/2015 Unknown 228 47 1055 30.00790 -94.1425 Texas
135844 3/17/2015 5/28/2015 Unknown 72 49 335 30.00790 -94.1425 Texas
135845 3/18/2015 4/28/2015 Unknown 41 52 181 29.67090 -94.4405 Texas
135846 3/18/2015 10/2/2015 Hunting 198 43 1061 29.67090 -94.4405 Texas
135847 3/18/2015 8/17/2015 Unknown 152 56 565 29.67090 -94.4405 Texas
135848 3/18/2015 10/27/2015 Unknown 223 46 1051 29.67090 -94.4405 Texas
135849 3/18/2015 9/27/2016 Unknown 559 53 2129 29.67090 -94.4405 Texas
135850 3/19/2015 3/31/2015 Unknown 12 53 57 29.61340 -94.5342 Texas
135851 3/19/2015 5/11/2015 Unknown 53 54 205 29.61340 -94.5342 Texas
135852 3/19/2015 9/30/2016 Unknown 561 56 1918 29.61340 -94.5342 Texas
135853 3/19/2015 5/5/2015 Unknown 47 53 211 29.61340 -94.5342 Texas
135854 3/19/2015 10/18/2015 Unknown 213 48 938 29.61340 -94.5342 Texas
135855 3/22/2015 10/21/2015 Unknown 213 48 955 30.55380 -91.8466 Louisiana
135856 3/22/2015 7/12/2015 Unknown 112 48 532 30.55380 -91.8466 Louisiana
135857 3/22/2015 7/22/2015 Unknown 122 51 519 30.55380 -91.8466 Louisiana
135858 3/22/2015 5/8/2015 Unknown 47 47 226 30.55380 -91.8466 Louisiana
135859 3/22/2015 7/17/2015 Unknown 117 46 569 30.55380 -91.8466 Louisiana
135860 3/22/2015 12/23/2016 Unknown 642 50 2647 30.55380 -91.8466 Louisiana
135861 3/22/2015 4/13/2016 Unknown 388 50 1338 30.55380 -91.8466 Louisiana
135862 3/22/2015 8/14/2015 Unknown 145 55 528 30.55380 -91.8466 Louisiana
135863 3/22/2015 4/17/2015 Unknown 26 50 125 30.55380 -91.8466 Louisiana
135864 3/21/2015 10/28/2015 Hunting 221 48 1019 30.46420 -91.5718 Louisiana
135865 3/21/2015 4/7/2015 Unknown 17 51 89 30.46420 -91.5718 Louisiana
135866 3/21/2015 5/31/2015 Unknown 71 53 290 30.46420 -91.5718 Louisiana
135867 3/21/2015 11/23/2015 Unknown 247 47 1141 30.46420 -91.5718 Louisiana
135868 3/21/2015 4/7/2015 Unknown 17 49 84 30.46420 -91.5718 Louisiana
#SelMod.fx = xtable(Deploy) #table for manuscript
#print(SelMod.fx, include.rownames = F)

3 Grid Residence Time

Estimated residence times serve as initial input values for later space-time modeling.

After estimating residence time for the full dataset, stopover locations are identified for the spring and fall migrations.

3.1 Annual (Full data set)

Estimating the duration of time (days) spent in each grid cell. Individual birds are returned as layers in a raster stack; birds are then combined to get the maximum duration any one bird spent in each cell. Time spent in grid cells lacking satellite fixes is interpolated assuming uniform movement between successful satellite fixes.
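
ResidenceRaster() is a project helper defined outside this document. A hypothetical minimal sketch of the uniform-movement assumption described above (the function name, arguments, and cell-splitting rule below are illustrative assumptions, not the actual implementation):

#Hypothetical sketch only (not the project function): elapsed time between
#consecutive fixes is split evenly across the grid cells crossed, assuming
#straight-line movement at constant speed between fixes.
ResidenceRaster_sketch = function(pnts, template.r){
  
    pnts  = pnts[order(pnts$ObsDate), ]  #order fixes in time
    res.r = template.r                   #domain raster (0 inside, NA outside)
    
    for(j in 2:nrow(pnts)){
      
        seg   = Line(coordinates(pnts)[(j-1):j, ])
        seg.l = SpatialLines(list(Lines(list(seg), ID = "s")))
        cells = cellFromLine(template.r, seg.l)[[1]]  #cells crossed by the segment
        
        dt = as.numeric(difftime(pnts$ObsDate[j], pnts$ObsDate[j-1], units = "days"))
        
        res.r[cells] = res.r[cells] + dt/length(cells) #split elapsed time evenly
    }
    
    res.r
}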

Animal.IDs = levels(as.factor(Sp.pnts$Animal_ID))

for(i in 1:length(Animal.IDs)){
  
    Ani.tmp = subset(Sp.pnts, Animal_ID == Animal.IDs[i])
    
    tmp.res = ResidenceRaster(Ani.tmp, Domain.r)
    
    if(i == 1){Res.stk = tmp.res}
      else{Res.stk = stack(Res.stk, tmp.res)}
    
}

names(Res.stk) = Animal.IDs

All.brds.res.stk = Res.stk

ZZ = max(All.brds.res.stk)

View residence time results (full dataset)

rng = seq(0, 81, 1)
mCols  = brewer.pal(9, "YlOrRd")[-c(1:2)]
cr0 = colorRampPalette(mCols)(n = 2000)
cr = colorRampPalette(c(cr0), 
         bias = 4)

#Removing background zeros for plotting
ZZ2 = ZZ
ZZ2[ZZ2 == 0] = NA


Plt.a = levelplot(ZZ2, 
             margin = FALSE,
             xlab =NULL, 
             ylab = NULL,
             main = "Residence Time (days)",
             maxpixels = 1e5,
             col.regions = cr, at =rng,
             colorkey = list(labels=list(at=c(0, 20, 40, 60, 80, 103),  
                             labels=c("0", "20", "40", "60", "80", "100 Days"), cex=1),
                             labels=list(cex=12),
                             space = "bottom"), 
                             par.strip.text = list(fontface='bold', cex=1),
             par.settings = list(axis.line = list(col = "black"),
                                 strip.background = list(col = 'transparent'), 
                                 strip.border = list(col = 'transparent')),
                                 scales = list(cex = 1)) + 
             latticeExtra::layer(sp.polygons(NAmer.reg, col = "darkgray", lwd = 0.5)) 

Plt.a

3.2 Migration Period Stopovers

Residence times greater than 1 day during migration periods are assumed to be stopovers.

Spring migration.

Animal.IDs = levels(as.factor(Sp.pnts$Animal_ID))

for(i in 1:length(Animal.IDs)){
  
    Ani.tmp = subset(Sp.pnts, Animal_ID == Animal.IDs[i])
    Ani.tmp = subset(Ani.tmp, LocMN >= 2 & LocMN < 6)
    
    if(dim(Ani.tmp@data)[1] < 2){tmp.res = Domain.r}
        else{
    
          tmp.res = ResidenceRaster(Ani.tmp, Domain.r)}
          
          if(i == 1){Res.stkS = tmp.res}
            else{Res.stkS = stack(Res.stkS, tmp.res)}
    }
    

Spring.res = Res.stkS

names(Spring.res) = Animal.IDs

nStrata = dim(Spring.res)[3]

for(i in 1:nStrata){
  
  f.rast = Spring.res[[i]]
  f.rast[f.rast < 1] = NA
  
  if(sum(values(f.rast), na.rm=T) == 0){
    
      tmp.strat = as.data.frame(cbind(0,0,0))
      names(tmp.strat) = c("Long", "Lat", "Duration")
      tmp.strat$Bird = Animal.IDs[i]
  }
    else{
  
        tmp.strat = as.data.frame(rasterToPoints(f.rast, sp=F))
        
        names(tmp.strat) = c("Long", "Lat", "Duration")
        
        tmp.strat$Bird = Animal.IDs[i] }
        
      if(i == 1){stpovr.pnts = tmp.strat}
          else{stpovr.pnts = rbind(stpovr.pnts, tmp.strat)}
        }

Spr.StpOvr.pnts = stpovr.pnts %>% filter(Lat >= 31 & Lat <=45)
Spr.StpOvr.pnts$Season = "Spring"
Spr.StpOvr.pnts = subset(Spr.StpOvr.pnts, Lat != 0)

Fall migration.

Animal.IDs = levels(as.factor(Sp.pnts$Animal_ID))

for(i in 1:length(Animal.IDs)){
  
    Ani.tmp = subset(Sp.pnts, Animal_ID == Animal.IDs[i])
    Ani.tmp = subset(Ani.tmp, LocMN >= 9 & LocMN < 12)
    
    if(dim(Ani.tmp@data)[1] < 2){tmp.res = Domain.r}
        else{
    
          tmp.res = ResidenceRaster(Ani.tmp, Domain.r)}
          
          if(i == 1){Res.stkF = tmp.res}
            else{Res.stkF = stack(Res.stkF, tmp.res)}
    
}

Fall.res = Res.stkF

names(Fall.res) = Animal.IDs

nStrata = dim(Fall.res)[3]

for(i in 1:nStrata){
  
  f.rast = Fall.res[[i]]
  f.rast[f.rast < 1] = NA
  
  if(sum(values(f.rast), na.rm=T) == 0){
    
      tmp.strat = as.data.frame(cbind(0,0,0))
      names(tmp.strat) = c("Long", "Lat", "Duration")
      tmp.strat$Bird = Animal.IDs[i]
  }
    else{
  
        tmp.strat = as.data.frame(rasterToPoints(f.rast, sp=F))
        
        names(tmp.strat) = c("Long", "Lat", "Duration")
        
        tmp.strat$Bird = Animal.IDs[i] }
        
      if(i == 1){stpovr.pnts = tmp.strat}
          else{stpovr.pnts = rbind(stpovr.pnts, tmp.strat)}
        }

Fall.StpOvr.pnts = stpovr.pnts %>% filter(Lat >= 31 & Lat <=45)
Fall.StpOvr.pnts$Season = "Fall"
Fall.StpOvr.pnts = subset(Fall.StpOvr.pnts, Lat != 0)

#Combine
SprFall.stpover.pnt = rbind(Spr.StpOvr.pnts, Fall.StpOvr.pnts)

SprFall.stpover.pnts = SpatialPointsDataFrame(SprFall.stpover.pnt[,c("Long","Lat")], SprFall.stpover.pnt)
proj4string(SprFall.stpover.pnts) = proj4string(NAmer)

View Stopover Locations
Residence times greater than 1 day during migration periods are assumed to be stopovers.

wmap_df = fortify(NAmer.reg)
## Regions defined for each Polygons
tmp.df = SprFall.stpover.pnt 

tmp.df$Duration = ifelse(tmp.df$Duration >= 3, 3, tmp.df$Duration)
#tmp.df$Size[tmp.df$Size==0] = 0.1

#myCol = c("black", "red", "green3", "blue", "cyan", "magenta", "orange", "brown") #unused in this plot

#names(myCol) = levels(factor(tmp.df$AI)) #tmp.df has no AI column; color is mapped to Duration below

myShapes = c(3:4)

names(myShapes) = levels(factor(tmp.df$Season))


#colScale = scale_colour_manual(name = "Strain", values = myCol, 
#                               labels = c("Fall", "Spring")) #unused (commented out in the plot call below)

ShpScale = scale_shape_manual(name = "Season", values = myShapes,
                              labels = c("Fall", "Spring"))

ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_point(data=tmp.df, 
                   aes(Long, Lat, 
                       group=Season, fill=NULL,
                       shape = Season,
                       color = Duration)) +
        scale_colour_gradient(low = "blue", high = "red") +
        ShpScale +
        #colScale + 
        xlab("Longitude") +
        ylab("Latitude") +
        ggtitle("Stopover Locations") +
        scale_x_longitude(xmin=-110, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              panel.border = element_blank(),
              legend.title = element_text(size=16, face="bold"),
              axis.title.x = element_text(size=22, face="bold"),
              axis.title.y = element_text(size=22, face="bold"),
              plot.title = element_text(size=24, face="bold", hjust = 0.5))
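
scale_x_longitude() and scale_y_latitude() are custom axis helpers used in the maps throughout this document. A hypothetical sketch of the longitude version (degree-formatted breaks; the behavior shown is an assumption, not the project code):

#Hypothetical sketch of the longitude axis helper (latitude is analogous)
scale_x_longitude_sketch = function(xmin, xmax, step){
  
    brks = seq(xmin, xmax, by = step)
    
    scale_x_continuous(breaks = brks,
                       labels = paste0(abs(brks), "\u00B0",
                                       ifelse(brks < 0, "W",
                                              ifelse(brks > 0, "E", ""))))
}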

Stopovers By Bird

Bird.StpOvr = as.data.frame(
                    SprFall.stpover.pnt %>%
                        group_by(Bird, Season) %>%
                        summarize(Stopovers = length(Duration),
                                  MinDuration = min(Duration),
                                  MeanDuration = mean(Duration),
                                  MaxDuration = max(Duration)))


Sp.set = Bird.StpOvr %>% filter(Season == "Spring")
Fl.set = Bird.StpOvr %>% filter(Season == "Fall")

kable(Sp.set, 
      caption = "Spring Stopovers by Bird") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Spring Stopovers by Bird
Bird Season Stopovers MinDuration MeanDuration MaxDuration
BWTE13S_131011 Spring 11 1.023812 3.400350 8.718994
BWTE15S_131009 Spring 10 1.280445 2.290844 4.010739
BWTE15S_131013 Spring 9 1.270052 2.724942 4.263789
BWTE15S_135841 Spring 6 1.184467 1.671687 2.018994
BWTE15S_135842 Spring 4 1.956225 4.186395 6.142620
BWTE15S_135843 Spring 8 1.477020 3.076974 5.651407
BWTE15S_135844 Spring 4 1.423539 2.121222 2.802661
BWTE15S_135845 Spring 2 1.019103 1.019103 1.019103
BWTE15S_135846 Spring 7 1.245661 2.370123 4.288308
BWTE15S_135847 Spring 4 1.506573 3.040828 4.547401
BWTE15S_135848 Spring 8 1.001907 2.478607 5.257314
BWTE15S_135849 Spring 10 1.028808 2.621629 6.558354
BWTE15S_135851 Spring 3 1.365678 1.940705 2.338149
BWTE15S_135852 Spring 4 2.735596 5.490099 8.225694
BWTE15S_135855 Spring 11 1.040069 2.396922 5.434808
BWTE15S_135857 Spring 8 1.203439 2.261800 4.232223
BWTE15S_135858 Spring 4 1.302675 2.790639 4.093314
BWTE15S_135859 Spring 6 1.275443 2.118704 3.410049
BWTE15S_135860 Spring 10 1.270823 2.259087 3.862162
BWTE15S_135861 Spring 1 1.007670 1.007670 1.007670
BWTE15S_135864 Spring 6 1.034225 2.707784 5.314719
BWTE15S_135866 Spring 12 1.007364 2.114763 3.041905
BWTE15S_135867 Spring 14 1.184502 2.159875 3.574317
BWTE15S_135868 Spring 2 1.691965 1.696519 1.701073
kable(Fl.set, 
      caption = "Fall Stopovers by Bird") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Fall Stopovers by Bird
Bird Season Stopovers MinDuration MeanDuration MaxDuration
BWTE13S_131010 Fall 2 2.794068 2.794696 2.795323
BWTE13S_131011 Fall 6 1.387099 5.625048 11.329162
BWTE13S_131012 Fall 8 1.243968 3.565047 6.579313
BWTE13S_131015 Fall 6 1.174337 2.161307 3.662968
BWTE13S_131016 Fall 8 1.197418 2.641036 4.769214
BWTE13S_131019 Fall 9 1.208699 2.970959 5.723570
BWTE13S_131020 Fall 9 1.022517 1.848832 3.710053
BWTE15S_135841 Fall 6 2.309841 4.934694 9.361109
BWTE15S_135842 Fall 9 1.066189 7.380978 21.224169
BWTE15S_135843 Fall 4 1.168868 2.396239 3.565108
BWTE15S_135846 Fall 2 1.110884 1.110884 1.110884
BWTE15S_135849 Fall 13 1.032501 2.046960 3.207565
BWTE15S_135852 Fall 112 1.205674 1.430623 8.021270
BWTE15S_135860 Fall 8 1.097183 2.509382 5.366707
BWTE15S_135861 Fall 4 3.028012 6.084214 9.112226
BWTE15S_135867 Fall 12 1.228074 2.482733 4.547319

4 Poultry Proximity

Measuring distances between stopover locations (identified from the residence time analysis) and commercial domestic poultry operations of various capacities (livestock abundances).

nProj = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0 +lon_0=0
         +x_0=0 +y_0=0 +k=1 +units=km +nadgrids=@null +no_defs" #Mercator projection (km)

NAMer_chk = raster("C:/Users/humph173/Documents/LabWork/Patuxent/GlobPoultry/NAmerExt/NAmer_chick.tif") #Livestock of the world.

NAMer_chk1000 = NAMer_chk
NAMer_chk1000[NAMer_chk1000 < 1000] = NA
Chk1000 = rasterToPoints(NAMer_chk1000, sp=T)

XX = pointDistance(SprFall.stpover.pnts, Chk1000)

NAMer_chk10k = NAMer_chk
NAMer_chk10k[NAMer_chk10k < 10000] = NA
Chk10k = rasterToPoints(NAMer_chk10k, sp=T)

NAMer_chk100k = NAMer_chk
NAMer_chk100k[NAMer_chk100k < 100000] = NA
Chk100k = rasterToPoints(NAMer_chk100k, sp=T)

NAMer_chk250k = NAMer_chk
NAMer_chk250k[NAMer_chk250k < 250000] = NA
Chk250k = rasterToPoints(NAMer_chk250k, sp=T)

NAMer_chk500k = NAMer_chk
NAMer_chk500k[NAMer_chk500k < 500000] = NA
Chk500k = rasterToPoints(NAMer_chk500k, sp=T)

NAMer_chk1m = NAMer_chk
NAMer_chk1m[NAMer_chk1m < 1000000] = NA
Chk1m = rasterToPoints(NAMer_chk1m, sp=T)
#Nearest-neighbor distances to poultry operations (by capacity class)
SO.pnt = spTransform(SprFall.stpover.pnts, nProj)
so.pp = as.ppp(SO.pnt)
Chk1000p = as.ppp(spTransform(Chk1000, nProj))

NNS = as.data.frame(nncross(so.pp, Chk1000p, k=1))
SprFall.stpover.pnts$nChk1000 = NNS[,1]


Chk10kp = as.ppp(spTransform(Chk10k, nProj))
NNS = as.data.frame(nncross(so.pp, Chk10kp, k=1))
SprFall.stpover.pnts$nChk10k = NNS[,1]


Chk100kp = as.ppp(spTransform(Chk100k, nProj))
NNS = as.data.frame(nncross(so.pp, Chk100kp, k=1))
SprFall.stpover.pnts$nChk100k = NNS[,1]


Chk250kp = as.ppp(spTransform(Chk250k, nProj))
NNS = as.data.frame(nncross(so.pp, Chk250kp, k=1))
SprFall.stpover.pnts$nChk250k = NNS[,1]

Chk500kp = as.ppp(spTransform(Chk500k, nProj))
NNS = as.data.frame(nncross(so.pp, Chk500kp, k=1))
SprFall.stpover.pnts$nChk500k = NNS[,1]

Chk1mp = as.ppp(spTransform(Chk1m, nProj))
NNS = as.data.frame(nncross(so.pp, Chk1mp, k=1))
SprFall.stpover.pnts$nChk1m = NNS[,1]
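
The six blocks above repeat one pattern: mask the poultry raster at a capacity threshold, convert the remaining cells to points, and take the nearest-neighbor distance to each stopover. An equivalent compact loop, sketched assuming the same objects (NAMer_chk, so.pp, nProj) are in scope:

#Compact sketch of the threshold/nearest-distance steps above (equivalent logic)
thresholds = c(nChk1000 = 1000, nChk10k = 1e4, nChk100k = 1e5,
               nChk250k = 2.5e5, nChk500k = 5e5, nChk1m = 1e6)

for(v in names(thresholds)){
  
    chk.r = NAMer_chk
    chk.r[chk.r < thresholds[v]] = NA  #keep cells at/above the capacity threshold
    
    chk.pp = as.ppp(spTransform(rasterToPoints(chk.r, sp = TRUE), nProj))
    
    SprFall.stpover.pnts[[v]] = nncross(so.pp, chk.pp, k = 1)$dist  #km (projection in km)
}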

View Proximity to Commercial Poultry

wmap_df = fortify(NAmer.reg)
## Regions defined for each Polygons
tmp.df = SprFall.stpover.pnts@data %>% select(-c(Duration, Bird))

myShapes = c(3:4)

names(myShapes) = levels(factor(tmp.df$Season))

ShpScale = scale_shape_manual(name = "Season", values = myShapes,
                              labels = c("Fall", "Spring"))

tmp.dfm = melt(tmp.df, c("Long", "Lat", "Season"))

tmp.dfm$Kilometers = as.numeric(tmp.dfm$value)

tmp.dfm$variable = ordered(tmp.dfm$variable, levels = c("nChk1000", "nChk10k", "nChk100k", "nChk250k", "nChk500k", "nChk1m"))

ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_point(data=tmp.dfm, 
                   aes(Long, Lat, 
                       group=Season, fill=NULL,
                       shape = Season,
                       color = Kilometers)) +
        scale_colour_gradient(low = "blue", high = "red",
                              breaks=seq(0, 3000, 500)) +
        ShpScale +
        facet_wrap(~variable, ncol=3) +
        xlab(" ") +
        ylab(" ") +
        ggtitle("Stopover Proximity to Poultry") +
        scale_x_longitude(xmin=-110, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              panel.border = element_blank(),
              legend.title = element_text(size=16, face="bold"),
              axis.title.x = element_text(size=22, face="bold"),
              axis.title.y = element_text(size=22, face="bold"),
              plot.title = element_text(size=24, face="bold", hjust = 0.5))

5 Construct Triangulated Mesh

A 3D Cartesian projection scaled to one Earth radius (6,371 km) is used. The mesh is used to estimate the model spatial fields.

NA.bf = gBuffer(NAmer, 
                width = 1, 
                byid = FALSE) 

NA.bnds = inla.sp2segment(NA.bf)

MaxEdge = 0.5

mesh0 = inla.mesh.2d(boundary = NA.bnds, 
                     cutoff = 1, 
                     max.edge = MaxEdge,
                     min.angle = 21) 

MeshLocs = cbind(mesh0$loc[,1], mesh0$loc[,2]) 

xyz = as.data.frame(                
          inla.mesh.map(MeshLocs, 
                        projection = "longlat", 
                        inverse = TRUE))

true.radius.of.earth = 6371 #km
radius.of.earth = 1

mesh.dom = inla.mesh.create(loc = xyz,
                     cutoff = 35/true.radius.of.earth, 
                     refine=list(max.edge = 1000/true.radius.of.earth, 
                                 min.angle = 26))

Click, drag, and zoom to view mesh

mesh.dom$n 
## [1] 9555
plot(mesh.dom, rgl = TRUE, main = " ")


5.1 Estimate Areal Exposure

Tessellation of mesh nodes. We define the natural neighborhood surrounding each mesh node/vertex.
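
Mesh_tessellation() is a project helper defined outside this document. A hypothetical minimal sketch of a Dirichlet (Voronoi) tessellation around the mesh nodes (the name and coercions below are illustrative assumptions; spatstat and maptools are assumed available, as used elsewhere):

#Hypothetical sketch only (not the project function): Dirichlet/Voronoi tiles
#around each mesh node, returned as polygons.
Mesh_tessellation_sketch = function(node.pnts){
  
    node.ppp  = as.ppp(node.pnts)                 #mesh nodes as a point pattern
    node.tess = dirichlet(node.ppp)               #natural-neighborhood tiles
    node.poly = as(node.tess, "SpatialPolygons")  #coerce back to sp (via maptools)
    
    SpatialPolygonsDataFrame(node.poly,
                             data.frame(Node = seq_along(node.poly)),
                             match.ID = FALSE)
}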

dd = as.data.frame(cbind(mesh.dom$loc[,1], 
                         mesh.dom$loc[,2],
                         mesh.dom$loc[,3]))

dd = as.data.frame(
          inla.mesh.map(dd, 
                        projection = "longlat", 
                        inverse = FALSE))

names(dd) = c("Long", "Lat")

nProj = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0 +lon_0=0
         +x_0=0 +y_0=0 +k=1 +units=km +nadgrids=@null +no_defs"

dd.pnt = spTransform(
              SpatialPointsDataFrame(dd[,c("Long","Lat")], dd,
              proj4string = CRS(proj4string(NAmer))),
              nProj)

NA.unP = spTransform(NAmer, nProj)

Poly.msh = Mesh_tessellation(dd.pnt)

Poly.msh$Area = gArea(Poly.msh, byid=TRUE)
proj4string(Poly.msh) = proj4string(dd.pnt)

dd.pnt$Area = extract(Poly.msh, dd.pnt, 
                      method="simple")[,"Area"]

dd.pnt$Domain = is.na(over(dd.pnt, NA.unP))

dd.pnt$Exp = ifelse(dd.pnt$Domain == TRUE, 0, dd.pnt$Area)

Poly.in = spTransform(Poly.msh, proj4string(NAmer))
Poly.in = crop(Poly.in, NAmer)

Poly.msh$Zones = 1:nrow(Poly.msh@data)

Aggregate Residence Time to Zones
Zones delineate each node’s area of influence. Here we find the maximum BWTE residence time within each zone (neighborhood).

#These are time intensive (previously run and saved as "Duration.csv")
#Poly.in$Dur = extract(ZZ, 
#                      spTransform(Poly.in, CRS(proj4string(ZZ))), 
#                      fun=max, sp=FALSE) 

#dd.pnt$Dur = as.numeric(extract(
#                          Poly.in, 
#                          spTransform(dd.pnt, proj4string(Poly.in)), 
 #                         method = "simple")[,"Dur"])

Poly.in$IDx = 1:nrow(Poly.in@data)
Poly.in$IDx = Poly.in$IDx - 1

tmp.cov = read.csv("./Arc/RedRast/Dur_tab/Duration.csv",
                     stringsAsFactors=FALSE,
                     header = TRUE, sep=",")
  
Poly.in$Dur = as.numeric(with(tmp.cov ,
                            MAX[match(Poly.in$IDx, 
                                                 FID)]))

Poly.in$Dur[is.na(Poly.in$Dur)] = 0

Additional Variables
Additional variables are matched to zones (these were pre-processed in the same manner as the residence time above).

GetFiles = list.files(path = "./Arc/RedRast/CSV2", full.names = TRUE)

CovLabels = substr(GetFiles, 20, 24)

for(i in 1:length(CovLabels)){
  
  tmp.cov = read.csv(GetFiles[[i]],
                     stringsAsFactors=FALSE,
                     header = TRUE, sep=",")
  
  tmp.vec = as.numeric(with(tmp.cov ,
                              MEAN[match(Poly.in$IDx, 
                                                   FID)]))
  
  tmp.vec[is.na(tmp.vec)] = mean(tmp.vec, na.rm=T)
  
  
  if(i == 1){OutCov = tmp.vec}
      else{OutCov = as.data.frame(cbind(OutCov, tmp.vec))}
  
}
  
names(OutCov) = CovLabels

tmp.cov = read.csv("./Arc/RedRast/CSV2/WaterArea.csv",
                     stringsAsFactors=FALSE,
                     header = TRUE, sep=",")
  
OutCov$Water = as.numeric(with(tmp.cov ,
                            COUNT[match(Poly.in$IDx, 
                                                 FID)]))

OutCov$Water[is.na(OutCov$Water)] = median(OutCov$Water, na.rm=T)


##Chicken
tmp.cov = read.csv("./Arc/RedRast/CSV2/GChick.csv",
                     stringsAsFactors=FALSE,
                     header = TRUE, sep=",")
  
OutCov$GChic = as.numeric(with(tmp.cov ,
                            MEAN[match(Poly.in$IDx, 
                                                 FID)]))

OutCov$GChic[is.na(OutCov$GChic)] = median(OutCov$GChic, na.rm=T)


##Ducks
tmp.cov = read.csv("./Arc/RedRast/CSV2/GDucks.csv",
                     stringsAsFactors=FALSE,
                     header = TRUE, sep=",")
  
OutCov$GDuck = as.numeric(with(tmp.cov ,
                            MEAN[match(Poly.in$IDx, 
                                                 FID)]))

OutCov$GDuck[is.na(OutCov$GDuck)] = median(OutCov$GDuck, na.rm=T)

Cov.df = OutCov

for(i in 1:ncol(Cov.df)){
      Cov.df[,i] = as.numeric(scale(Cov.df[,i], scale = T, center = T))
}
  

names(Cov.df) = paste(names(OutCov), "_s", sep="")
  
Cov.df = cbind(OutCov, Cov.df)                     

ScCov.df = names(Cov.df)

Poly.in@data = cbind(Poly.in@data, Cov.df)

Poly.inP = spTransform(Poly.in, proj4string(dd.pnt))

TTTT = over(dd.pnt, Poly.inP)
TTTT[is.na(TTTT)] = 0

dd.pnt@data = cbind(dd.pnt@data, TTTT)

6 AI Outbreaks and Surveillance

EMPRES
Load EMPRES Data
Loading global data, then selecting years 2004-2018, domestic livestock outbreaks, and records for North America. Initial data: Mar 03, 2018.
Second set (validation): Oct 14, 2018.
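
CleanEMPRES() and CleanEMPRES2() are project helpers defined outside this document. A heavily simplified, hypothetical sketch of the first (the raw EMPRES-i field names below are assumptions; only the downstream columns Long, Lat, Year, AI, Spp, and ObsDate are known from the code that follows):

#Hypothetical sketch only (not the project function)
CleanEMPRES_sketch = function(dir.path, out.name){
  
    files = list.files(dir.path, pattern = "*.csv", full.names = TRUE)
    
    emp = do.call(rbind, lapply(files, read.csv, stringsAsFactors = FALSE))
    
    emp = emp %>%
          mutate(ObsDate = as.Date(observationDate, format = "%d/%m/%Y"), #assumed raw field names
                 Year    = as.integer(format(ObsDate, "%Y")),
                 Long    = as.numeric(longitude),
                 Lat     = as.numeric(latitude),
                 AI      = serotypes,
                 Spp     = speciesDescription)
    
    assign(out.name, emp, envir = .GlobalEnv) #assign result under the supplied name
}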

CleanEMPRES("C:/Users/humph173/Documents/LabWork/Patuxent/EMPRES", "EMPRES.cln")
EMPRES.cln = as.data.frame(EMPRES.cln) %>% select(-localityQuality)

CleanEMPRES2("C:/Users/humph173/Documents/LabWork/Patuxent/EMPRESv", "EMPRES2.cln") #Validation Set
EMPRES2.cln = as.data.frame(EMPRES2.cln)

EMPRES.cln = rbind(EMPRES.cln, EMPRES2.cln)

#Observations
dim(EMPRES.cln)[1]
## [1] 7946
EMPRES.cln %>% 
   group_by(Year) %>%
   summarise(no_rows = length(Year))
## # A tibble: 15 x 2
##     Year no_rows
##    <int>   <int>
##  1  2004     981
##  2  2005     369
##  3  2006     757
##  4  2007     378
##  5  2008     509
##  6  2009     115
##  7  2010     109
##  8  2011     419
##  9  2012     210
## 10  2013     248
## 11  2014     193
## 12  2015     911
## 13  2016    1239
## 14  2017    1401
## 15  2018     107
EMPRES.cln$Dom = grepl("domestic", EMPRES.cln$Spp)

#EMPRES.cln$LP1 = grepl("LPAI", EMPRES.cln$AI)
#EMPRES.cln$LP2 = grepl("H5N8", EMPRES.cln$AI)
#EMPRES.cln$LP3 = grepl("H5N1", EMPRES.cln$AI)
#EMPRES.cln$LP4 = grepl("H5N2", EMPRES.cln$AI)
#EMPRES.cln$LP5 = grepl("H7N3", EMPRES.cln$AI)
EMPRES.cln$H5 = grepl("H5", EMPRES.cln$AI)
EMPRES.cln$H7 = grepl("H7", EMPRES.cln$AI)


EMPRES.cln %>% 
   group_by(Dom) %>%
   summarise(no_rows = length(Dom))
## # A tibble: 2 x 2
##   Dom   no_rows
##   <lgl>   <int>
## 1 FALSE     374
## 2 TRUE     7572
E.df = EMPRES.cln %>%
          mutate(OBS = 1,
                 Source = "EMPRES") %>%
          filter(Year >= 2004 & Year <= 2018,
                 Dom == "TRUE") %>%
          select(Long, Lat, OBS, H5, H7, AI, Spp, ObsDate, Source)

E.df$Source = ifelse(E.df$ObsDate >= as.Date("2018-02-01"), "EmValidate", E.df$Source)

Em.pnts = SpatialPointsDataFrame(E.df[,c("Long", "Lat")], E.df)
proj4string(Em.pnts) = proj4string(NAmer)

Em.pnts$Domain = is.na(over(Em.pnts, NAmer))
Em.pnts = subset(Em.pnts, Domain == "FALSE")

Em.pnts@data = Em.pnts@data %>% select(-Domain)


Em.pnts@data %>%
  group_by(Source) %>%
  summarise(Count = length(Source))
## # A tibble: 2 x 2
##   Source     Count
##   <chr>      <int>
## 1 EMPRES       202
## 2 EmValidate     8
#Em.pnts$AI = as.factor(Em.pnts$AI)
#levels(Em.pnts$AI) = c("H5N2 LP", "H7N3 HP", "H7N3 LP", "H7N7 LP", "H7N8 LP", "H7N9 HP", "H7N9 LP", "Other LP")

USDA
Get Additional Poultry Events (2014-2015)

F1.df = read.csv("./US_AI_2015/ai_022718B.csv",
                    stringsAsFactors=FALSE,
                    header = TRUE, sep=",") %>%
                mutate(OBS = 1,
                       Source = "USDA",
                       ObsDate = as.Date(Confirmation_Date, format = "%m/%d/%Y")) %>%
                select(Long, Lat, Species, ObsDate, Avian_Influenza_Subtype, OBS, Source)

F1.df$H5 = grepl("H5", F1.df$Avian_Influenza_Subtype)
F1.df$H7 = grepl("H7", F1.df$Avian_Influenza_Subtype)

names(F1.df) = c("Long", "Lat", "Spp", "ObsDate", "AI", "OBS", "Source", "H5", "H7")

AI_Events.pnt = rbind(Em.pnts@data, F1.df)

AI_Events.pnt = SpatialPointsDataFrame(AI_Events.pnt[,c("Long","Lat")], AI_Events.pnt)
proj4string(AI_Events.pnt) = proj4string(NAmer)

AI_Events.pnt = subset(AI_Events.pnt,  H5 == TRUE | H7==TRUE)

AI_Events.pnt$SubType = ifelse(AI_Events.pnt$AI == "EA -H5N8" | AI_Events.pnt$AI == "EA-H5N8", "H5N8 HPAI",
                           ifelse(AI_Events.pnt$AI == "EA/AM-H5N2", "H5N2 HPAI",
                                  AI_Events.pnt$AI))

chVals = as.data.frame(
          AI_Events.pnt@data %>%
            group_by(SubType) %>%
            summarise(Cnt = length(AI)))
chVals
##      SubType Cnt
## 1  H5N1 HPAI   1
## 2  H5N1 LPAI   2
## 3  H5N2 HPAI 188
## 4  H5N2 LPAI   5
## 5  H5N8 HPAI   7
## 6  H7N1 LPAI   2
## 7  H7N3 HPAI 146
## 8  H7N3 LPAI   3
## 9  H7N7 LPAI   1
## 10 H7N8 LPAI   9
## 11 H7N9 HPAI   2
## 12 H7N9 LPAI   5
sum(chVals$Cnt)
## [1] 371

Surveillance Data: AI Database (https://www.fludb.org)

bwtw_ai.df = read.csv("C:/Users/humph173/Documents/LabWork/Patuxent/AI_database/AI_database.csv",
                    stringsAsFactors=FALSE,
                    header = TRUE, sep=",") %>%
                mutate(OBS = 1,
                       ObsDate = as.Date(Col_Date, 
                                 format = "%d/%m/%Y"),
                       Long = as.numeric(Longitude),
                       Lat = as.numeric(Latitude),
                       AI = Subtype,
                       Spp = Host_spp,
                       Source = "AIDb",
                       SubType = AI) %>% 
                filter(is.na(Long) == FALSE,
                       is.na(Lat) == FALSE,
                       is.numeric(Long) == TRUE,
                       is.numeric(Lat) == TRUE) %>%
                select(Long, Lat, ObsDate, OBS, Spp, AI, Source, SubType)

bwtw_ai.df$H5 = grepl("H5", bwtw_ai.df$AI)
bwtw_ai.df$H7 = grepl("H7", bwtw_ai.df$AI)

BWTE.AI.df = as.data.frame(
                      bwtw_ai.df %>%
                        group_by(AI) %>%
                        summarise(Count = length(AI)))

bwtw_ai.df = bwtw_ai.df %>% filter(H5 == TRUE | H7 == TRUE)

Combine EMPRES and Surveillance

All.ai = rbind(AI_Events.pnt@data, bwtw_ai.df)

All.ai.pnt = SpatialPointsDataFrame(All.ai[,c("Long","Lat")], All.ai)
proj4string(All.ai.pnt) = proj4string(NAmer)

All.ai.pnt$Domain = is.na(over(All.ai.pnt, NAmer))
All.ai.pnt = subset(All.ai.pnt, Domain == "FALSE")

All.ai.pnt@data %>%
  group_by(Source) %>%
  summarise(Count = length(Source))
## # A tibble: 4 x 2
##   Source     Count
##   <chr>      <int>
## 1 AIDb         539
## 2 EMPRES       200
## 3 EmValidate     8
## 4 USDA         163

View Poultry Events (Outbreaks)

wmap_df = fortify(NAmer.reg)
## Regions defined for each Polygons
tmp.df = AI_Events.pnt@data %>% 
            filter(Source != "EmValidate") %>%
            select(Long, Lat, SubType)

myShapes = c(0:10)

names(myShapes) = levels(factor(tmp.df$SubType))

ShpScale = scale_shape_manual(name = "Strain", values = myShapes)

ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_point(data=tmp.df, 
                   aes(Long, Lat, 
                      group=NULL, fill=NULL,
                      # col = tmp.df$AI, 
                      shape=tmp.df$SubType),
                   size=2) +
        ShpScale +
        #colScale + 
        xlab("Longitude") +
        ylab("Latitude") +
        ggtitle("Historic A.I. Outbreaks") +
        scale_x_longitude(xmin=-110, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              panel.border = element_blank(),
              legend.title = element_text(size=16, face="bold"),
              axis.title.x = element_text(size=22, face="bold"),
              axis.title.y = element_text(size=22, face="bold"),
              plot.title = element_text(size=24, face="bold", hjust = 0.5))

View AI Surveillance Locations for Wild BWTE

AI.set.plt = All.ai.pnt@data %>% filter(Source == "AIDb")

wmap_df = fortify(NAmer.reg)
## Regions defined for each Polygons
tmp.df = AI.set.plt 

tmp.df$LP1 = grepl("H5N2", tmp.df$AI)
tmp.df$LP2 = grepl("H5N9", tmp.df$AI)
tmp.df$LP3 = grepl("H7N1", tmp.df$AI)
tmp.df$LP4 = grepl("H7N3", tmp.df$AI)
tmp.df$LP5 = grepl("H7N9", tmp.df$AI)

tmp.df$Strain = ifelse(tmp.df$LP1 == TRUE, "H5N2",
                    ifelse(tmp.df$LP2 == TRUE, "H5N9",
                         ifelse(tmp.df$LP3 == TRUE, "H7N1",
                              ifelse(tmp.df$LP4 == TRUE, "H7N3",
                                     ifelse(tmp.df$LP5 == TRUE, "H7N9",
                                            "Other LP")))))

tmp.df$Strain = as.factor(tmp.df$Strain)

myCol = c("black", "red", "darkgreen", "blue", "cyan", "brown")

names(myCol) = levels(factor(tmp.df$Strain))

myShapes = c(0:3,5,4)

names(myShapes) = levels(factor(tmp.df$Strain))


colScale = scale_colour_manual(name = "Strain", values = myCol, 
                               labels = c("H5N2", "H5N9", "H7N1", "H7N3", "H7N9", "Other LP"))

ShpScale = scale_shape_manual(name = "Strain", values = myShapes,
                              labels = c("H5N2", "H5N9", "H7N1", "H7N3", "H7N9", "Other LP"))

ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_point(data=tmp.df, 
                   aes(Long, Lat, 
                       group=tmp.df$Strain, fill=NULL,
                       col = tmp.df$Strain, 
                       shape=tmp.df$Strain),
                   size=4) +
        ShpScale +
        colScale + 
        xlab("Longitude") +
        ylab("Latitude") +
        ggtitle("BWTE AI Positive Samples \n (AIRD)") +
        scale_x_longitude(xmin=-110, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              panel.border = element_blank(),
              legend.title = element_text(size=16, face="bold"),
              axis.title.x = element_text(size=22, face="bold"),
              axis.title.y = element_text(size=22, face="bold"),
              plot.title = element_text(size=24, face="bold", hjust = 0.5))

7 Spatio-Temporal Modeling

7.1 Prep Covariates

Combine Data for Covariate Extraction

Node.set = dd.pnt@data[,1:2] %>% 
            mutate(Source = "Node",
                   OBS = 0,
                   AI = "none",
                   ObsDate = "none",
                   TStep.wk = ceiling(runif(dim(dd.pnt@data)[1], 1, 52))) %>%
            select(Long, Lat, OBS, Source, AI, ObsDate, TStep.wk)

Grd.set = Grd.pnt@data %>%
           mutate(Source = "Grd",
                   OBS = 0,
                   AI = "none",
                   ObsDate = "none",
                   TStep.wk = ceiling(runif(dim(Grd.pnt@data)[1], 1, 52))) %>%
           select(Long, Lat, OBS, Source, AI, ObsDate, TStep.wk)
  
Obs.set = All.ai.pnt@data %>%
           mutate(OBS = 1,
                  TStep.wk = week(ObsDate)) %>%
           select(Long, Lat, OBS, Source, AI, ObsDate, TStep.wk)


Cov.pnts = rbind(Node.set, Grd.set, Obs.set)
Cov.pnts = SpatialPointsDataFrame(Cov.pnts[,c("Long","Lat")], Cov.pnts)
proj4string(Cov.pnts) = proj4string(NAmer)

7.1.1 Extract

####Soils
SoilFiles = list.files(path="C:/Users/humph173/Documents/Soils/SoilGrids/", 
                      pattern="*.tif", full.names=T, recursive=FALSE)

Soil.stk = stack(SoilFiles)
names(Soil.stk) = paste("soil_", 1:17, sep="")

SoilM = as.data.frame(
                  extract(Soil.stk, 
                          spTransform(Cov.pnts, 
                          CRS(proj4string(Soil.stk))), 
                          method = "simple"))

###ENVIREM
EnvFiles = list.files(path="C:/Users/humph173/Documents/ENVIREM", 
                      pattern="*.tif", full.names=T, recursive=FALSE)


##Topographical
Slope = raster("./Arc/RedRast/Slope.tif")
DEM = raster("./Arc/NA_DEM/NA_dem.tif")

Top.stk = stack(Slope, DEM)
names(Top.stk) = c("Slope", "DEM")

Top1M = as.data.frame(
                  extract(Top.stk, 
                          spTransform(Cov.pnts, 
                          CRS(proj4string(Top.stk))), 
                          method = "simple"))


Top.stk2 = stack(EnvFiles[17:18])
names(Top.stk2) = c("CTI", "TRI")

Top2M = as.data.frame(
                  extract(Top.stk2, 
                          spTransform(Cov.pnts, 
                          CRS(proj4string(Top.stk2))), 
                          method = "simple"))

##Poultry
GlobChkn = raster("C:/Users/humph173/Documents/LabWork/Patuxent/GlobPoultry/livestock_CHICKENS/Glb_chkAD_2006_paper.tif")
GlobDuck = raster("C:/Users/humph173/Documents/LabWork/Patuxent/GlobPoultry/DUCKS_global/GLb_Ducks_CC2006_AD.tif")

Cov.pnts$GlobChkn = extract(GlobChkn, 
                            spTransform(Cov.pnts, 
                            CRS(proj4string(GlobChkn))), 
                            method = "simple")

Cov.pnts$GlobChkn[is.na(Cov.pnts$GlobChkn)] = mean(Cov.pnts$GlobChkn, na.rm=T)
Cov.pnts$GlobChkn = Cov.pnts$GlobChkn/1000

Cov.pnts$GlobDuck = extract(GlobDuck, 
                            spTransform(Cov.pnts, 
                            CRS(proj4string(GlobDuck))), 
                            method = "simple")

Cov.pnts$GlobDuck[is.na(Cov.pnts$GlobDuck)] = mean(Cov.pnts$GlobDuck, na.rm=T)
Cov.pnts$GlobDuck = Cov.pnts$GlobDuck/100

##Human Population
Pop = raster("C:/Users/humph173/Documents/LabWork/PopDens/gpw_v4_population_density_rev10_2015_30_sec.tif")
Cov.pnts$Pop = extract(Pop, 
                      spTransform(Cov.pnts, 
                      CRS(proj4string(Pop))), 
                      method = "simple")

Cov.pnts$Pop[is.na(Cov.pnts$Pop)] = mean(Cov.pnts$Pop, na.rm=T)
Cov.pnts$Pop = Cov.pnts$Pop/100


###Land cover
GlobCover = raster("C:/Users/humph173/Documents/Michigan_State/Marten/Marten_SDM/Env/Globcover2009_V2.3_Global_/GLOBCOVER_L4_200901_200912_V2.3.tif")

Cov.pnts$LC = as.character(
                extract(GlobCover, 
                spTransform(Cov.pnts, 
                CRS(proj4string(GlobCover))), 
                method = "simple"))

Cov.pnts$LC[is.na(Cov.pnts$LC)] = "200" 
levels(as.factor(Cov.pnts$LC))


#Distance to water
GLCcdm = raster("./Arc/LC/WetDist/GLCcdm.tif") 

Cov.pnts$wDs = as.numeric(
                     extract(GLCcdm, 
                     spTransform(Cov.pnts, 
                     CRS(proj4string(GLCcdm))), 
                     method = "simple"))

range(Cov.pnts$wDs, na.rm=T)

Cov.pnts$wDs[is.na(Cov.pnts$wDs)] = mean(Cov.pnts$wDs, na.rm=T)
Cov.pnts$wDsR = round(Cov.pnts$wDs, 0) #rounding to whole km
range(Cov.pnts$wDsR)

Cov.pnts$wDsR[Cov.pnts$wDsR>400] = 400
range(Cov.pnts$wDsR)

#Nearest AI Outbreak
NN.pnt = spTransform(Cov.pnts, nProj)

By = as.factor(NN.pnt$OBS)
P.pp = as.ppp(NN.pnt)
NN = as.data.frame(nndist(P.pp, by=By, k = 1))
Cov.pnts$nOutBrk = NN[,"1"]

Scale and Center

Cov.var = cbind(SoilM, Top1M, Top2M)

Cov.df = Cov.var

for(i in 1:ncol(Cov.df)){
  
      Cov.df[,i][is.na(Cov.df[,i])] = mean(Cov.df[,i], na.rm=T)
       
      Cov.df[,i] = as.numeric(scale(Cov.df[,i], scale = T, center = T))
}

Cov.df = as.data.frame(Cov.df)
names(Cov.df) = paste("s", names(Cov.var), sep="_")

Combine Covariates

Cov.pnts@data = cbind(Cov.pnts@data, Cov.df)

7.1.2 Discriminant Functions

Summarising data attributes to reduce collinearity.

Subset Covariate Groups

DF.set = subset(Cov.pnts, Source == "Node" | Source == "EMPRES" | Source == "USDA")

DF.set$Domain = is.na(over(DF.set, NAmer))
DF.set = subset(DF.set, Domain == "FALSE")
###Soils
dapc.s = DF.set@data[,16:32]

###Topographical
dapc.e = DF.set@data[,33:36]

DF.set$Group = ifelse(DF.set$Dur >= 0.01, 1, 0)

Soils

dapc.sR = dapc(dapc.s, 
              DF.set$Group, 
              n.da=5, n.pca=17)

###Conduct DAPC
set.seed(1976)
DAPC.sR = dapc(dapc.s, 
              DF.set$Group, 
              n.da=5, n.pca=9)

###Loadings (Climate)
Ldngs.df = as.data.frame(DAPC.sR$var.contr) %>%
                        mutate(Variable = rownames(DAPC.sR$var.contr))

Ldngs.m = melt(Ldngs.df, "Variable")

lds.plt = ggplot(Ldngs.m, aes(Variable, value)) +
                 geom_bar(aes(x=Variable, y=value, 
                              col=Variable, fill=Variable), 
                          stat="identity") +
                 ylab("Variable Contribution (proportion)") +
                 theme_classic() +
                 ggtitle("DAPC Variables") +
                 guides(col = guide_legend(ncol = 16)) +
                 theme(axis.title.x=element_blank(),
                        axis.text.x=element_blank(),
                        axis.text.y=element_text(size=16, face="bold"),
                        axis.ticks.x=element_blank(),
                        panel.border = element_rect(color = "black", fill = NA, size = 1),
                        strip.text.x = element_text(size=16, face="bold"),
                        axis.title.y = element_text(size=16, face="bold"),
                        legend.title = element_text(size=16, face="bold"),
                        legend.position="bottom")

lds.plt

###Predict Grid 
pred.grdc = predict.dapc(DAPC.sR, newdata=Cov.pnts@data[,16:32])

Cov.pnts$SoilF = as.numeric(pred.grdc$ind.scores)

#Soil.dfr = rasterize(Cov.pnts, 
#                     Domain.r,
#                     "SoilF",
#                     background = NA)

#writeRaster(Soil.dfr, "./StopOver_out/DAPC_Env/SoilDF.tif", overwrite=T)

##Corrplot
dapc.cor = Cov.pnts@data[,16:32]
corrplot(cor(dapc.cor))

Soils.cor = as.data.frame(matrix(nrow = 17, ncol=5))
names(Soils.cor) = c("Factor", "r", "CI.l", "CI.h", "p.val")

for(i in 1:dim(dapc.cor)[2]){
      
  Focal.v = Cov.pnts$SoilF
  Chal.v = dapc.cor[,i]
  
  Cor.test = cor.test(Focal.v, Chal.v)
  
  p.stat = Cor.test$p.value
  cor.v = Cor.test$estimate
  ci.l= Cor.test$conf.int[1]
  ci.h = Cor.test$conf.int[2]
  
  cor.name = substr(SoilFiles[i], 45 , 50)
  
  Soils.cor[i, 1] = cor.name
  Soils.cor[i, 2] = cor.v
  Soils.cor[i, 3] = ci.l
  Soils.cor[i, 4] = ci.h
  Soils.cor[i, 5] = p.stat
  
}

Soils.cor$Label = paste(round(Soils.cor$r, 2)," (", round(Soils.cor$CI.l,2), ", ",round(Soils.cor$CI.h,2),")", sep="")

Soils.cor$Desc = "here"

Soils.table = Soils.cor %>% select(Factor, Label, Desc)

#SelMod.fx = xtable(Soils.table)
#print(SelMod.fx, include.rownames = F)

Soils.tableX = Soils.table[,1:2]
names(Soils.tableX) = c("Attribute", "Correlation")
kable(Soils.tableX, 
      caption = "Soils Table") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Soils Table
Attribute Correlation
AWCh1_ -0.21 (-0.21, -0.21)
AWCh2_ -0.2 (-0.2, -0.19)
AWCh3_ -0.2 (-0.2, -0.2)
AWCtS_ -0.46 (-0.46, -0.46)
BLDFIE 0.48 (0.47, 0.48)
CECSOL -0.23 (-0.23, -0.23)
CLYPPT 0.18 (0.18, 0.19)
CRFVOL -0.68 (-0.68, -0.68)
OCDENS 0.2 (0.19, 0.2)
OCSTHA -0.21 (-0.21, -0.21)
ORCDRC -0.32 (-0.32, -0.31)
PHIHOX 0.28 (0.28, 0.28)
PHIKCL 0.23 (0.23, 0.23)
SLTPPT 0.38 (0.37, 0.38)
SNDPPT -0.44 (-0.44, -0.44)
TEXMHT -0.08 (-0.08, -0.08)
WWP1_M -0.15 (-0.15, -0.14)

Topographical

dapc.eR = dapc(dapc.e, 
              DF.set$Group, 
              n.da=5, n.pca=4)

###Conduct DAPC
set.seed(1976)
dapc.eR = dapc(dapc.e, 
              DF.set$Group, 
              n.da=5, n.pca=4)


###Loadings (Climate)
Ldngs.df = as.data.frame(dapc.eR$var.contr) %>%
                        mutate(Variable = rownames(dapc.eR$var.contr))

Ldngs.m = melt(Ldngs.df, "Variable")

lds.plt = ggplot(Ldngs.m, aes(Variable, value)) +
                 geom_bar(aes(x=Variable, y=value, 
                              col=Variable, fill=Variable), 
                          stat="identity") +
                 ylab("Variable Contribution (proportion)") +
                 theme_classic() +
                 ggtitle("DAPC Variables") +
                 guides(col = guide_legend(ncol = 16)) +
                 theme(axis.title.x=element_blank(),
                        axis.text.x=element_blank(),
                        axis.text.y=element_text(size=16, face="bold"),
                        axis.ticks.x=element_blank(),
                        panel.border = element_rect(color = "black", fill = NA, size = 1),
                        strip.text.x = element_text(size=16, face="bold"),
                        axis.title.y = element_text(size=16, face="bold"),
                        legend.title = element_text(size=16, face="bold"),
                        legend.position="bottom")

lds.plt

###Predict Grid 
pred.grdc = predict.dapc(dapc.eR, newdata=Cov.pnts@data[,33:36])

Cov.pnts$TopoF = as.numeric(pred.grdc$ind.scores)

#Topo.dfr = rasterize(Cov.pnts, 
#                     Domain.r,
#                     "TopoF",
#                     background = NA)

#writeRaster(Topo.dfr, "./StopOver_out/DAPC_Env/TopoDF.tif", overwrite=T)

dapc.cor = Cov.pnts@data[,33:36]
corrplot(cor(dapc.cor))

Soils.cor = as.data.frame(matrix(nrow = 4, ncol=5))
names(Soils.cor) = c("Factor", "r", "CI.l", "CI.h", "p.val")

for(i in 1:dim(dapc.cor)[2]){
      
  Focal.v = Cov.pnts$TopoF
  Chal.v = dapc.cor[,i]
  
  Cor.test = cor.test(Focal.v, Chal.v)
  
  p.stat = Cor.test$p.value
  cor.v = Cor.test$estimate
  ci.l= Cor.test$conf.int[1]
  ci.h = Cor.test$conf.int[2]
  
  cor.name = names(dapc.e)[i]
  
  Soils.cor[i, 1] = cor.name
  Soils.cor[i, 2] = cor.v
  Soils.cor[i, 3] = ci.l
  Soils.cor[i, 4] = ci.h
  Soils.cor[i, 5] = p.stat
  
}

Soils.cor$Label = paste(round(Soils.cor$r, 2)," (", round(Soils.cor$CI.l,2), ", ",round(Soils.cor$CI.h,2),")", sep="")

Soils.cor$Desc = "here"

Soils.table = Soils.cor %>% select(Factor, Label, Desc)

#SelMod.fx = xtable(Soils.table)
#print(SelMod.fx, include.rownames = F)

Soils.tableX = Soils.table[,1:2]
names(Soils.tableX) = c("Attribute", "Correlation")
kable(Soils.tableX, 
      caption = "Topographic Table") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Topographic Table
Attribute Correlation
s_Slope -0.51 (-0.52, -0.51)
s_DEM -0.59 (-0.59, -0.58)
s_CTI 0.93 (0.93, 0.93)
s_TRI -0.52 (-0.52, -0.51)

7.1.3 Collinearity Diagnostics

library(perturb)
## 
## Attaching package: 'perturb'
## The following object is masked from 'package:raster':
## 
##     reclassify
Colin.df = Cov.pnts@data %>%
            select(SoilF, TopoF)

CorCov = cor(Colin.df)

corrplot(CorCov)

CI = colldiag(CorCov)
CI
## Condition
## Index    Variance Decomposition Proportions
##          intercept SoilF TopoF
## 1  1.000 1.000     0.076 0.076
## 2  2.753 0.000     0.924 0.924

Separate Datasets

Cov.pnts$GlobPoultry = Cov.pnts$GlobChkn + Cov.pnts$GlobDuck

Mod.set = subset(Cov.pnts, Source == "Node" | Dur >= 1)

dd.pnt$SGlobPoultry = (dd.pnt$GChic/1000) + (dd.pnt$GDuck/100)
Node.set = subset(Cov.pnts, Source == "Node")
Node.set@data = cbind(Node.set@data, dd.pnt@data[, c("Domain", "Area", "Exp", "SGlobPoultry")])
Node.set$Dur = dd.pnt$Dur

#OutBrks.set = Si(subset(Cov.pnts, Source == "EMPRES" | Source == "Node" | Source == "USDA"))

OutBrks.prep = (subset(Cov.pnts, Source == "EMPRES" | Source == "Node" | Source == "USDA"))

Mod.pnts.obs = subset(OutBrks.prep, OBS == 1)
OutBrks.node = subset(OutBrks.prep, OBS == 0)

library(caret)
set.seed(1976)
strata.index = createDataPartition(Mod.pnts.obs$Source, p = 0.80, list = FALSE)
train = Mod.pnts.obs@data[strata.index,]
test  = Mod.pnts.obs@data[-strata.index,]

OutBrks.set = as.data.frame(rbind(train, OutBrks.node@data))

OutBrks.set = SpatialPointsDataFrame(OutBrks.set[,c("Long","Lat")], OutBrks.set)
proj4string(OutBrks.set) = proj4string(OutBrks.prep)

Grid.pnts = subset(Cov.pnts, Source == "Grd")

Valid.set = subset(Cov.pnts, Source == "EmValidate" | Source == "AIDb")

7.2 3D Cartesian Coordinates

locs = cbind(Node.set@coords[,1], Node.set@coords[,2])

locs = inla.mesh.map(locs, #Project to 3D Cartesian
                     projection = "longlat", 
                     inverse = TRUE)

A.node = inla.spde.make.A(mesh.dom, #Convert to matrix
                          alpha = 2,
                          loc=locs)

A.node3 = A.node2 = A.node #Extra copies of projection matrix

#Points
locs = cbind(OutBrks.set@coords[,1], OutBrks.set@coords[,2])

locs = inla.mesh.map(locs, 
                     projection = "longlat", 
                     inverse = TRUE)

A.obs = inla.spde.make.A(mesh.dom, 
                          alpha = 2,
                          loc=locs)

A.obs2 = A.obs

7.3 Spatial Priors and Indices

spde0 = inla.spde2.pcmatern(mesh.dom, alpha = 2, #PC (penalized complexity) spatial priors
                                    prior.range=c(1, 0.5),
                                    prior.sigma=c(1, 0.5),
                                    constr = TRUE)
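
#Note (clarification): inla.spde2.pcmatern() uses penalized-complexity (PC) priors,
#not flat priors; prior.range = c(1, 0.5) encodes P(range < 1) = 0.5 and
#prior.sigma = c(1, 0.5) encodes P(sigma > 1) = 0.5, both on the unit-sphere
#(one Earth radius) scale used by mesh.dom.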

field.bwte1 = inla.spde.make.index("field.bwte1", spde0$n.spde) #Spatial fields

field.bwte2 = inla.spde.make.index("field.bwte2", spde0$n.spde)

field.bwte3 = inla.spde.make.index("field.bwte3", spde0$n.spde) 

field.bwteX = inla.spde.make.index("field.bwteX", spde0$n.spde) #Copy of field (shared)

field.bwteXX = inla.spde.make.index("field.bwteXX", spde0$n.spde) #Copy of field (shared)

field.bwteXXX = inla.spde.make.index("field.bwteXXX", spde0$n.spde) #Copy of field (shared)

7.4 Latitudinal Displacement

Estimating weekly displacement.

My.lat.lu = as.data.frame(
              Argos_sp %>%
                group_by(LocWK, Animal_ID) %>%
                summarise(mnLat = min(Latitude),
                          mxLat = max(Latitude),
                          avgLat = mean(Latitude),
                          mdLat = median(Latitude)))

My.lat.lu$rnLat = My.lat.lu$mxLat - My.lat.lu$mnLat

My.lat.lu2 = as.data.frame(
                My.lat.lu %>%
                  group_by(LocWK) %>%
                  summarise(mnLat = min(mnLat),
                          mxLat = max(mxLat),
                          avgLat = mean(avgLat),
                          mdLat = median(mdLat),
                          mdsLat = mean(rnLat),
                          mddsLat = median(rnLat),
                          ScDs = mean(rnLat)))

My.lat.lu2$delta.lag = lag(My.lat.lu2$avgLat, 1)

My.lat.lu2$delta = My.lat.lu2$avgLat-My.lat.lu2$delta.lag


My.lat.lu2$delta[is.na(My.lat.lu2$delta)] = mean(My.lat.lu2$delta, na.rm=T)


My.lat.lu2$LocWKs = c(0:52)

OutBrks.set$Lat.disp = with(My.lat.lu2,
                              delta[match(OutBrks.set$TStep.wk, 
                                                   LocWK)])


OutBrks.set$Lat.dispS = Scale(OutBrks.set$Lat.disp) #Scaled version for modeling

ggplot(OutBrks.set@data, aes(TStep.wk, Lat.disp)) + geom_line()

7.5 Data Stacks

Organizing data as list objects

Data for BWTE occurrence probability.

##PF Stack
FE.df = Node.set@data #Copy raw data

FE.lst.1 = list(c(field.bwte1,#Spatial Field
                list(intercept1 = 1)), #Intercept
                list(SoilF = FE.df[,"SoilF"], #Soil discriminant
                     wDsR = FE.df[,"wDsR"], #Water distance
                     TopoF = FE.df[,"TopoF"])) #Topo discriminant

FE.df$Dur1 = ifelse(FE.df$Dur >= 1, 1, 0) #If resident for at least 1 day, then present

BiS.stk = inla.stack(data = list(StpOvr = FE.df$Dur1), #Base model
                                       A = list(A.node, 1), 
                                 effects = FE.lst.1,   
                                     tag = "pf.1")

Bi2.stk = inla.stack(data = list(Y = cbind(FE.df$Dur1, NA)), #Two-tiered copy, leaving "NA" for outbreak data
                                 A = list(A.node, 1), 
                           effects = FE.lst.1,   
                               tag = "pf.2")

Bi3.stk = inla.stack(data = list(Y = cbind(FE.df$Dur1, NA, NA)), #Three level model (Experimental only, not in manuscript)
                                 A = list(A.node, 1), 
                           effects = FE.lst.1,   
                               tag = "pf.2")

Data for BWTE Duration (Residence time).

#Dur Stack
FE.df$Dur2 = ifelse(FE.df$Dur > 1, FE.df$Dur, 0) #Dur = Residence time
FE.df$Dur2[FE.df$Dur2 == 0 ] = NA

FE.df$Exp2 = FE.df$Exp/true.radius.of.earth #Exposure (Area of neighborhoods)
FE.df$Exp2 = ifelse(is.na(FE.df$Dur2), NA, FE.df$Exp2)
#FE.df$Exp2 = ifelse(FE.df$Dur2 > 0 | FE.df$Domain == TRUE, 0, FE.df$Exp2)

DurS.stk = inla.stack(data = list(ResidT = FE.df$Dur2, #Base Model
                                  e = FE.df$Exp2),
                                  A = list(A.node2, 1), 
                            effects = FE.lst.1,   
                                tag = "dur.1")

Dur2.stk = inla.stack(data = list(Y = cbind(FE.df$Dur2, NA), #Joint Model
                                 e = FE.df$Exp2),
                                 A = list(A.node2, 1), 
                           effects = FE.lst.1,   
                               tag = "dur.2")

Dur3.stk = inla.stack(data = list(Y = cbind(FE.df$Dur2, NA, NA), #Experimental (not in manuscript)
                                 e = FE.df$Exp2),
                                 A = list(A.node2, 1), 
                           effects = FE.lst.1,   
                               tag = "dur.3")

Data for AI Outbreaks

##OB Stack
FE.df = OutBrks.set@data

FE.lst.3 = list(c(field.bwte3, #Spatial field for autocorrelation between outbreaks
                  field.bwteXX, #copy of field for correlation to BWTE telemetry
                   field.bwteXXX, #Third field (experimental, not in manuscript)
                list(intercept3 = 1)),
                list(Pop = FE.df[,"Pop"],
                     GlobPoultry = FE.df[,"GlobPoultry"], #Poultry abundance
                     nOutBrk = round(FE.df[,"nOutBrk"]/100, 0), #distance to nearest outbreak
                     Lat.disp = FE.df[,"Lat.dispS"], #Latitude displacement
                     TStep.wk = FE.df[,"TStep.wk"], #Weekly time steps
                     TStep2.wk = FE.df[,"TStep.wk"], #extra copy of above time steps
                     SubType = FE.df[,"AI"])) #Strain indicator

OBS.stk = inla.stack(data = list(OutBreak = FE.df$OBS), #data for base model
                                 A = list(A.obs, 1), 
                           effects = FE.lst.3,   
                               tag = "ob.1")

OB2.stk = inla.stack(data = list(Y = cbind(NA, FE.df$OBS)), #data for joint model
                                 A = list(A.obs, 1), 
                           effects = FE.lst.3,   
                               tag = "ob.2")

OB3.stk = inla.stack(data = list(Y = cbind(NA, NA, FE.df$OBS), #Three-level model (experimental, not in manuscript)  
                                  e = rep(NA, dim(FE.df)[1])),
                             A = list(A.obs, 1), 
                           effects = FE.lst.3,   
                               tag = "ob.3")

Combine data for joint models

Dur2.Jstk = inla.stack(Dur2.stk, OB2.stk) #BWTE residence time + AI outbreaks

StpOvr2.Jstk = inla.stack(Bi2.stk, OB2.stk) #BWTE occurrence probability + AI outbreaks

#Save copies to run on HPCC
#save(list=c("Dur2.Jstk", "StpOvr2.Jstk", "spde0"), file="./StopOver_out/HPC_092518/Mods120318.rda")
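An optional sanity check (a hedged sketch, not part of the original workflow) that the joint stack's two-column response and projector matrix line up before fitting:

dim(inla.stack.data(Dur2.Jstk)$Y) #rows x 2 responses (Gamma, binomial)
dim(inla.stack.A(Dur2.Jstk))      #rows x (mesh nodes + covariate columns)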

7.6 Execute Models

7.6.1 Base Model (Outbreaks, no covariates)

Frm.ob = OutBreak ~ -1 +  intercept3 + #intercept
                         f(field.bwte3, #Spatial field
                           model = spde0)
              
theta.jnc = c(-0.5162245, 0.3596890)

Model.OB = inla(Frm.ob, 
             data = inla.stack.data(OBS.stk, spde=spde0), 
             family = "binomial",
             verbose = TRUE,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(OBS.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.mode = list(restart = TRUE, theta = theta.jnc),
             control.inla = list(strategy = "gaussian", 
                                 int.strategy = "eb"),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE)) 
GetMets(Model.OB)
##   Metric    Tier.1
## 1    DIC 1075.2012
## 2   WAIC  979.8504
## 3   lCPO    0.0762
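GetMets() is a summary helper defined outside this section; a minimal sketch consistent with the output above (assumptions: DIC, WAIC, and the mean negative log-CPO, with an additional column per likelihood for joint models) is:

GetMets.sketch = function(mod) { #hedged sketch; the actual helper may differ
  data.frame(Metric = c("DIC", "WAIC", "lCPO"),
             Tier.1 = round(c(mod$dic$dic,
                              mod$waic$waic,
                              mean(-log(mod$cpo$cpo), na.rm = TRUE)), 4))
}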

Get Random Field (plotted later)

Pred.pnts = Grid.pnts
ModResult = Model.OB
ModMesh = mesh.dom

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF = drop(Ap %*% ModResult$summary.random$field.bwte3$mean)

Mod.OB.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "RF", 
                    background = NA)

Pred.pnts$SD = drop(Ap %*% ModResult$summary.random$field.bwte3$sd)

Mod.OBsd.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "SD", 
                    background = NA)
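The same random-field extraction is repeated for each model below; a hedged convenience wrapper (an optional sketch, not part of the original workflow; assumes the Grid.pnts, mesh.dom, and Domain.r objects used above) would be:

rf_to_raster = function(mod, field = "field.bwte3", stat = "mean",
                        pts = Grid.pnts, mesh = mesh.dom, template = Domain.r) {
  #Project prediction points to the mesh coordinate system
  loc = inla.mesh.map(cbind(pts$Long, pts$Lat),
                      projection = "longlat",
                      inverse = TRUE)
  #Interpolate the posterior summary of the named spatial field to the points
  A = inla.spde.make.A(mesh, loc = loc)
  pts$val = drop(A %*% mod$summary.random[[field]][[stat]])
  #Rasterize to the mapping template
  rasterize(pts, template, "val", background = NA)
}

#Example (equivalent to Mod.OB.r and Mod.OBsd.r above):
#Mod.OB.r   = rf_to_raster(Model.OB, stat = "mean")
#Mod.OBsd.r = rf_to_raster(Model.OB, stat = "sd")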

7.6.2 Model (Outbreaks, clustering)

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 

Frm.ob = OutBreak ~ -1 +  intercept3 + 
                        f(field.bwte3, 
                          model = spde0) +
                        f(nOutBrk, #Distance to outbreak (Disease clusters)
                           model = "rw1",
                           hyper= pcprior)
              
theta.jnc = c(-1.27082034, 0.32640370, -0.08502562)

Model.OBS = inla(Frm.ob, 
             data = inla.stack.data(OBS.stk, spde=spde0), 
             family = "binomial",
             verbose = TRUE,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(OBS.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.mode = list(restart = TRUE, theta = theta.jnc),
             control.inla = list(strategy = "gaussian", 
                                 int.strategy = "eb"),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE)) 
GetMets(Model.OBS)
##   Metric   Tier.1
## 1    DIC 836.2283
## 2   WAIC 817.4019
## 3   lCPO   0.0427

Get Random Field (plotted later)

Pred.pnts = Grid.pnts
ModResult = Model.OBS
ModMesh = mesh.dom

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF = drop(Ap %*% ModResult$summary.random$field.bwte3$mean)

Mod.OBS.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "RF", 
                    background = NA)

Pred.pnts$SD = drop(Ap %*% ModResult$summary.random$field.bwte3$sd)

Mod.OBSsd.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "SD", 
                    background = NA)

7.6.3 Base Model (Outbreaks, space-time)

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 
pcprior2 = list(prec = list(prior="pc.prec", param = c(1, 0.001))) 
LCPrior = list(theta=list(prior = "normal", param=c(0, 5)))


Frm.ob = OutBreak ~ -1 +  intercept3 + 
                        f(field.bwte3, 
                          model = spde0) + #spatial field
                        f(nOutBrk,         #Disease clusters
                           model = "rw1",
                           scale.model=TRUE,
                           hyper= pcprior) +
                        f(TStep.wk, Lat.disp, #Weekly RW1 weighted by latitudinal displacement
                           model = "rw1",
                           scale.model = TRUE,
                           constr = TRUE,
                           hyper= LCPrior) 



theta.jnc = c(-1.2542932, 0.4121492, -1.9887237,0.2510953)
              
              
Model.OBst = inla(Frm.ob, 
             data = inla.stack.data(OBS.stk, spde=spde0), 
             family = "binomial",
             verbose = TRUE,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(OBS.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.inla = list(strategy="gaussian", 
                                 int.strategy = "eb"),
             control.mode = list(restart = TRUE, theta = theta.jnc),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
GetMets(Model.OBst)
##   Metric   Tier.1
## 1    DIC 730.1631
## 2   WAIC 709.9028
## 3   lCPO   0.0368

7.6.4 Base Model (Outbreaks, space-time, covariates)

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 
LCPrior = list(theta=list(prior = "normal", param=c(0, 0.5)))


Frm.ob = OutBreak ~ -1 +  intercept3 + 
                        f(field.bwte3, 
                          model = spde0) +
                        f(nOutBrk,
                           model = "rw1",
                           scale.model=TRUE,
                           hyper= pcprior) +
                        f(TStep.wk, Lat.disp,
                           model = "rw1",
                           scale.model = TRUE,
                           constr = TRUE,
                           hyper= LCPrior) +
                       GlobPoultry + Pop #Poultry and human abundance

theta.jnc = c(-1.2542932, 0.4121492, -1.9887237,0.2510953)
              
              
Model.OBcov = inla(Frm.ob, 
             data = inla.stack.data(OBS.stk, spde=spde0), 
             family = "binomial",
             verbose = TRUE,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(OBS.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.inla = list(strategy="gaussian", 
                                 int.strategy = "eb"),
             control.mode = list(restart = TRUE, theta = theta.jnc),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
GetMets(Model.OBcov)
##   Metric   Tier.1
## 1    DIC 729.1005
## 2   WAIC 709.5684
## 3   lCPO   0.0368

Get Random Field (plotted later)

Pred.pnts = Grid.pnts
ModResult = Model.OBcov

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF = drop(Ap %*% ModResult$summary.random$field.bwte3$mean)

Mod.OBc.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "RF", 
                    background = NA)

Pred.pnts$SD = drop(Ap %*% ModResult$summary.random$field.bwte3$sd)

Mod.OBcsd.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "SD", 
                    background = NA)

7.6.5 Base Model (Duration)

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 
pcprior2 = list(prec = list(prior="pc.prec", param = c(3, 0.1))) 
LCPrior = list(theta=list(prior = "normal", param=c(0, 0.5)))

Frm.ob = ResidT ~ -1 +  intercept1 + 
                        f(field.bwte1, 
                          model = spde0) +
                        SoilF + TopoF + wDsR #Soil topo and water

#theta.jnc  = Model.dur$internal.summary.hyperpar$mean
theta.jnc = c(0.76305501, -2.32965895, -0.06289494, 3.29189990)
              
              
Model.dur = inla(Frm.ob, 
             data = inla.stack.data(DurS.stk, spde=spde0), 
             family = "gamma",
             verbose = TRUE,
             E = inla.stack.data(DurS.stk)$e,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(DurS.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.inla = list(strategy="gaussian", 
                                 int.strategy = "eb"),
             #control.mode = list(restart = TRUE, theta = theta.jnc),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
GetMets(Model.dur)
##   Metric    Tier.1
## 1    DIC 1526.2284
## 2   WAIC 1540.9477
## 3   lCPO    3.6019

7.6.6 Base Model (Occurrence)

Frm.ob = StpOvr ~ -1 +  intercept1 + 
                        f(field.bwte1, 
                          model = spde0) +
                        SoilF + TopoF + wDsR

#theta.jnc  = Model.dur$internal.summary.hyperpar$mean
theta.jnc = c(0.76305501, -2.32965895, -0.06289494, 3.29189990)
              
              
Model.occur = inla(Frm.ob, 
             data = inla.stack.data(BiS.stk, spde=spde0), 
             family = "binomial",
             verbose = TRUE,
             E = inla.stack.data(DurS.stk)$e,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(BiS.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.inla = list(strategy="gaussian", 
                                 int.strategy = "eb"),
             #control.mode = list(restart = TRUE, theta = theta.jnc),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
GetMets(Model.occur)
##   Metric    Tier.1
## 1    DIC 1642.2058
## 2   WAIC 1515.8507
## 3   lCPO    0.1272

7.7 Point Thinning

As a possible alternative to approximating farm-to-farm transmission by distance (disease clusters), point thinning by virus subtype and jurisdictional boundary is also explored.

Thin Points

OutBrks.set2 = OutBrks.set #Outbreak locations (copy)

NAmer.reg$Reg.ID = 1:nrow(NAmer.reg@data) #Assign ID to locations (states)

RegDomain = extract(NAmer.reg, OutBrks.set2, method="simple") #Associate states to outbreaks

OutBrks.set2$Reg.ID = RegDomain[,"Reg.ID"] #Add state ID

OutBrks.set2$dups = duplicated(OutBrks.set2@data[, c("AI","Reg.ID")]) #ID duplicates that have the same State and Virus

OutBrks.set2 = subset(OutBrks.set2, dups == FALSE | OBS == 0) #Remove duplicates

sum(OutBrks.set2$OBS) #How many remain?
## [1] 44
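As an optional check (a hedged sketch), the retained events can be tallied by subtype:

table(subset(OutBrks.set2, OBS == 1)$AI) #count of thinned events per H5/H7 subtype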

View Thinned Points

wmap_df = fortify(NAmer.reg)
## Regions defined for each Polygons
tmp.df = OutBrks.set2@data %>% 
            filter(OBS == 1) %>%
            select(Long, Lat)

ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_point(data=tmp.df, 
                   aes(Long, Lat, 
                      group=NULL, fill=NULL),
                      col = "red", 
                      size=4) +
        xlab("Longitude") +
        ylab("Latitude") +
        ggtitle("Thinned A.I. Outbreaks") +
        scale_x_longitude(xmin=-150, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              plot.background = element_blank(),
              panel.border = element_blank(),
              #legend.direction = "horizontal",
              #legend.position = "bottom",
              strip.text = element_text(size=16, face="bold"),
              strip.background = element_blank(),
              legend.position=c(0.11, 0.32),
              #legend.key.size = unit(1.5,"line"),
              legend.text = element_text(size=14, face="bold"),
              legend.title = element_text(size=18, face="bold"),
              axis.text.x = element_text(size=14, face="bold"),
              axis.text.y =element_text(size=14, face="bold"),
              axis.title.x = element_text(size=16, face="bold"),
              axis.title.y =element_text(size=16, face="bold"),
              plot.title = element_blank())

Project thinned points and organize as a list object.

#Points
locs = cbind(OutBrks.set2@coords[,1], OutBrks.set2@coords[,2])

locs = inla.mesh.map(locs, 
                     projection = "longlat", 
                     inverse = TRUE)

A.obsT = inla.spde.make.A(mesh.dom, 
                          alpha = 2,
                          loc=locs)

A.obs2T = A.obsT

##OB Stack
FE.df2 = OutBrks.set2@data

FE.lst.4 = list(c(field.bwte3,
                  field.bwteXX,
                   field.bwteXXX,
                list(intercept3 = 1)),
                list(Pop = FE.df2[,"Pop"],
                     GlobPoultry = FE.df2[,"GlobPoultry"],
                     #nOutBrk = round(FE.df2[,"nOutBrk"]/100, 0), #No Clustering
                     Lat.disp = FE.df2[,"Lat.dispS"],
                     TStep.wk = FE.df2[,"TStep.wk"]))

OBST.stk = inla.stack(data = list(OutBreak = FE.df2$OBS),
                                 A = list(A.obsT, 1), 
                           effects = FE.lst.4,   
                               tag = "obt.1")

Point-thinned Model

Frm.ob = OutBreak ~ -1 +  intercept3 + 
                        f(field.bwte3, 
                          model = spde0) +
                        f(TStep.wk, Lat.disp,
                           model = "rw1",
                           scale.model = TRUE,
                           constr = TRUE,
                           hyper= LCPrior) +
                       GlobPoultry + Pop
              
              
theta.jnc = c(-1.27082034, 0.32640370, 1.14758049)

Model.OBT = inla(Frm.ob, 
             data = inla.stack.data(OBST.stk, spde=spde0), 
             family = "binomial",
             verbose = TRUE,
             control.fixed = list(prec = 0.1, 
                                  prec.intercept = 0.01), 
             control.predictor = list(
                                    A = inla.stack.A(OBST.stk), 
                                    compute = TRUE, 
                                    link = 1), 
             control.mode = list(restart = TRUE, theta = theta.jnc),
             control.inla = list(strategy="gaussian", 
                                 int.strategy = "eb"),
             control.results = list(return.marginals.random = TRUE,
                                    return.marginals.predictor = TRUE),
             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
summary(Model.OBT)
## 
## Call:
## c("inla(formula = Frm.ob, family = \"binomial\", data = inla.stack.data(OBST.stk, ",  "    spde = spde0), verbose = TRUE, control.compute = list(dic = TRUE, ",  "    cpo = TRUE, waic = TRUE), control.predictor = list(A = inla.stack.A(OBST.stk), ",  "    compute = TRUE, link = 1), control.inla = list(strategy = \"gaussian\", ",  "    int.strategy = \"eb\"), control.results = list(return.marginals.random = TRUE, ",  "    return.marginals.predictor = TRUE), control.fixed = list(prec = 0.1, ",  "    prec.intercept = 0.01), control.mode = list(restart = TRUE, ",  "    theta = theta.jnc))")
## 
## Time used:
##  Pre-processing    Running inla Post-processing           Total 
##          5.0608        213.7746          1.1936        220.0290 
## 
## Fixed effects:
##                mean     sd 0.025quant 0.5quant 0.975quant    mode kld
## intercept3  -7.2657 0.4899    -8.2274  -7.2657    -6.3048 -7.2657   0
## GlobPoultry  0.1287 0.0371     0.0558   0.1287     0.2016  0.1287   0
## Pop          0.0302 0.0125     0.0057   0.0302     0.0546  0.0302   0
## 
## Random effects:
## Name   Model
##  field.bwte3   SPDE2 model 
## TStep.wk   RW1 model 
## 
## Model hyperparameters:
##                          mean     sd 0.025quant 0.5quant 0.975quant   mode
## Range for field.bwte3  0.3666 0.1646     0.1630   0.3278     0.7899 0.2676
## Stdev for field.bwte3  2.3708 0.6387     1.4063   2.2712     3.8935 2.0816
## Precision for TStep.wk 0.8143 0.6490     0.1595   0.6371     2.5309 0.3888
## 
## Expected number of effective parameters(std dev): 42.23(0.00)
## Number of equivalent replicates : 227.28 
## 
## Deviance Information Criterion (DIC) ...............: 506.20
## Deviance Information Criterion (DIC, saturated) ....: 506.19
## Effective number of parameters .....................: 61.46
## 
## Watanabe-Akaike information criterion (WAIC) ...: 480.43
## Effective number of parameters .................: 33.18
## 
## Marginal log-Likelihood:  -312.02 
## CPO and PIT are computed
## 
## Posterior marginals for linear predictor and fitted values computed
GetMets(Model.OBT)
##   Metric   Tier.1
## 1    DIC 506.1951
## 2   WAIC 480.4262
## 3   lCPO   0.0254

Get Random Field (plotted later)

Pred.pnts = Grid.pnts
ModResult = Model.OBT

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF = drop(Ap %*% ModResult$summary.random$field.bwte3$mean)

Mod.OBt.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "RF", 
                    background = NA)


Pred.pnts$SD = drop(Ap %*% ModResult$summary.random$field.bwte3$sd)

Mod.OBtsd.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "SD", 
                    background = NA)

Compare Thinning vs. Clustering
Random fields and standard deviations.

CompRFs = stack(Mod.OBc.r, Mod.OBt.r)
names(CompRFs) = c("Cluster", "Thinning")

CompSDs = stack(Mod.OBcsd.r, Mod.OBtsd.r)
names(CompSDs) = c("Cluster", "Thinning")


rng = seq(-1.32, 3.78, 0.001)
mCols  = c("#A50026", "#D73027", "#F46D43", "#FDAE61",  "#ABD9E9", "#74ADD1", "#4575B4","#313695")
cr0 = rev(colorRampPalette(mCols)(n = 2000))
cr = colorRampPalette(c(cr0), 
         bias = 1.8)

Plt.rfs = levelplot(CompRFs, 
               layout=c(2, 1),
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               sub= "Compare Random Fields",
               col.regions = cr, at = rng,
               colorkey = list(
                               space = "bottom"), 
                               par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) 

Plt.rfs

rng = seq(0, 1.87, 0.001)
mCols  = brewer.pal(9, "PuBuGn")#[-c(1:2)]
cr0 = colorRampPalette(mCols)(n = 2000)
cr = colorRampPalette(c(cr0), 
         bias = 1.4)

Plt.rfs = levelplot(CompSDs, 
               layout=c(2, 1),
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               sub= "Compare Standard Deviation",
               col.regions = cr, at = rng,
               colorkey = list(
                               space = "bottom"), 
                               par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) 

Plt.rfs

7.8 Joint Model (Outbreaks, BWTE duration)

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 

Frm.JBi = Y ~ -1 +  intercept1 + #Intercept for residence time model
                    intercept3 + #intercept for outbreaks
                          f(field.bwte1, #spatial field for residence time
                              model = spde0) +
                          f(field.bwte3, #spatial field for outbreaks
                              model = spde0) +
                          f(field.bwteXX, #COPY of spatial field for residence time, shared with Outbreaks
                              copy = "field.bwte1", 
                              fixed = FALSE)  +
                          f(nOutBrk,      #Disease clusters
                    model = "rw1",
                    scale.model = TRUE,
                    hyper = pcprior) +
                          f(TStep.wk, Lat.disp, #Week x Displacement
                             model = "rw1",
                             scale.model = TRUE,
                             constr = TRUE,
                             hyper= LCPrior) +
                        SoilF + TopoF + wDsR + GlobPoultry + Pop #fixed effects

theta.jnc = c(0.7764397, -2.4866705, -0.1443130, -1.2197074, 0.2659467, -1.9934657, 0.4901098, 1.14)

Model.FJGamBi2TBT = inla(Frm.JBi, 
                                 #num.threads = 8,
                                 data = inla.stack.data(Dur2.Jstk, spde=spde0), 
                                 family = c("gamma", "binomial"), 
                                 verbose = TRUE,
                                 E = inla.stack.data(Dur2.Jstk)$e,
                                 control.fixed = list(prec = 0.1, 
                                                      prec.intercept = 0.01), 
                                 control.predictor = list(
                                                             A = inla.stack.A(Dur2.Jstk), 
                                                     compute = TRUE, 
                                                          link = 1), 
                                 control.mode = list(restart = TRUE, theta = theta.jnc),
                                 control.inla = list(strategy="gaussian", 
                                                     int.strategy = "eb"),
                                 control.results = list(return.marginals.random = TRUE,
                                                        return.marginals.predictor = TRUE),
                                 control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
GetMets(Model.FJGamBi2TBT)
##   Metric    Tier.1   Tier.2
## 1    DIC 1520.9741 713.3523
## 2   WAIC 1535.7986 690.0296
## 3   lCPO    3.6834   0.0359

Get Random Fields (plotted later)

Pred.pnts = Grid.pnts
ModResult = Model.FJGamBi2TBT

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF = drop(Ap %*% ModResult$summary.random$field.bwte3$mean)

Mod.JD.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "RF", 
                    background = NA)


Pred.pnts$SD = drop(Ap %*% ModResult$summary.random$field.bwte3$sd)

Mod.JDsd.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "SD", 
                    background = NA)

7.9 Joint Model (Outbreaks, BWTE occurrence probability)

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 

Frm.JBi = Y ~ -1 +  intercept1 + 
                    intercept3 +
                          f(field.bwte1, 
                              model = spde0) +
                          f(field.bwte3, 
                              model = spde0) +
                          f(field.bwteXX, 
                              copy = "field.bwte1", 
                              fixed = FALSE)  +
                          f(nOutBrk,
                    model = "rw1",
                    scale.model = TRUE,
                    hyper = pcprior) +
                          f(TStep.wk, Lat.disp,
                             model = "rw1",
                             scale.model = TRUE,
                             constr = TRUE,
                             hyper= LCPrior) +
                        SoilF + TopoF + wDsR + GlobPoultry + Pop
              

theta.jnc = c(-1.527355, 1.479108, -0.2613676, 0.3012231, -2.2324618, 0.3541266, 1.14)

Model.FJBiBi2TBT = inla(Frm.JBi, 
                             #num.threads = 8,
                             data = inla.stack.data(StpOvr2.Jstk, spde=spde0), 
                             family = c("binomial", "binomial"), 
                             verbose = TRUE,
                             control.fixed = list(prec = 0.1, 
                                                  prec.intercept = 0.01), 
                             control.predictor = list(
                                                    A = inla.stack.A(StpOvr2.Jstk), 
                                                    compute = TRUE, 
                                                    link = 1), 
                             control.mode = list(restart = TRUE, theta = theta.jnc),
                             control.inla = list(strategy="gaussian", 
                                                 int.strategy = "eb"),
                             control.results = list(return.marginals.random = TRUE,
                                                    return.marginals.predictor = TRUE),
                             control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE)) 
GetMets(Model.FJBiBi2TBT)
##   Metric    Tier.1   Tier.2
## 1    DIC 1577.7238 708.8757
## 2   WAIC 1455.3290 688.1714
## 3   lCPO    0.1168   0.0359

Get Random Field (plotted later)

Pred.pnts = Grid.pnts
ModResult = Model.FJBiBi2TBT

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF = drop(Ap %*% ModResult$summary.random$field.bwte3$mean)

Mod.JO.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "RF", 
                    background = NA)

Pred.pnts$SD = drop(Ap %*% ModResult$summary.random$field.bwte3$sd)

Mod.JOsd.r = rasterize(Pred.pnts, 
                    Domain.r, 
                    "SD", 
                    background = NA)

7.10 Non-Spatial Model (Outbreaks)

For comparison, and to judge the need for spatial effects.

pcprior = list(prec = list(prior="pc.prec", param = c(3, 0.001))) 

Frm.JBi = OutBreak ~ -1 +  intercept3 +
                                  GlobPoultry + Pop  

theta.jnc = c(0.7764397, -2.4866705, -0.1443130, -1.2197074, 0.2659467, -1.9934657, 0.4901098, 1.14)

Model.nsd = inla(Frm.JBi, 
                                 #num.threads = 8,
                                 data = inla.stack.data(OBS.stk, spde=spde0), 
                                 family = "binomial", 
                                 verbose = TRUE,
                                 #E = inla.stack.data(OBS.stk)$e,
                                 control.fixed = list(prec = 0.1, 
                                                      prec.intercept = 0.01), 
                                 control.predictor = list(
                                                             A = inla.stack.A(OBS.stk), 
                                                     compute = TRUE, 
                                                          link = 1), 
                                 #control.mode = list(restart = TRUE, theta = theta.jnc),
                                 control.inla = list(strategy="gaussian", 
                                                     int.strategy = "eb"),
                                 control.results = list(return.marginals.random = TRUE,
                                                        return.marginals.predictor = TRUE),
                                 control.compute=list(dic = TRUE, cpo = TRUE, waic = TRUE))
GetMets(Model.nsd)
##   Metric    Tier.1
## 1    DIC 1360.5809
## 2   WAIC 1364.7510
## 3   lCPO    0.0704

8 Results

8.1 Compare Random Fields

CompRFs = stack(Mod.OB.r, Mod.OBt.r, Mod.OBS.r, Mod.OBc.r, Mod.JD.r,  Mod.JO.r) 
names(CompRFs) = c("Base", "Thinning", "Clustering", "Covariates", "Duration", "Occurrence") 

names(CompRFs) = LETTERS[1:6]


rng = seq(-2, 7.6, 0.001)
mCols  = c("#A50026", "#D73027", "#F46D43", "#FDAE61",  "#ABD9E9", "#74ADD1", "#4575B4","#313695")
cr0 = rev(colorRampPalette(mCols)(n = 2000))
cr = colorRampPalette(c(cr0), 
         bias = 2)

Plt.rfs = levelplot(CompRFs, 
               layout=c(3, 2),
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               #sub= "Compare Random Fields",
               col.regions = cr, at = rng,
               colorkey = list(
                               space = "bottom"), 
                               par.strip.text = list(fontface='bold', cex=1.25),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.75)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) 

Plt.rfs

8.2 Compare Standard Deviations

CompSDs = stack(Mod.OBsd.r, Mod.OBtsd.r, Mod.OBSsd.r, Mod.OBcsd.r, Mod.JDsd.r, Mod.JOsd.r)
names(CompSDs) = c("Base", "Thinning", "Clustering", "Covariates", "Duration", "Occurrence")

names(CompSDs) = LETTERS[1:6]


rng = seq(0, 2.8, 0.001)
mCols  = brewer.pal(9, "PuBuGn")#[-c(1:2)]
cr0 = colorRampPalette(mCols)(n = 2000)
cr = colorRampPalette(c(cr0), 
         bias = 1.2)

Plt.rfs = levelplot(CompSDs, 
               layout=c(3, 2),
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               #sub= "Compare Standard Deviation",
               col.regions = cr, at = rng,
               colorkey = list(
                               space = "bottom"), 
                               par.strip.text = list(fontface='bold', cex=1.25),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.75)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) 

Plt.rfs

Compare Parsimony (Outbreaks)

Models = c("Model1", "Model2",
           "Model3", "Model4",
           "Model5", "Model6",
           "Model7")

Type = c("Space Only",
         "Space + Clustering",
         "Space + Time",
         "Space + Time + Covariates",
         "Joint Model (Duration)",
         "Joint Model (Occurrence)",
         "Non-Spatial + Covariates")

#Deviance information criteria
DICs = c(GetMets(Model.OB)[1,"Tier.1"],
         GetMets(Model.OBS)[1,"Tier.1"],
         GetMets(Model.OBst)[1,"Tier.1"],
         GetMets(Model.OBcov)[1,"Tier.1"],
         GetMets(Model.FJGamBi2TBT)[1,"Tier.2"],
         GetMets(Model.FJBiBi2TBT)[1,"Tier.2"],
         GetMets(Model.nsd)[1,"Tier.1"])

#Watanabe-Akaike information criteria
WAICs = c(GetMets(Model.OB)[2,"Tier.1"],
          GetMets(Model.OBS)[2,"Tier.1"],
          GetMets(Model.OBst)[2,"Tier.1"],
          GetMets(Model.OBcov)[2,"Tier.1"],
          GetMets(Model.FJGamBi2TBT)[2,"Tier.2"],
          GetMets(Model.FJBiBi2TBT)[2,"Tier.2"],
          GetMets(Model.nsd)[2,"Tier.1"])

#log-Conditional Predictive Ordinate
lCPOs = c(GetMets(Model.OB)[3,"Tier.1"],
          GetMets(Model.OBS)[3,"Tier.1"],
          GetMets(Model.OBst)[3,"Tier.1"],
          GetMets(Model.OBcov)[3,"Tier.1"],
          GetMets(Model.FJGamBi2TBT)[3,"Tier.2"],
          GetMets(Model.FJBiBi2TBT)[3,"Tier.2"],
          GetMets(Model.nsd)[3,"Tier.1"])

Model_mets = as.data.frame(cbind(Model = Models,
                                 DIC = round(DICs, 2),
                                 WAIC = round(WAICs, 2),
                                 lCPO = round(lCPOs, 3),
                                 Type = Type))
kable(Model_mets) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) 
Model     DIC      WAIC     lCPO   Type
Model1    1075.20  979.85   0.076  Space Only
Model2    836.23   817.40   0.043  Space + Clustering
Model3    730.16   709.90   0.037  Space + Time
Model4    729.10   709.57   0.037  Space + Time + Covariates
Model5    713.35   690.03   0.036  Joint Model (Duration)
Model6    708.88   688.17   0.036  Joint Model (Occurrence)
Model7    1360.58  1364.75  0.070  Non-Spatial + Covariates
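Differences relative to the best-scoring model can make the comparison easier to read; a hedged convenience step (not part of the original workflow; note that cbind() above stored the metrics as character strings):

Model_mets %>%
      mutate(DIC   = as.numeric(as.character(DIC)),
             WAIC  = as.numeric(as.character(WAIC)),
             dDIC  = DIC - min(DIC),   #difference from lowest DIC
             dWAIC = WAIC - min(WAIC)) #difference from lowest WAIC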

8.3 Compare Parsimony (BWTE)

Models = c("Model8", "Model9",
           "Model5", "Model6")

Type = c("Duration (base)",
         "Occurrence (base)",
         "Duration (joint)",
         "Occurrence (joint)")

#Deviance information criteria
DICs = c(GetMets(Model.dur)[1,"Tier.1"],
         GetMets(Model.occur)[1,"Tier.1"],
         GetMets(Model.FJGamBi2TBT)[1,"Tier.1"],
         GetMets(Model.FJBiBi2TBT)[1,"Tier.1"])

#Watanabe-Akaike information criteria
WAICs = c(GetMets(Model.dur)[2,"Tier.1"],
         GetMets(Model.occur)[2,"Tier.1"],
         GetMets(Model.FJGamBi2TBT)[2,"Tier.1"],
         GetMets(Model.FJBiBi2TBT)[2,"Tier.1"])

Model_mets2 = as.data.frame(cbind(Model = Models,
                                 DIC = round(DICs, 2),
                                 WAIC = round(WAICs, 2),
                                 Type = Type))
kable(Model_mets2) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) 
Model     DIC      WAIC     Type
Model8    1526.23  1540.95  Duration (base)
Model9    1642.21  1515.85  Occurrence (base)
Model5    1520.97  1535.80  Duration (joint)
Model6    1577.72  1455.33  Occurrence (joint)

Get Model Effect Coefficients (Joint Models)

DFixed.df = Model.FJGamBi2TBT$summary.fixed[,c(1:3,5)]
names(DFixed.df) = c("Mean", "SD", "Q025", "Q975")
rDFixed.df = Model.FJGamBi2TBT$summary.hyperpar[,c(1:3,5)]
names(rDFixed.df) = c("Mean", "SD", "Q025", "Q975")
rownames(rDFixed.df) = c("pGamma", rownames(rDFixed.df[2:8,]))
Dur.effects = rbind(DFixed.df, rDFixed.df)

OFixed.df = Model.FJBiBi2TBT$summary.fixed[,c(1:3,5)]
names(OFixed.df) = c("Mean", "SD", "Q025", "Q975")
rOFixed.df = Model.FJBiBi2TBT$summary.hyperpar[,c(1:3,5)]
names(rOFixed.df) = c("Mean", "SD", "Q025", "Q975")
Occur.effects = rbind(OFixed.df, rOFixed.df)

8.4 Joint Model Effect Coefficients (Duration)

kable(Dur.effects) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Mean SD Q025 Q975
intercept1 1.4853994 0.2276490 1.0384475 1.9319782
intercept3 -8.4204028 1.1879282 -10.7527064 -6.0900456
SoilF 0.1476417 0.0673379 0.0154348 0.2797384
TopoF 0.1574223 0.1205692 -0.0792958 0.3939428
wDsR -0.0216481 0.0072584 -0.0358987 -0.0074094
GlobPoultry 0.1267750 0.0347257 0.0585966 0.1948964
Pop 0.0411023 0.0104055 0.0206728 0.0615148
pGamma 2.2243929 0.2757195 1.7219572 2.8038226
Range for field.bwte1 0.0825605 0.0227426 0.0490268 0.1372418
Stdev for field.bwte1 0.8809553 0.1392739 0.6443230 1.1910630
Range for field.bwte3 0.3478694 0.1539002 0.1542319 0.7426735
Stdev for field.bwte3 1.7733306 0.5605311 0.9646065 3.1429369
Precision for nOutBrk 0.1556677 0.0631639 0.0655855 0.3101663
Precision for TStep.wk 0.6552436 0.2503287 0.2900578 1.2601415
Beta for field.bwteXX 0.6847041 0.2420878 0.2070036 1.1598275
#SelMod.fx = xtable(Dur.effects)
#print(SelMod.fx, include.rownames = T)
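Because the Gamma likelihood uses a log link, the duration-model coefficients can be back-transformed to multiplicative effects; a hedged worked example using the posterior means above:

exp(0.1476417)  #SoilF: ~1.16, roughly a 16% increase in expected residence time per unit
exp(-0.0216481) #wDsR:  ~0.98, roughly a 2% decrease per unit distance to water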

8.5 Joint Model Effect Coefficients (Occurrence Probability)

kable(Occur.effects) %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Mean SD Q025 Q975
intercept1 -6.2949322 0.6210179 -7.5141998 -5.0766822
intercept3 -8.3553562 1.1888500 -10.6894695 -6.0231907
SoilF 0.2691977 0.1390869 -0.0038769 0.5420443
TopoF 0.5516549 0.1862255 0.1860315 0.9169732
wDsR -0.0312681 0.0098996 -0.0507043 -0.0118482
GlobPoultry 0.1236982 0.0340253 0.0568951 0.1904456
Pop 0.0406810 0.0102301 0.0205959 0.0607493
Range for field.bwte1 0.1499333 0.0395401 0.0930040 0.2462927
Stdev for field.bwte1 3.8852475 0.6358966 2.8205773 5.3142841
Range for field.bwte3 0.3855132 0.1869489 0.1569028 0.8674847
Stdev for field.bwte3 1.5640894 0.4953134 0.8304287 2.7580643
Precision for nOutBrk 0.1527814 0.0621050 0.0650043 0.3052690
Precision for TStep.wk 0.6493483 0.2483023 0.2877627 1.2500930
Beta for field.bwteXX 0.2522038 0.0934599 0.0656414 0.4334678
#SelMod.fx = xtable(Occur.effects)
#print(SelMod.fx, include.rownames = T)

8.6 Distance to Outbreak

Plt.df = as.data.frame(Model.FJBiBi2TBT$summary.random$nOutBrk)
names(Plt.df) = c("ID", "Mean", "sd", "Q025", "Q50", "Q975")

FindZero(Plt.df$Q025, Plt.df$ID)*100
## [1] 50
FindZero(Plt.df$Mean, Plt.df$ID)*100
## [1]  550 2050 3850
ggplot(Plt.df, aes(ID*100, Mean)) +
        geom_smooth(col = "black", 
                  linetype= "solid",
                  method = "loess",
                  span = 0.7,
                  se = FALSE,
                  lwd = 1) +
        geom_smooth(data = Plt.df, aes(ID*100, Q025), 
                    col = "grey", 
                    method = "loess",
                    span = 0.7,
                    se = FALSE,
                    linetype= "dashed") +
        geom_smooth(data = Plt.df, aes(ID*100, Q975), 
                    col = "grey", 
                    method = "loess",
                    span = 0.7,
                    se = FALSE,
                    linetype= "dashed") +
        geom_hline(yintercept = 0, 
                   linetype = "solid",
                   col = "lightgray",
                   size = 0.5) +  
        geom_vline(xintercept = 0, 
                   linetype = "solid",
                   col = "lightgray",
                   size = 0.5) +
        xlim(0, 800) +
        ylim(-5, 10) +
        xlab("Distance to Neighboring Event (km)") +
        ylab("AI Event Probability (logit)") +  
        theme_classic() +
        theme(axis.text=element_text(size=16),
              strip.text = element_text(face="bold", size = 20),
              axis.title.y = element_text(face="bold", size = 20),
              axis.title.x = element_text(face="bold", size = 20, vjust=-2))
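FindZero(), used above to locate where the estimated effect and its lower credible bound cross zero, is defined outside this section; a minimal sketch consistent with the printed output (assumption: it returns the midpoint between consecutive IDs where the sign changes) is:

FindZero.sketch = function(vals, ids) { #hedged sketch; the actual helper may differ
  i = which(diff(sign(vals)) != 0)      #positions where the sign flips
  (ids[i] + ids[i + 1]) / 2             #midpoint of the bracketing IDs
}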

8.7 Time Effect

Plt.df = as.data.frame(Model.FJBiBi2TBT$summary.random$TStep.wk)
names(Plt.df) = c("ID", "Mean", "sd", "Q025", "Q50", "Q975")

ggplot(Plt.df, aes(ID, Mean)) +
        geom_smooth(col = "black", 
                  linetype= "solid",
                  method = "loess",
                  span = 0.3,
                  se = FALSE,
                  lwd = 1) +
        geom_smooth(data = Plt.df, aes(ID, Q025), 
                    col = "grey", 
                    method = "loess",
                    span = 0.3,
                    se = FALSE,
                    linetype= "dashed") +
        geom_smooth(data = Plt.df, aes(ID, Q975), 
                    col = "grey", 
                    method = "loess",
                    span = 0.3,
                    se = FALSE,
                    linetype= "dashed") +
        geom_hline(yintercept = 0, 
                   linetype = "solid",
                   col = "lightgray",
                   size = 0.5) +  
        geom_vline(xintercept = 0, 
                   linetype = "solid",
                   col = "lightgray",
                   size = 0.5) +
        xlim(0, 52) +
        ylim(-5,5) +
        xlab("Time (Week of Year)") +
        ylab("AI Event Probability (logit)") +  
        theme_classic() +
        theme(axis.text=element_text(size=16),
              strip.text = element_text(face="bold", size = 20),
              axis.title.y = element_text(face="bold", size = 20),
              axis.title.x = element_text(face="bold", size = 20, vjust=-2))

8.8 Time Effect Over Latitudinal Displacement

OutBrks.set2T = train

Lat.frame = OutBrks.set2T %>%
                mutate(Week = TStep.wk) %>%
                group_by(Week) %>%
                summarise(Events = sum(OBS))

Mx.val = 40

My.lat.lu2$Disp = Scale(My.lat.lu2$delta)*Mx.val

ggplot(Lat.frame, 
       aes(Week, Events)) + 
       geom_bar(stat="identity", 
                 col = "darkgray",
                 fill = "darkgray") +
       geom_hline(yintercept = 20, 
                   linetype = "solid",
                   col = "lightgray",
                   size = 0.5) + 
       geom_line(data=My.lat.lu2,
                 aes(LocWKs, Disp),
                 col = "black",
                 lwd=1) +
        scale_y_continuous(sec.axis = sec_axis(~.*1/Mx.val*40, 
                           name = "Count of AI Events"),
                           breaks = seq(0, 40, 10), 
                           limits = c(0, 40),
                           labels = c("-4", "-2", "0", "2", "4")) + 
       xlab("Time   (Week of Year)") +
        ylab(expression("Latitudinal Displacement ("~degree~"Latitude)")) +  
        theme_classic() +
        theme(axis.text.x = element_text(face="bold", size=12),
              axis.text.y = element_text(face="bold", size=14),
              strip.text = element_text(face="bold", size = 20),
              axis.title.y = element_text(size = 20),
              axis.title.x = element_text(size = 20, vjust=-2))

8.9 Duration (Residence Time)

Estimated residence time from the joint model. Because the Gamma likelihood uses a log link, expected residence time is obtained as exp(linear predictor).

Pred.pnts = Grid.pnts
ModResult = Model.FJGamBi2TBT

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF1 = drop(Ap %*% ModResult$summary.random$field.bwte1$mean)

mydf = ModResult$summary.fix

Pred.pnts$Int1 = ModResult$summary.fix[1,1]

Pred.pnts@data = Pred.pnts@data %>%
            mutate(LP = Int1 + RF1 +
                        SoilF*mydf[3,1] +
                        TopoF*mydf[4,1] +
                        wDsR *mydf[5,1], 
              T1.dur = exp(LP)) 

T1.dur.r = rasterize(Pred.pnts, 
                  Domain.r, 
                  "T1.dur", 
                  background = NA)

#View
plt.dur = T1.dur.r
plt.dur = disaggregate(T1.dur.r, fact=2, method="bilinear")

StpOvr.r = plt.dur
StpOvr.r[StpOvr.r < 4] = 0
StpOvr.r[StpOvr.r > 5] = 0
StpOvr.r[StpOvr.r > 0] = 1

plt.dur[plt.dur < 2] = 0
plt.dur[plt.dur >= 20] = 25

breaks = seq(0, 25, 0.5)
rc = cut(plt.dur, breaks=breaks)

rng = seq(0, 50, 0.1)
mCols  = rev(cividis(10))
mCols[1:3] = "#FFEA46FF"

#mCols  = brewer.pal(9, "YlOrRd")[2:9]
cr0 = colorRampPalette(mCols)(n = 2000)
cr = colorRampPalette(c(cr0), 
         bias = 1.3)

Plt.a = levelplot(rc, 
               #layout=c(2, 1),
               maxpixels = 1e6,
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               main = "Estimated Duration (Residence time)",
               col.regions = cr, at = rng,
               colorkey =list(labels=list(at=c(0.5, 10, 20, 30, 40, 48.5),  
                             labels=c("1 Day", "5", "10", "15", "20", "25+ Days"), cex=1),
                             labels=list(cex=12),
                             space = "bottom"), 
                             par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) +
               latticeExtra::layer(sp.polygons(Lakes, col = "black", fill = "white", lwd = 1))

Plt.a

8.10 Occurrence Probability

Estimated occurrence probability from the joint model. Because the binomial likelihood uses a logit link, the probability is obtained with plogis(linear predictor).

Pred.pnts = Grid.pnts
ModResult = Model.FJBiBi2TBT

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)

Pred.pnts$RF1 = drop(Ap %*% ModResult$summary.random$field.bwte1$mean)

mydf = ModResult$summary.fix

Pred.pnts$Int1 = ModResult$summary.fix[1,1]

Pred.pnts@data = Pred.pnts@data %>%
            mutate(LP = #Int1 + RF1 +
                        SoilF*mydf[3,1] +
                        TopoF*mydf[4,1] +
                        wDsR *mydf[5,1], 
              T1.occ = plogis(LP)) 


T1.occ.r = rasterize(Pred.pnts, 
                  Domain.r, 
                  "T1.occ", 
                  background = NA)

#View
plt.occ = disaggregate(T1.occ.r, fact=2, method="bilinear")

rng = seq(0, 1, 0.01)
mCols  = rev(viridis(10))[-c(5:6)]
mCols[1:2] = "#FDE725FF"

cr0 = colorRampPalette(mCols)(n = 2000)
cr = colorRampPalette(c(cr0), 
         bias = 0.8)

Plt.a = levelplot(plt.occ, 
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               maxpixels = 1e5,
               main = "Occurrence Probability",
               col.regions = cr, at = rng,
               colorkey = list(labels=list(at=c(0, 0.25, 0.50, 0.75, 1),  
                             labels=c("0%", "25%", "50%", "75%", "100%"), cex=1),
                             labels=list(cex=12),
                             space = "bottom"), 
                             par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) +
               latticeExtra::layer(sp.polygons(Lakes, col = "black", fill = "white", lwd = 1))

Plt.a

8.11 Risk Zones

Rsk.zones = raster("./RiskZones18.tif")

Rsk.zones[Rsk.zones <= 10] = NA

values(Rsk.zones) = Scale(values(Rsk.zones))

Rsk.zones2 = rasterToPoints(Rsk.zones, spatial = T)

Zone.map.r = rasterize(Rsk.zones2, 
                  Domain.r, 
                  "RiskZones18", 
                  background = NA)

Zone.map.r[is.na(Zone.map.r)] = 0

Zone.map.r = Zone.map.r + Domain.r

Lakes = readOGR(dsn = "./Arc/Lakes", 
                layer = "Lakes", 
                stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\Arc\Lakes", layer: "Lakes"
## with 1 features
## It has 11 fields
## Integer64 fields read as strings:  OBJECTID gid water sqmi sqkm
Lakes = spTransform(Lakes, proj4string(NAmer.reg))


rng = seq(0, 1, 0.01)
mCols  = rev(inferno(10))
mCols[1] = "lightgray"
cr = colorRampPalette(c(mCols), 
         bias = 1)

Plt.F = levelplot(Zone.map.r, 
               #layout=c(2, 1),
               maxpixels = 1e5,
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               main = "Risk (Week 18)",
               col.regions = cr, at = rng,
               colorkey = list(labels=list(at=c(0, 0.25, 0.50, 0.75, 1),  
                                 labels=c("0%", "25%", "50%", "75%", "100%"), cex=1),
                                 labels=list(cex=12),
                                 space = "bottom"), 
                             par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "black", lwd = 1)) +
               latticeExtra::layer(sp.polygons(Lakes, col = "black", fill = "darkgray", lwd = 1))

Plt.F

Get States

States = map("state", 
            fill = TRUE,
            plot = FALSE)

IDs = sapply(strsplit(States$names, ":"),
             function(x) x[1])

LL84 = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

States = map2SpatialPolygons(States, IDs = IDs,
                              proj4string = CRS(LL84))

#Add a dataframe  
pid = sapply(slot(States, "polygons"), 
             function(x) slot(x, "ID"))

p.df = data.frame(ID=1:length(States), 
                  row.names = pid)

States = SpatialPolygonsDataFrame(States, p.df)



##U.S. County Boundaries
Counties = map("county", 
            fill = TRUE,
            plot = FALSE)

IDs = sapply(strsplit(Counties$names, ":"),
             function(x) x[1])

LL84 = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84 +towgs84=0,0,0"

Counties = map2SpatialPolygons(Counties, IDs = IDs,
                              proj4string = CRS(LL84))

#Add a dataframe  
pid = sapply(slot(Counties, "polygons"), 
             function(x) slot(x, "ID"))

p.df = data.frame(ID=1:length(Counties), 
                  row.names = pid)

Counties = SpatialPolygonsDataFrame(Counties, p.df)

Counties = gBuffer(Counties, width = 0, byid=T)

Great Lakes Region (Week 18)

MyStates = subset(States, ID == 48 | ID == 34 | ID == 21 | ID == 12 | ID == 13)
MyStates = gBuffer(MyStates, width = 0, byid=T)

MyCounties = crop(Counties, MyStates)

State.r = raster(res=0.3)
extent(State.r) = extent(MyStates)

MyStates.r = rasterize(MyStates,
                     State.r,
                     field = 0,
                     background = NA)

Zone.map.rZ = crop(Zone.map.r, MyStates.r)
Zone.map.rZ = resample(Zone.map.rZ, MyStates.r)

Zone.map.rZ = Zone.map.rZ + MyStates.r


AI_Events.pnt$Week18 = extract(Zone.map.r, AI_Events.pnt, method = "simple")
AI_Events.pnt$MyStates = extract(Zone.map.rZ, AI_Events.pnt, method="simple")
BWTE18 = subset(AI_Events.pnt, is.na(MyStates) == FALSE)

MyStatesp = spTransform(MyStates, nProj)
MyCountiesp = spTransform(MyCounties, nProj)
Lakesp = spTransform(Lakes, nProj)
Zone.map.rZ = projectRaster(Zone.map.rZ, crs =proj4string(MyCountiesp))

rng = seq(0, 1, 0.01)
mCols  = rev(inferno(10))
mCols[1] = "lightgray"
cr = colorRampPalette(c(mCols), 
         bias = 0.75)

MySeqLabs = c("100"," ", "300", " ", "500")

Plt.F2 = levelplot(Zone.map.rZ, 
               maxpixels = 1e5,
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               main = "Great Lakes Region (Week 18)",
               col.regions = cr, at = rng,
               colorkey = list(labels=list(at=c(0, 0.25, 0.50, 0.75, 1),  
                                 labels=c("0%", "25%", "50%", "75%", "100%"), cex=1),
                                 labels=list(cex=12),
                                 space = "right"), 
                             par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(MyCountiesp, col = "darkgray", lwd = 1)) +
               latticeExtra::layer(sp.polygons(MyStatesp, col = "black", lwd = 1)) +
               latticeExtra::layer(sp.polygons(Lakesp, col = "black", fill = "darkgray", lwd = 1)) +
               latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                offset = c(-10320,5050),
                                 scale = 350)}) +
               latticeExtra::layer({xs = seq(-9550, -9150, by=100)
                                 grid.rect(x=xs, y=4490,
                                 width=100, height=30,
                                 gp=gpar(fill=rep(c('transparent', 'black'), 2)),
                                 default.units='native')
                                 grid.text(x= xs + 50, y=4524, MySeqLabs,
                                 gp=gpar(cex=0.8), rot=0,
                                 default.units='native')}) +
               latticeExtra::layer(sp.text(c(-9055, 4490), txt = "km", 
                                  col="black",font=list(face="bold"), cex=1.5))

Plt.F2

Southeast Region (Week 18)

MyStates = subset(States, ID == 1 | ID == 10 | ID == 39 | ID == 41 | ID == 32 | ID == 23)
MyStates = gBuffer(MyStates, width = 0, byid=T)

MyCounties = crop(Counties, MyStates)

State.r = raster(res=0.3)
extent(State.r) = extent(MyStates)

MyStates.r = rasterize(MyStates,
                     State.r,
                     field = 0,
                     background = NA)

Zone.map.rZ = crop(Zone.map.r, MyStates.r)
Zone.map.rZ = resample(Zone.map.rZ, MyStates.r)

Zone.map.rZ = Zone.map.rZ + MyStates.r


MyStatesp = spTransform(MyStates, nProj)
MyCountiesp = spTransform(MyCounties, nProj)
Zone.map.rZ = projectRaster(Zone.map.rZ, crs =proj4string(MyCountiesp))


rng = seq(0, 1, 0.01)
mCols  = rev(inferno(10))
mCols[1] = "lightgray"
cr = colorRampPalette(c(mCols), 
         bias = 0.75)

MySeqLabs = c("100"," ", "300", " ", "500")

Plt.F2 = levelplot(Zone.map.rZ, 
               maxpixels = 1e5,
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               main = "Southeast Region (Week 18)",
               col.regions = cr, at = rng,
               colorkey = list(labels=list(at=c(0, 0.25, 0.50, 0.75, 1),  
                                 labels=c("0%", "25%", "50%", "75%", "100%"), cex=1),
                                 labels=list(cex=12),
                                 space = "right"), 
                             par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(MyCountiesp, col = "darkgray", lwd = 1)) +
               latticeExtra::layer(sp.polygons(MyStatesp, col = "black", lwd = 1)) +
               latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                offset = c(-8700,3680),
                                 scale = 300)}) +
               latticeExtra::layer({xs = seq(-8950, -8550, by=100)
                                 grid.rect(x=xs, y=3600,
                                 width=100, height=30,
                                 gp=gpar(fill=rep(c('transparent', 'black'), 2)),
                                 default.units='native')
                                 grid.text(x= xs + 50, y=3629, MySeqLabs,
                                 gp=gpar(cex=0.8), rot=0,
                                 default.units='native')}) +
               latticeExtra::layer(sp.text(c(-8460, 3598), txt = "km", 
                                  col="black",font=list(face="bold"), cex=1.5))

Plt.F2

Mexico (Week 3)

Loading the week 3 risk-zone raster, rescaling values to 0-1, masking to the analysis domain, and mapping over Mexican state and municipality boundaries.

Rsk.zones = raster("./RiskZones03.tif")

values(Rsk.zones) = Scale(values(Rsk.zones))
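#Scale() is not a base R function; it is presumably a helper defined earlier in the
#workflow, assumed here to be a simple min-max rescale to [0, 1], e.g.:
#Scale = function(x) (x - min(x, na.rm = TRUE))/(max(x, na.rm = TRUE) - min(x, na.rm = TRUE))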

Rsk.zones2 = rasterToPoints(Rsk.zones, spatial = T)

Zone.map.r = rasterize(Rsk.zones2, 
                  Domain.r, 
                  "RiskZones03", 
                  background = NA)

Zone.map.r[is.na(Zone.map.r)] = 0

Zone.map.r = Zone.map.r + Domain.r
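#Adding Domain.r (0 over land, NA elsewhere) masks the risk surface to the land domain.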

Mex.states = readOGR(dsn = "./Arc/Mexico", 
                    layer = "MEX_adm1", 
                    stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\Arc\Mexico", layer: "MEX_adm1"
## with 32 features
## It has 9 fields
## Integer64 fields read as strings:  ID_0 ID_1
Mex.counties = readOGR(dsn = "./Arc/Mexico", 
                    layer = "MEX_adm2", 
                    stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\Arc\Mexico", layer: "MEX_adm2"
## with 1853 features
## It has 11 fields
## Integer64 fields read as strings:  ID_0 ID_1 ID_2
Mex.states = gBuffer(Mex.states, width = 0, byid=T)
Mex.counties = gBuffer(Mex.counties, width = 0, byid=T)

Mex.statesp = spTransform(Mex.states, nProj)
Mex.countiesp = spTransform(Mex.counties, nProj)

MState.r = raster(res=0.3)
extent(MState.r) = extent(Mex.states)

MxStates.r = rasterize(Mex.states,
                       MState.r,
                       field = 0,
                       background = NA)

MZone.map.rZ = crop(Zone.map.r, MxStates.r)
MZone.map.rZ = resample(MZone.map.rZ, MxStates.r)

MZone.map.rZ = MZone.map.rZ + MxStates.r

MZone.map.rZ = projectRaster(MZone.map.rZ, crs = nProj)


rng = seq(0, 1, 0.01)
mCols  = rev(inferno(10))
mCols[1] = "lightgray"
cr = colorRampPalette(c(mCols), 
         bias = 0.75)

MySeqLabs = c("100"," ", "200", " ", "400"," ", "600", " ", "800", " ", "1000")

Plt.F2 = levelplot(MZone.map.rZ, 
               maxpixels = 1e5,
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               main = "Mexico (Week 3)",
               col.regions = cr, at = rng,
               colorkey = list(labels=list(at=c(0, 0.25, 0.50, 0.75, 1),  
                                 labels=c("0%", "25%", "50%", "75%", "100%"), cex=1),
                                 labels=list(cex=12),
                                 space = "right"), 
               par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(Mex.countiesp, col = "darkgray", lwd = 0.2)) +
               latticeExtra::layer(sp.polygons(Mex.statesp, col = "black", lwd = 1)) +
               latticeExtra::layer({SpatialPolygonsRescale(layout.north.arrow(),
                                offset = c(-10500,2550),
                                 scale = 600)}) +
               latticeExtra::layer({xs = seq(-13000, -12000, by=100)
                                 grid.rect(x=xs, y=1945,
                                 width=100, height=50,
                                 gp=gpar(fill=rep(c('transparent', 'black'), 2)),
                                 default.units='native')
                                 grid.text(x= xs + 50, y=2000, MySeqLabs,
                                 gp=gpar(cex=0.8), rot=0,
                                 default.units='native')}) +
               latticeExtra::layer(sp.text(c(-12900, 1875), txt = "Scale (km)", 
                                  col="black",font=list(face="bold"), cex=1))

Plt.F2

9 Model Validation

9.1 Predictive Accuracy

Combining held-out outbreak records (test) with an equal number of randomly sampled background locations, reconstructing each model's linear predictor from posterior means, averaging the two model predictions, and summarizing threshold-based accuracy (PCC, sensitivity, specificity, AUC, TSS).

Valid1 = test %>%
         select(Long, Lat, OBS, TStep.wk, nOutBrk, SoilF, TopoF, wDsR, GlobPoultry, Pop, AI)

##Random Sample
set.seed(1976)
RandSamp = sample_n(Node.set@data, dim(test)[1], replace = TRUE)

RandSamp = RandSamp %>%
           select(Long, Lat, OBS, TStep.wk, nOutBrk, SoilF, TopoF, wDsR, GlobPoultry, Pop, AI)
###

Pred.pnts = rbind(Valid1, RandSamp)
Pred.pnts = SpatialPointsDataFrame(Pred.pnts[,c("Long","Lat")], Pred.pnts)
proj4string(Pred.pnts) = proj4string(Valid.set)

ModResult = Model.FJGamBi2TBT

pLoc = cbind(Pred.pnts$Long, Pred.pnts$Lat)

pLoc = inla.mesh.map(pLoc, 
                     projection = "longlat", 
                     inverse = TRUE)

Ap = inla.spde.make.A(ModMesh, loc=pLoc)
mydf = ModResult$summary.fix

Pred.pnts$Int1 = ModResult$summary.fix[1,1]
Pred.pnts$Int2 = ModResult$summary.fix[2,1]


#Clustering
LuTable = ModResult$summary.random$nOutBrk
LuTable$POS = 1:dim(LuTable)[1]
          
Pred.pnts$RW = sapply(Pred.pnts$nOutBrk, function(x)which.min(abs(x - LuTable$ID)))

Pred.pnts$nObrk.ef = as.numeric(with(LuTable,
                              mean[match(Pred.pnts$RW, 
                                                   POS)]))

Pred.pnts$Week.effectP = ModResult$summary.random$TStep.wk$mean[Pred.pnts$TStep.wk]
Pred.pnts = subset(Pred.pnts, is.na(Week.effectP) == FALSE)
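#Reconstruct the linear predictor from posterior means: fixed-effect means (mydf) times the
#covariates, plus the matched week (TStep.wk) and outbreak-cluster (nOutBrk) random-effect
#means; plogis() then back-transforms the result to the probability scale.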

Pred.pnts@data = Pred.pnts@data %>%
            mutate(LP = #Int1 + 
                      # Int2 + 
                       Week.effectP +
                        nObrk.ef +
                       #RF1 + 
                       # RF3 +
                        #RFX +
                        SoilF*mydf[3,1] +
                        TopoF*mydf[4,1] +
                        wDsR*mydf[5,1] + 
                        GlobPoultry*mydf[6,1] + 
                        Pop*mydf[7,1], 
            Mod.Fit = plogis(LP)) 


Pred.pnts$Timep1 = plogis(Pred.pnts$Week.effectP)

Pred.pnts$Cluster1 = plogis(Pred.pnts$nObrk.ef)



####
ModResult = Model.FJBiBi2TBT

mydf = ModResult$summary.fix

Pred.pnts$Int1 = ModResult$summary.fix[1,1]
Pred.pnts$Int2 = ModResult$summary.fix[2,1]


#Clustering
LuTable = ModResult$summary.random$nOutBrk
LuTable$POS = 1:dim(LuTable)[1]
          
Pred.pnts$RW = sapply(Pred.pnts$nOutBrk, function(x)which.min(abs(x - LuTable$ID)))

Pred.pnts$nObrk.ef2 = as.numeric(with(LuTable,
                              mean[match(Pred.pnts$RW, 
                                                   POS)]))

Pred.pnts$Week.effectP2 = ModResult$summary.random$TStep.wk$mean[Pred.pnts$TStep.wk]

Pred.pnts@data = Pred.pnts@data %>%
            mutate(LP = #Int1 + 
                      # Int2 + 
                       Week.effectP2 +
                       nObrk.ef2 +
                       #RF1 + 
                       # RF3 +
                        #RFX +
                        SoilF*mydf[3,1] +
                        TopoF*mydf[4,1] +
                        wDsR*mydf[5,1] + 
                        GlobPoultry*mydf[6,1] + 
                        Pop*mydf[7,1], 
            Mod.Fit2 = plogis(LP)) 


#Ensemble prediction: average of the two joint-model fits
Pred.pnts$Mod.Fit3 = (Pred.pnts$Mod.Fit + Pred.pnts$Mod.Fit2)/2


Pred.pnts$Timep2 = plogis(Pred.pnts$Week.effectP2)

Pred.pnts$Cluster2 = plogis(Pred.pnts$nObrk.ef2)

Pred.pnts$Timep3 = (Pred.pnts$Timep1 + Pred.pnts$Timep2)/2
Pred.pnts$Cluster3 = (Pred.pnts$Cluster1 + Pred.pnts$Cluster2)/2


Pred.pnts$ID = 1:nrow(Pred.pnts@data)


###Eval
NSMod.df = as.data.frame(cbind(Pred.pnts$ID, Pred.pnts$OBS, Pred.pnts$Mod.Fit3))
library(PresenceAbsence)
## 
## Attaching package: 'PresenceAbsence'
## The following object is masked from 'package:spatstat':
## 
##     auc
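#Candidate thresholds: "PredPrev=Obs" matches predicted to observed prevalence, and
#"MaxSens+Spec" maximizes the sum of sensitivity and specificity.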
MaxPrev = optimal.thresholds(NSMod.df, opt.methods = "PredPrev=Obs")[1,2]
MaxSens = optimal.thresholds(NSMod.df, opt.methods = "MaxSens+Spec")[1,2]
MaxPrev
## [1] 0.535
MaxSens
## [1] 0.74
NSMod.out = presence.absence.accuracy(NSMod.df, threshold = MaxPrev)
NSMod.out2 = presence.absence.accuracy(NSMod.df, threshold = MaxSens)
NSMod.out3 = presence.absence.accuracy(NSMod.df, threshold = 0.5)

NSMod.out = rbind(NSMod.out3, NSMod.out, NSMod.out2)

#Calculate TSS
NSMod.out$TSS = NSMod.out$sensitivity + NSMod.out$specificity - 1
NSMod.out$MSE = mean((Pred.pnts$Mod.Fit3 - Pred.pnts$OBS)^2) #Brier score; Mod.Fit3 is already on the probability scale

NSMod.out.df = as.data.frame(
                NSMod.out %>%
                  select(threshold, PCC, sensitivity, specificity, AUC, TSS))

kable(NSMod.out.df, 
      caption = "Model validation summary") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Model validation summary

threshold        PCC  sensitivity  specificity        AUC        TSS
    0.500  0.9007092    0.9166667    0.8840580  0.9746377  0.8007246
    0.535  0.9007092    0.9027778    0.8985507  0.9746377  0.8013285
    0.740  0.9219858    0.8888889    0.9565217  0.9746377  0.8454106
#SelMod.fx = xtable(NSMod.out.df) #Print for ms
#print(SelMod.fx, include.rownames = F)

9.2 Virus Subtype

Summarizing the ensemble prediction, seasonal (week) effect, and outbreak-cluster effect at confirmed outbreak locations by reported virus subtype.

Strain.df = Pred.pnts@data %>% filter(OBS == 1)

Strain.df$H7N3 = grepl("H7N3", Strain.df$AI)
Strain.df$H7N9 = grepl("H7N9", Strain.df$AI)
Strain.df$H5N1 = grepl("H5N1", Strain.df$AI)
Strain.df$H5N2 = grepl("H5N2", Strain.df$AI)
Strain.df$H5N8 = grepl("H5N8", Strain.df$AI)
Strain.df$H5 = grepl("H5", Strain.df$AI)
Strain.df$H7 = grepl("H7", Strain.df$AI)
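#The nested ifelse below assigns the first matching subtype label; the broader H5/H7
#flags are retained but not used in the SubType assignment.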

Strain.df$SubType = ifelse(Strain.df$H7N3 == TRUE, "H7N3",
                           ifelse(Strain.df$H7N9 == TRUE, "H7N9",
                                ifelse(Strain.df$H5N1 == TRUE, "H5N1",
                                    ifelse(Strain.df$H5N2 == TRUE, "H5N2",
                                        ifelse(Strain.df$H5N8 == TRUE, "H5N8",
                                              ifelse(Strain.df$AI == "H7N8 LPAI", "H7N8 LPAI",
                                                     ifelse(Strain.df$AI == "EA/AM-H5N2", "H5N2 HPAI",
                                                            NA)))))))

Strain.df$AI[Strain.df$AI == "EA/AM-H5N2"] = "H5N2 HPAI"

Strain.df %>%
  group_by(AI) %>%
  summarise(Count = length(SubType),
            Mean = mean(Mod.Fit3),
            Cluster = mean(Cluster3),
            TimeP = mean(Timep3))
## # A tibble: 6 x 5
##   AI        Count  Mean Cluster TimeP
##   <chr>     <int> <dbl>   <dbl> <dbl>
## 1 H5N1 HPAI     1 1.000   0.990 0.855
## 2 H5N2 HPAI    40 0.967   0.897 0.822
## 3 H5N2 LPAI     1 0.516   0.138 0.518
## 4 H7N3 HPAI    27 0.813   0.741 0.735
## 5 H7N8 LPAI     2 0.917   0.657 0.843
## 6 H7N9 HPAI     1 0.838   0.375 0.879
AI.lvls = levels(factor(Strain.df$AI))
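#For each subtype, summarize the ensemble fit (Mod.Fit3), time effect (Timep3), and
#cluster effect (Cluster3). Rmisc::CI() returns a 95% CI as c(upper, mean, lower);
#subtypes with a single record repeat the observed value so the rows stack consistently.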

for(i in 1:length(AI.lvls)){
  
      tmp.ai = Strain.df %>% filter(AI == AI.lvls[i])
      
      if(dim(tmp.ai)[1] < 2){ upper = c(tmp.ai$Mod.Fit3)
                              lower = mean = upper
                              tmp.ciF = c(upper, mean, lower)
                         
                              upper = c(tmp.ai$Timep3)
                              lower = mean = upper
                              tmp.ciT = c(upper, mean, lower)
                           
                              upper = c(tmp.ai$Cluster3)
                              lower = mean = upper
                              tmp.ciC = c(upper, mean, lower)
                              
                              my.stack = as.data.frame(rbind(tmp.ciF, tmp.ciT, tmp.ciC))
                              my.stack$AI =  AI.lvls[i]
                              
                              names(my.stack) = c("upper", "mean", "lower", "AI")}
      
          else{tmp.ciF = Rmisc::CI(tmp.ai$Mod.Fit3)
               tmp.ciT = Rmisc::CI(tmp.ai$Timep3)
               tmp.ciC = Rmisc::CI(tmp.ai$Cluster3)
               my.stack = as.data.frame(rbind(tmp.ciF, tmp.ciT, tmp.ciC))
               my.stack$AI =  AI.lvls[i]}
      
      if(i == 1){OutSubType =  my.stack}
        else{OutSubType = rbind(OutSubType, my.stack)}}


OutSubType$Metric = rep(c("Fit", "Time", "Cluster"), 6)


Mean.df = as.data.frame(
            OutSubType %>%
            filter(Metric == "Fit") %>%
            group_by(AI) %>%
            summarise(Mean = mean(mean),
                      Low = mean(lower),
                      Upper = mean(upper)))

Mean.df$FitM = paste(round(Mean.df$Mean,2), " (",
                     round(Mean.df$Low,2), ",",
                     round(Mean.df$Upper,2), ")", sep="")


TimeP.df = as.data.frame(
            OutSubType %>%
            filter(Metric == "Time") %>%
            group_by(AI) %>%
            summarise(Mean = mean(mean),
                      Low = mean(lower),
                      Upper = mean(upper)))

TimeP.df$TimM = paste(round(TimeP.df$Mean,2), " (",
                     round(TimeP.df$Low,2), ",",
                     round(TimeP.df$Upper,2), ")", sep="")

clustP.df = as.data.frame(
            OutSubType %>%
            filter(Metric == "Cluster") %>%
            group_by(AI) %>%
            summarise(Mean = mean(mean),
                      Low = mean(lower),
                      Upper = mean(upper)))

clustP.df$ClsM = paste(round(clustP.df$Mean,2), " (",
                     round(clustP.df$Low,2), ",",
                     round(clustP.df$Upper,2), ")", sep="")



Validation2 = as.data.frame(cbind(Mean.df[,c("AI", "FitM")], 
                                  TimeP.df$TimM,
                                  clustP.df$ClsM))

names(Validation2) = c("Subtype", "Estimate", "Time", "Cluster")

Validation2$Count = c(1, 40, 1, 28, 2, 1) #per-subtype sample sizes (entered manually; order matches AI.lvls)


kable(Validation2, 
      caption = "Prediction by Virus Subtype") %>%
      kable_styling("striped", full_width = F) %>%
      row_spec(0, font_size = 20) %>%
      column_spec(1, bold = T)
Prediction by Virus Subtype

Subtype     Estimate           Time               Cluster            Count
H5N1 HPAI   1 (1,1)            0.86 (0.86,0.86)   0.99 (0.99,0.99)       1
H5N2 HPAI   0.97 (0.93,1)      0.82 (0.75,0.89)   0.9 (0.82,0.97)       40
H5N2 LPAI   0.52 (0.52,0.52)   0.52 (0.52,0.52)   0.14 (0.14,0.14)       1
H7N3 HPAI   0.81 (0.7,0.92)    0.74 (0.63,0.84)   0.74 (0.64,0.84)      28
H7N8 LPAI   0.92 (0.68,1.16)   0.84 (0.84,0.84)   0.66 (0.39,0.93)       2
H7N9 HPAI   0.84 (0.84,0.84)   0.88 (0.88,0.88)   0.37 (0.37,0.37)       1
#SelMod.fx = xtable(Validation2)
#print(SelMod.fx, include.rownames = F)

10 Additional Figures

10.1 View Poultry Events (Outbreak Map)

Mapping training and testing poultry outbreak locations by virus subtype across the study area.

wmap_df = fortify(NAmer.reg)
## Regions defined for each Polygons
LAkes_df = fortify(Lakes)
## Regions defined for each Polygons
tmp.df = AI_Events.pnt@data %>% 
            filter(Source != "EmValidate") %>%
            select(Long, Lat, AI)

tmpTst.df = test %>% 
          select(Long, Lat, AI)

tmpTst.df$Hold = "test"
tmp.df$Hold = "train"

Plot.set = rbind(tmpTst.df, tmp.df)

Plot.set$dups = duplicated(Plot.set[,c("Long", "Lat")])
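#Flag duplicate coordinates; the filter below keeps a record if it is not a duplicate
#or if it belongs to the testing set.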

Plot.set = Plot.set %>%
           filter(dups == FALSE | Hold == "test")

Plot.set$AI[Plot.set$AI == "EA/AM-H5N2"] = "H5N2 HPAI"
Plot.set$AI[Plot.set$AI == "EA-H5N8"] = "H5N8 HPAI"
Plot.set$AI[Plot.set$AI == "EA -H5N8"] = "H5N8 HPAI"

tmp.df = Plot.set %>% filter(Hold == "train")
tmpTst.df = Plot.set %>% filter(Hold == "test")



myCol = c("black", "red")

Plot.set$Hold[Plot.set$Hold == "train"] = "black"
Plot.set$Hold[Plot.set$Hold == "test"] = "red"

names(myCol) = levels(factor(Plot.set$Hold))

myShapes = c(0:10)

names(myShapes) = levels(factor(Plot.set$AI))


colScale = scale_colour_manual(name = "Model", values = myCol, 
                               labels = c("Testing", "Training"))

ShpScale = scale_shape_manual(name = "Subtype", values = myShapes,
                             labels = c("H5N1 HPAI", "H5N1 LPAI", "H5N2 HPAI", "H5N2 LPAI", "H5N8 HPAI",
                                        "H7N3 HPAI", "H7N3 LPAI", "H7N7 LPAI", "H7N8 LPAI", "H7N9 HPAI", "H7N9 LPAI"))

ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_polygon(data = LAkes_df,
                     aes(long,lat, group=group), 
                     fill = "lightgray", col="darkgray") +
        geom_point(data=Plot.set, 
                   aes(Long, Lat, 
                      group=factor(Plot.set$AI), fill=NULL,
                      shape=factor(Plot.set$AI)),
                      col = Plot.set$Hold, 
                      size=4) +
        ShpScale +
        colScale + 
        xlab("Longitude") +
        ylab("Latitude") +
        #ggtitle("Historic A.I. Outbreaks") +
        scale_x_longitude(xmin=-150, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              plot.background = element_blank(),
              panel.border = element_blank(),
              #legend.direction = "horizontal",
              #legend.position = "bottom",
              strip.text = element_text(size=16, face="bold"),
              strip.background = element_blank(),
              legend.position=c(0.11, 0.32),
              #legend.key.size = unit(1.5,"line"),
              legend.text = element_text(size=14, face="bold"),
              legend.title = element_text(size=18, face="bold"),
              axis.text.x = element_text(size=14, face="bold"),
              axis.text.y =element_text(size=14, face="bold"),
              axis.title.x = element_text(size=16, face="bold"),
              axis.title.y =element_text(size=16, face="bold"),
              plot.title = element_blank())
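The scale_x_longitude() and scale_y_latitude() axis helpers used above (and again below) are not part of ggplot2; they are presumably defined earlier in the workflow. A minimal sketch of what such helpers might look like, assuming they simply add degree-formatted break labels:

scale_x_longitude = function(xmin = -180, xmax = 180, step = 10, ...) {
  brks = seq(xmin, xmax, by = step)
  labs = paste0(abs(brks), "\u00B0", ifelse(brks < 0, "W", ifelse(brks > 0, "E", "")))
  scale_x_continuous(breaks = brks, labels = labs, expand = c(0, 0), ...)
}

scale_y_latitude = function(ymin = -90, ymax = 90, step = 10, ...) {
  brks = seq(ymin, ymax, by = step)
  labs = paste0(abs(brks), "\u00B0", ifelse(brks < 0, "S", ifelse(brks > 0, "N", "")))
  scale_y_continuous(breaks = brks, labels = labs, expand = c(0, 0), ...)
}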

10.2 BWTE Tracks

Plotting blue-winged teal (BWTE) movement paths across North America.

Paths = readOGR(dsn = "./LInes", 
                  layer = "Paths", 
                  stringsAsFactors = FALSE)
## OGR data source with driver: ESRI Shapefile 
## Source: "C:\Users\humph173\Documents\LabWork\Patuxent\Patux_level1\LInes", layer: "Paths"
## with 42 features
## It has 2 fields
## Integer64 fields read as strings:  Id
Pth2 = fortify(Paths)


ggplot(wmap_df, aes(long,lat, group=group)) + 
        geom_polygon(fill = "lightgray", col="darkgray") + 
        geom_polygon(data = LAkes_df,
                     aes(long,lat, group=group), 
                     fill = "lightgray", col="darkgray") +
        geom_path(data=Pth2, 
                   aes(long, lat, 
                      group=group, fill=NULL),
                      # col = tmp.df$AI, 
                   col= "black",
                   size=0.5) +
        #ShpScale +
        #colScale + 
        xlab("Longitude") +
        ylab("Latitude") +
        #ggtitle("Historic A.I. Outbreaks") +
        scale_x_longitude(xmin=-150, xmax=80, step=10) +
        scale_y_latitude(ymin=20, ymax=60, step=10) +
        coord_equal() + 
        theme(panel.grid.minor = element_blank(),
              panel.grid.major = element_blank(),
              panel.background = element_blank(),
              plot.background = element_blank(),
              panel.border = element_blank(),
              #legend.direction = "horizontal",
              legend.position = "right",
              strip.text = element_text(size=16, face="bold"),
              strip.background = element_blank(),
              #legend.position=c(0.65, 0.05),
              #legend.key.size = unit(1.5,"line"),
              legend.text = element_text(size=10, face="bold"),
              legend.title = element_text(size=18, face="bold"),
              axis.text.x = element_text(size=12, face="bold"),
              axis.text.y =element_text(size=12, face="bold"),
              axis.title.x = element_text(size=16, face="bold"),
              axis.title.y =element_text(size=16, face="bold"),
              plot.title = element_blank())

10.3 Tracks with Poultry

Overlaying BWTE movement paths on estimated poultry abundance (head counts binned into ordinal classes).

Qck_chk = crop(GlobChkn, extent(Domain.r))

Qck_chk2 = resample(Qck_chk, Domain.r)
Qck_chk2 = Qck_chk2+Domain.r
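#Bin poultry head counts into ordinal display classes:
#0 = <100, 1 = 100-1k, 2 = 1k-10k, 3 = 10k-100k, 4 = 100k-500k, 5 = 500k+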
Qck_chk2[Qck_chk2 < 100] = 0
Qck_chk2[Qck_chk2 >= 100 & Qck_chk2 < 1000] = 1
Qck_chk2[Qck_chk2 >= 1000 & Qck_chk2 < 10000] = 2
Qck_chk2[Qck_chk2 >= 10000 & Qck_chk2 < 100000] = 3
Qck_chk2[Qck_chk2 >= 100000 & Qck_chk2 < 500000] = 4
Qck_chk2[Qck_chk2 >= 500000] = 5

Qck_chk2 = disaggregate(Qck_chk2, fact=2, method="bilinear")

rng = seq(0, 5, 0.1)
mCols  = brewer.pal(9, "YlOrBr")[3:9]
cr0 = colorRampPalette(mCols)(n = 2000)
cr = colorRampPalette(c(c("lightgray",cr0)), 
         bias = 0.8)

Plt.a = levelplot(Qck_chk2, 
               #layout=c(2, 1),
               margin = FALSE,
               xlab = " ", ylab = NULL, 
               #main = "Estimated Duration (Residence time)",
               col.regions = cr, at = rng,
               colorkey =list(labels=list(at=c(0.05, 0.725, 1.5, 2.5, 3.5, 4.65),  
                              labels=c("0", "500", "1k", "10k", "100k", "500k+"), cex=1.1),
                              labels=list(cex=14),
                              space = "bottom"), 
                             par.strip.text = list(fontface='bold', cex=1),
               par.settings = list(axis.line = list(col = "black"),
                                   strip.background = list(col = 'transparent'), 
                                   strip.border = list(col = 'transparent')),
                                   scales = list(cex = 1.5)) + 
               latticeExtra::layer(sp.polygons(NAmer.reg, col = "darkgray", lwd = 1)) +
               latticeExtra::layer(sp.polygons(Lakes, col = "darkgray", fill= "lightgray", lwd = 1)) +
               latticeExtra::layer(sp.polygons(Paths, col = "black", lwd = 1)) 

Plt.a