1 Crash Clusters by Affinity Propagation

library(leaflet.extras)
library(apcluster)
library(magrittr)
library(dplyr)
library(leaflet)
library(rgdal)
library(rgeos)
library(geojsonio)
library(mapview)
library(contoureR)
library(geosphere)
library(cluster)
library(sp)
library(RColorBrewer)
library(foreign)

1.1 Data Loading

dados = read.dbf("/Users/fagne/OneDrive/r-files/CIET/acidentes2020/_Base/acidentes_2014a2020_WGS84.dbf")
dados = dados[dados$ANO > 2014, ]   # keep records from 2015 onward
table(dados$TIPO_ACID)
## 
##   ABALROAMENTO  ATROPELAMENTO      CAPOTAGEM         CHOQUE        COLISAO 
##          36700           4514            256           6024          24419 
##       EVENTUAL       INCENDIO NAO CADASTRADO          QUEDA     TOMBAMENTO 
##           1038             23              4           1944            180
table(dados$FERIDOS)
## 
##    -1     0     1     2     3     4     5     6     7     8     9    19    21 
##     1 50751 20508  3112   505   134    49    14    15     4     3     1     1 
##    25    35 
##     3     1
dados = dados[(dados$TIPO_ACID == "CHOQUE"), ]   # keep only CHOQUE records

sort(unique(dados$ANO))
## [1] 2015 2016 2017 2018 2019 2020
anos = length(unique(dados$ANO))
anos
## [1] 6
class(dados)
## [1] "data.frame"
x2 <- cbind(dados$LONGITUDE, dados$LATITUDE)
x2 <- x2[complete.cases(x2), ]
dim(x2)
## [1] 6024    2
head(x2)
##           [,1]      [,2]
## [1,] -51.05470 -30.23517
## [2,] -51.08157 -30.17278
## [3,] -51.08426 -30.23896
## [4,] -51.08560 -30.23843
## [5,] -51.08564 -30.14368
## [6,] -51.08599 -30.14484

1.2 Preparation

x1 <- x2                                # keep the full point set for reference
x2 <- x2[sample(nrow(x2), 5000), ]      # subsample 5000 points (no set.seed, so not reproducible)
x2 = as.data.frame(x2)
names(x2) = c("LONGITUDE", "LATITUDE")
head(x2)
save(x2, file = "data/x2-choque-999.rda")
dim(x1)
## [1] 6024    2
dim(x2)
## [1] 5000    2

1.3 Training

apres <- apcluster(negDistMat(r=2), x2, q=0.999)   # negative squared distances; q sets the preference quantile (higher q, more clusters)
plot(apres, x2)

summary(apres)
##   Length    Class     Mode 
##     1853 APResult       S4
save(apres, file = "data/apres2-choque-999.rda")

1.4 Obtaining Centroids

centroides = unique(apres@exemplars)
poly = data.frame()
centr_indice = 0
for (i in centroides){
  centr_indice = centr_indice + 1
  centr_lon = x2[i, 1]   # column 1 is LONGITUDE
  centr_lat = x2[i, 2]   # column 2 is LATITUDE
  poly = rbind(poly, c(centr_lon, centr_lat, centr_indice))
}
names(poly) = c("Lon", "Lat", "Cluster")
head(poly)
dim(poly)
## [1] 1853    3
exemplars = poly
save(exemplars, file = "data/exemplars-choque-999.rda")

1.5 Global Classification

predict.apcluster <- function(s, exemplars, newdata){
  # Similarity of every new point to every exemplar; each point is
  # assigned to the exemplar of highest similarity.
  simMat <- s(rbind(exemplars, newdata), sel=(1:nrow(newdata)) + nrow(exemplars))[1:nrow(exemplars), ]
  unname(apply(simMat, 2, which.max))
}
resultado <- list()

dados$cluster = 0
# Classify in chunks of 1000 rows (columns 2:3 hold longitude/latitude)
for(i in seq(from=1, to=length(dados$ID)-1000, by=1000)){
  inicio = i
  final = i+999
  resultado = predict.apcluster(negDistMat(r=2), x2[apres@exemplars, ], dados[inicio:final, 2:3])
  dados$cluster[inicio:final] = resultado
}
controle = length(dados$cluster) - final   # rows left over after the chunked loop

# Classify the remaining rows
resultado = predict.apcluster(negDistMat(r=2), x2[apres@exemplars, ], dados[(final + 1):length(dados$cluster), 2:3])
dados$cluster[(final + 1):length(dados$cluster)] = resultado
head(dados)
tail(dados)
save(dados, file = "data/acidentes-choque-999.rda")

1.6 Accidents per Cluster

pal <- colorFactor(
  palette = 'Dark2',
  domain = dados$cluster
)

leaflet(dados) %>%
  addTiles(group="Map") %>% 
  addCircles(group="Accidents", ~LONGITUDE, ~LATITUDE, weight = 0.1, radius=7, color=~pal(cluster),
             stroke = TRUE, fillOpacity = 0.8, popup=~paste("Cluster #: ", cluster,
             "<br>Year: ", ANO, "<br>Type: ", TIPO_ACID, "<br>Place: ", LOG1, "<br>UPS: ", UPS, sep = " ")) %>% 
  addLegend(group="Legend", "topright", colors= "", labels=paste("Classified into", summary(apres)[1], "clusters"), title="Accidents in Porto Alegre") %>% 
  addLayersControl(overlayGroups = c("Map", "Accidents", "Legend"),
                   options = layersControlOptions(collapsed = FALSE)) %>% 
  addProviderTiles(providers$CartoDB.DarkMatter)


1.7 Accidents per Cluster after the Cutoff

dados = dados[dados$cluster > 0, ]   # keep only points assigned to a cluster
pal <- colorFactor(
  palette = 'Dark2',
  domain = dados$cluster
)

leaflet(dados) %>%
  addTiles(group="Map") %>% 
  addCircles(group="Accidents", ~LONGITUDE, ~LATITUDE, weight = 0.1, radius=7, color=~pal(cluster),
             stroke = TRUE, fillOpacity = 0.8, popup=~paste("Cluster #: ", cluster,
             "<br>Year: ", ANO, "<br>Type: ", TIPO_ACID, "<br>Place: ", LOG1, "<br>UPS: ", UPS, sep = " ")) %>% 
  addLegend(group="Legend", "topright", colors= "", labels=paste("Classified into", summary(apres)[1], "clusters"), title="Accidents in Porto Alegre") %>% 
  addLayersControl(overlayGroups = c("Map", "Accidents", "Legend"),
                   options = layersControlOptions(collapsed = FALSE)) %>% 
  addProviderTiles(providers$CartoDB.DarkMatter)


1.8 Information Enrichment

dados = dados[dados$cluster > 0, ]
rm(apres)
clusters_encontrados = sort(unique(dados$cluster))
#clusters_encontrados
parq = dados
poly = data.frame()
for (i in clusters_encontrados){
  temp = parq[parq$"cluster" == i, ]
  # Convex hull indexes of the cluster's points (columns 2:3 are lon/lat)
  ch1 = convexHullAM_Indexes(temp[,2], temp[,3], includeColinear=FALSE, zeroBased = FALSE)
  poligono = temp[ch1, 2:3]
  area <- geosphere::areaPolygon(x = poligono)   # hull area in square meters
  acidentes = nrow(temp)
  pol = temp
  coordinates(pol) = ~LONGITUDE+LATITUDE
  centr_lon = gCentroid(pol, byid=FALSE)$x   # x = longitude
  centr_lat = gCentroid(pol, byid=FALSE)$y   # y = latitude
  # Keep only clusters with at least one accident per analyzed year
  if(nrow(temp) >= anos) {
    for (ii in ch1) {
      polying = temp[ii,]
      polying$area = area
      polying$acidentes = acidentes
      polying$centroide_lon = centr_lon
      polying$UPS = sum(temp$UPS)
      polying$centroide_lat = centr_lat
      poly = rbind(poly, polying)
    }
  }
}
head(poly)
tail(poly)
mean(poly$area)
## [1] 7299.947
median(poly$area)
## [1] 4056.534
minimoquantil = quantile(poly$area, probs = 0.01)   # 1st percentile of hull areas
maximoquantil = quantile(poly$area, probs = 0.90)   # 90th percentile of hull areas
quantile(poly$area, probs = c(0.01, 0.25, 0.5,0.75,0.99))
##          1%         25%         50%         75%         99% 
##    16.24783  1228.36379  4056.53382  8792.83624 27229.20382
poly = poly[(poly$area < maximoquantil) & (poly$area > minimoquantil), ]
dim(poly)
## [1] 1350   48
class(poly)
## [1] "data.frame"
pol = poly

1.8.1 Area

areas = poly[!duplicated(poly$cluster),]   # one row per cluster
summary(areas$area)
##     Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
##    45.81  1019.64  3604.10  4356.17  7038.17 12726.48

1.8.2 UPS

summary(areas$UPS)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##    6.00   10.00   13.00   14.44   18.00   49.00

1.9 Combining Information

#save(pol, file = "data/pol.Rds")
#load("data/acidentes.rda")
#load("data/apres2.rda")
#load("data/exemplars.rda")
#load("data/x2.rda")
dados = poly[,c(1:9, 11:13,38,41,44:48)]
head(dados)
names(dados) = c("ID","lat", "lon", "log1", "Log2", "Pred", "Local", "Tipo", "Via", "Data", "Dia", "Hora", 
                 "Fx_horaria","UPS", "box_id", "Area", "Acidentes", "CentLon", "CentLat")
dados$id = (dados$box_id * 11)
dados$group = dados$id
head(dados)
dadostemp = dados[, c(15:21)]
coordinates(dados)=c("lat","lon")
df = dados
df
## class       : SpatialPointsDataFrame 
## features    : 1350 
## extent      : -51.25512, -51.10405, -30.15666, -29.9799  (xmin, xmax, ymin, ymax)
## crs         : NA 
## variables   : 19
## names       :     ID,            log1,                                      Log2, Pred,      Local,   Tipo,                                                    Via,       Data,         Dia,  Hora, Fx_horaria, UPS, box_id,             Area, Acidentes, ... 
## min values  : 601159, AL ALCEU WAMOSY,                           AV ASSIS BRASIL,    0, Cruzamento, CHOQUE,                                    10 AV IRENE RUPERTI, 01/01/2015,     DOMINGO, 00:00,          0,   6,     18, 45.8140107993386,         6, ... 
## max values  : 683115,   VDT SAO JORGE, TUN NS DA CONCEICAO-AC RUA SARMENTO LEITE, 9505, Logradouro, CHOQUE, VDT IMPERATRIZ LEOPOLDINA & R ANTONIO CARLOS GUIMARAES, 31/08/2019, TERCA-FEIRA, 23:55,         23,  49,   1851, 12726.4798464105,        21, ...
data <- data.frame(box_id=unique(df$box_id),row.names=unique(df$id))
head(data)
dadostemp2 = dados[!duplicated(dados$id),]
#head(dadostemp2, 15)
data = as.data.frame(cbind(data, dadostemp2@data))

1.10 Polygon Creation

points2polygons <- function(df,data) {
  # Build one Polygon from the points sharing a given id/group pair
  get.grpPoly <- function(group,ID,df) {
    Polygon(coordinates(df[df$id==ID & df$group==group,]))
  }
  # Wrap all groups of an id into a single Polygons object
  get.spPoly  <- function(ID,df) {
    Polygons(lapply(unique(df[df$id==ID,]$group),get.grpPoly,ID,df),ID)
  }
  spPolygons  <- SpatialPolygons(lapply(unique(df$id),get.spPoly,df))
  SpatialPolygonsDataFrame(spPolygons,match.ID=T,data=data)
}

# Build the SpatialPolygonsDataFrame
data$Log2 = NULL
spDF <- points2polygons(df,data)
spDF
## class       : SpatialPolygonsDataFrame 
## features    : 255 
## extent      : -51.25512, -51.10405, -30.15666, -29.9799  (xmin, xmax, ymin, ymax)
## crs         : NA 
## variables   : 19
## names       : box_id,     ID,                     log1, Pred,      Local,   Tipo,                                                 Via,       Data,         Dia,  Hora, Fx_horaria, UPS, box_id.1,             Area, Acidentes, ... 
## min values  :     18, 601926,            AV A J RENNER,    0, Cruzamento, CHOQUE,               1000 AV DA LEGALIDADE E DA DEMOCRACIA, 01/04/2016,     DOMINGO, 00:05,          0,   6,       18, 45.8140107993386,         6, ... 
## max values  :   1851, 682511, VDT ABDIAS DO NASCIMENTO, 9505, Logradouro, CHOQUE, VDT ABDIAS DO NASCIMENTO & AV EDVALDO PEREIRA PAIVA, 31/08/2019, TERCA-FEIRA, 23:40,         23,  49,     1851, 12726.4798464105,        21, ...
class(spDF)
## [1] "SpatialPolygonsDataFrame"
## attr(,"package")
## [1] "sp"
spDF@data$group = 1
spDF@data$box_id = NULL
dim(spDF@data)
## [1] 255  18
dadostemp = unique(dadostemp)
spDF@data = merge(spDF@data, dadostemp, by = "box_id")
dim(spDF@data)
## [1] 255  24
# Drop the columns that are no longer needed
spDF$log1 = spDF$Pred = spDF$Tipo = spDF$Via = spDF$Dia = spDF$Data = spDF$Local = spDF$Hora = spDF$Fx_horaria = NULL
spDF$CentLon.x = spDF$CentLat.x = spDF$CentLon.y = spDF$CentLat.y = NULL
spDF$id.y = spDF$group.y = spDF$group.x = spDF$Area.x = NULL
plot(spDF,col=spDF$box_id+1)

library(rgdal)
rgdal::writeOGR(obj = spDF,
                dsn = "data/choque.json",
                layer = "myParq",
                driver = "GeoJSON",
                overwrite_layer = TRUE)

Accidents per Cluster

# Load the SpatialPolygonsDataFrame
parqs <- geojsonio::geojson_read("data/choque.json", what = "sp")
# Inspect the object
parqs
## class       : SpatialPolygonsDataFrame 
## features    : 255 
## extent      : -51.25512, -51.10405, -30.15666, -29.9799  (xmin, xmax, ymin, ymax)
## crs         : +proj=longlat +datum=WGS84 +no_defs 
## variables   : 7
## names       : box_id,     ID, UPS, Acidentes.x,  id.x,           Area.y, Acidentes.y 
## min values  :     18, 601926,   6,           6,   198, 45.8140107993386,           6 
## max values  :   1851, 682511,  49,          21, 20361, 12726.4798464105,          21
dim(parqs)
## [1] 255   7
library(raster)
projection(parqs)
## [1] "+proj=longlat +datum=WGS84 +no_defs"
library(mapview)
mapviewPalette(name = "Viridis")
library(RColorBrewer)
mapview(parqs, zcol = "Acidentes.x", col.regions=brewer.pal(9, "YlOrRd"))

Accident Density per km²

parqs@data$densidade = (parqs@data$Acidentes.x/parqs@data$Area.y)*1000000   # accidents per square km (area is in m^2)
mapview(parqs, zcol = "densidade", col.regions=brewer.pal(9, "YlOrRd"))

Accidents per Polygon

hist(parqs@data$Acidentes.x, col = "magenta")

Accidents per km²

hist(parqs@data$densidade, col = "orange")

Densest Locations

quantile(parqs$densidade, probs = 0.99)
##     99% 
## 85820.4
temp = parqs[parqs$densidade > quantile(parqs$densidade, probs = 0.001), ]   # TODO: update this cutoff
mapview(temp, zcol = "densidade")

1.11 Computing the Neighborhood Matrix

projection(parqs)
## [1] "+proj=longlat +datum=WGS84 +no_defs"
parqstemp = parqs
require(sf)
shape <- read_sf(dsn = ".", layer = "mercator_32722_2014_2019")
projection(shape)
## [1] "+proj=utm +zone=22 +south +datum=WGS84 +units=m +no_defs"
# Note: this only relabels the CRS of parqstemp; it does not reproject the coordinates
projection(parqstemp) = projection(shape)

ccods = coordinates(parqs)   # polygon centroids (lon/lat)
temps = as.data.frame(ccods)
cord.dec = SpatialPoints(cbind(temps$V1, temps$V2), proj4string=CRS("+proj=longlat"))
cord.UTM <- spTransform(cord.dec, CRS("+init=epsg:32722"))   # UTM zone 22S, in meters
ccods = as.data.frame(cord.UTM)
points = cbind(ccods[,1],ccods[,2])
head(points)
##          [,1]    [,2]
## [1,] 484622.5 6674160
## [2,] 482782.2 6680251
## [3,] 478092.6 6678279
## [4,] 480714.4 6675229
## [5,] 480902.2 6681184
## [6,] 477396.9 6669943
library(spdep)
distNeighbors = 400   # neighborhood radius in meters
dnb = dnearneigh(points, 0, distNeighbors)
class(dnb)
## [1] "nb"
subsets = as.data.frame(matrix(dnb))   # one neighbor vector per region (0 codes "no neighbors")
class(subsets)
## [1] "data.frame"
subsets = subsets$V1
lengths(subsets)
##   [1] 1 1 1 1 2 1 1 1 1 4 3 1 2 2 1 1 1 1 4 2 1 1 1 1 3 2 1 1 1 2 1 2 3 1 1 4 2
##  [38] 1 4 1 5 1 2 5 1 2 1 1 1 2 4 2 3 2 1 3 4 1 1 1 1 1 1 4 2 1 1 1 1 1 2 4 2 2
##  [75] 2 2 4 5 2 2 1 2 6 2 3 2 1 1 4 1 1 4 2 2 1 1 3 1 3 3 2 2 1 2 1 1 4 1 1 1 5
## [112] 1 1 4 1 1 1 2 1 1 3 4 3 3 2 3 1 1 1 2 3 1 1 5 1 4 1 1 3 2 1 7 1 2 3 1 1 1
## [149] 2 2 4 1 1 2 1 7 2 4 3 5 2 1 3 4 2 1 3 1 1 4 5 1 2 3 5 1 1 1 1 1 6 1 2 1 1
## [186] 8 2 1 2 2 1 1 1 1 2 2 1 1 1 1 3 1 1 1 1 3 1 2 1 1 1 1 1 1 1 1 1 4 3 1 3 6
## [223] 5 1 1 1 1 1 2 1 1 1 3 8 1 1 1 1 1 2 2 3 2 3 7 4 1 2 4 1 5 1 2 7 1
parqs$n = 1
sub = which(subsets == '0')   # regions with no neighbors within the radius
sub
##  [1]   4   8   9  16  17  24  35  40  42  45  58  59  62  63  68  70  81  90  95
## [20]  98 105 112 113 115 117 127 128 141 146 155 166 168 176 177 178 179 180 182
## [39] 184 185 188 191 192 193 194 197 200 202 203 204 210 211 213 214 216 224 225
## [58] 226 227 228 231 235 239 252
parqs$n[sub] = 0
length(parqs)
## [1] 255
parqs = parqs[parqs$n > 0,]   # drop regions without neighbors
length(parqs)
## [1] 191
length(dnb)
## [1] 255
#dim(ccods)
ccods = ccods[-sub, ]   # drop the same regions from the coordinate set
dim(ccods)
## [1] 191   2
points = cbind(ccods[,1],ccods[,2])
head(points)
##          [,1]    [,2]
## [1,] 484622.5 6674160
## [2,] 482782.2 6680251
## [3,] 478092.6 6678279
## [4,] 480902.2 6681184
## [5,] 477396.9 6669943
## [6,] 485402.0 6674491
#dnb = dnearneigh(points,0,2000)
dnb = dnearneigh(points,0,distNeighbors)
dnb
## Neighbour list object:
## Number of regions: 191 
## Number of nonzero links: 458 
## Percentage nonzero weights: 1.255448 
## Average number of links: 2.397906
length(dnb)
## [1] 191

1.12 Neighborhood Matrix

1.12.1 Binary Matrix

W.Bin = nb2mat(neighbours = dnb, style = "B")
#parqs <- parqs[!sub,]

1.12.2 Row-normalized Matrix

W.Normal = nb2mat(neighbours = dnb, style = "W")
#head(W.Normal)

1.13 KNN

vizinhos_4 <- knearneigh(points, k = 4)   # four nearest neighbors of each centroid
class(vizinhos_4)
## [1] "knn"
head(vizinhos_4$nn)
##      [,1] [,2] [,3] [,4]
## [1,]   48    6   87  170
## [2,]   71   57   92  100
## [3,]  180  129   34  186
## [4,]  160   82   65   15
## [5,]  157   49  120   83
## [6,]  170   37    1  162
vizinhanca_4 <- knn2nb(vizinhos_4)
class(vizinhanca_4)
## [1] "nb"

Preparation for Global and Local Analysis

mv_simpl = st_as_sf(parqs)
plot(mv_simpl)

class(mv_simpl)
## [1] "sf"         "data.frame"
library(dplyr)
mv_simpl =  mv_simpl %>% dplyr::select(Acidentes.y)
#mv_simpl <- st_simplify(mv_simpl, preserveTopology = FALSE, dTolerance = 1)
class(mv_simpl)
## [1] "sf"         "data.frame"
mapview::mapview(mv_simpl)
sf::sf_use_s2(FALSE)   # disable spherical (s2) geometry to avoid topology errors
## Spherical geometry (s2) switched off
mv_simpl = st_as_sf(mv_simpl)
vizinhanca_neig <- poly2nb(mv_simpl)           # contiguity-based neighbors
ShapeNEIG = parqs
ShapeNEIG$vizinhos = card(vizinhanca_neig)     # number of neighbors per polygon
ShapeNEIG <- subset(ShapeNEIG, ShapeNEIG$vizinhos != 0)   # drop polygons without neighbors
#vizinhanca2neig <- poly2nb(ShapeNEIG)

1.14 Computing the Global Moran's I

The global spatial autocorrelation indices are computed using both the normality and the permutation tests.
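
To make explicit what these tests evaluate, here is a minimal sketch (not part of the original pipeline, assuming the `parqs` and `dnb` objects built above) of Moran's I computed by hand from the row-standardized weight matrix:

# Sketch: global Moran's I by hand; moran.test() below should report the same statistic
W  <- nb2mat(dnb, style = "W")                  # row-standardized weights
z  <- parqs$Acidentes.y - mean(parqs$Acidentes.y)
n  <- length(z)
S0 <- sum(W)                                    # total weight (equals n for style "W")
(n / S0) * sum(z * (W %*% z)) / sum(z^2)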

1.14.1 Normality Test

moran.test(parqs$Acidentes.y,listw=nb2listw(dnb, style = "W"), randomisation= FALSE)
## 
##  Moran I test under normality
## 
## data:  parqs$Acidentes.y  
## weights: nb2listw(dnb, style = "W")    
## 
## Moran I statistic standard deviate = 1.9727, p-value = 0.02426
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic       Expectation          Variance 
##       0.146205064      -0.005263158       0.005895436

1.14.2 Permutation (Pseudo-significance) Test

moran.test(parqs$Acidentes.y,listw=nb2listw(dnb, style = "W"), randomisation= TRUE)
## 
##  Moran I test under randomisation
## 
## data:  parqs$Acidentes.y  
## weights: nb2listw(dnb, style = "W")    
## 
## Moran I statistic standard deviate = 2.0283, p-value = 0.02126
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic       Expectation          Variance 
##       0.146205064      -0.005263158       0.005576513

1.14.3 Monte Carlo Simulation

moran.mc(parqs$Acidentes.y, listw=nb2listw(dnb, style = "W"), nsim=999)
## 
##  Monte-Carlo simulation of Moran I
## 
## data:  parqs$Acidentes.y 
## weights: nb2listw(dnb, style = "W")  
## number of simulations + 1: 1000 
## 
## statistic = 0.14621, observed rank = 968, p-value = 0.032
## alternative hypothesis: greater

1.14.4 Permutation Test

Unlike the other global tests, the EBI test applies exclusively to rates, so only the permutation option is available.
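
For intuition, the raw rate behind the test is simply cases over exposure, here accidents per hull area (the same quantity used in the Monte Carlo section below); a quick sketch:

taxa_bruta <- parqs$Acidentes.y / parqs$Area.y   # raw rate: accidents per m^2 of hull area
summary(taxa_bruta)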

EBImoran.mc(parqs$Acidentes.y,parqs$Area.y,
            nb2listw(dnb, style="B", zero.policy=TRUE), nsim=999, zero.policy=TRUE)
## 
##  Monte-Carlo simulation of Empirical Bayes Index (mean subtracted)
## 
## data:  cases: parqs$Acidentes.y, risk population: parqs$Area.y
## weights: nb2listw(dnb, style = "B", zero.policy = TRUE)
## number of simulations + 1: 1000
## 
## statistic = 0.0078251, observed rank = 669, p-value = 0.331
## alternative hypothesis: greater

1.14.5 Monte Carlo Simulation

shapeCG.p = parqs$Acidentes.y/parqs$Area.y   # raw rate: accidents per m^2
moran.mc(shapeCG.p, nb2listw(dnb, style="B", zero.policy=TRUE),
         nsim=999, zero.policy=TRUE)
## 
##  Monte-Carlo simulation of Moran I
## 
## data:  shapeCG.p 
## weights: nb2listw(dnb, style = "B", zero.policy = TRUE)  
## number of simulations + 1: 1000 
## 
## statistic = -0.0071803, observed rank = 577, p-value = 0.423
## alternative hypothesis: greater

1.15 Computing the Global Geary's C Statistic

1.15.1 Normality Test

geary.test(parqs$Acidentes.y, listw=nb2listw(dnb, style = "W"), randomisation= FALSE)
## 
##  Geary C test under normality
## 
## data:  parqs$Acidentes.y 
## weights: nb2listw(dnb, style = "W") 
## 
## Geary C statistic standard deviate = 1.6202, p-value = 0.05259
## alternative hypothesis: Expectation greater than statistic
## sample estimates:
## Geary C statistic       Expectation          Variance 
##       0.872527908       1.000000000       0.006189688

1.15.2 Permutation Test

geary.test(parqs$Acidentes.y, listw=nb2listw(dnb, style = "W"), randomisation=TRUE)
## 
##  Geary C test under randomisation
## 
## data:  parqs$Acidentes.y 
## weights: nb2listw(dnb, style = "W") 
## 
## Geary C statistic standard deviate = 1.4532, p-value = 0.07308
## alternative hypothesis: Expectation greater than statistic
## sample estimates:
## Geary C statistic       Expectation          Variance 
##        0.87252791        1.00000000        0.00769417

1.15.3 Monte Carlo Simulation

geary.mc(parqs$Acidentes.y, listw=nb2listw(dnb, style = "W"),nsim=999)
## 
##  Monte-Carlo simulation of Geary C
## 
## data:  parqs$Acidentes.y 
## weights: nb2listw(dnb, style = "W") 
## number of simulations + 1: 1000 
## 
## statistic = 0.87253, observed rank = 62, p-value = 0.062
## alternative hypothesis: greater

1.16 Computing the Global Getis-Ord Statistic

Getis-Ord's G is an indicator that measures the local concentration of a spatially distributed attribute variable.
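
As a sketch of what the statistic captures (an illustration under the same `parqs` and `dnb` objects, not part of the original pipeline), the global G is the share of the total cross-product x_i x_j that falls on pairs of polygons linked in the binary weight matrix:

# Sketch: global G by hand; globalG.test() below should agree
Wb  <- nb2mat(dnb, style = "B")       # binary weights, zero diagonal
x   <- parqs$Acidentes.y
num <- sum(Wb * outer(x, x))          # sum of w_ij * x_i * x_j
den <- sum(outer(x, x)) - sum(x^2)    # sum over all pairs with i != j
num / den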

globalG.test(parqs$Acidentes.y, nb2listw(dnb, style="B"))
## 
##  Getis-Ord global G statistic
## 
## data:  parqs$Acidentes.y 
## weights: nb2listw(dnb, style = "B") 
## 
## standard deviate = 2.9463, p-value = 0.001608
## alternative hypothesis: greater
## sample estimates:
## Global G statistic        Expectation           Variance 
##       1.360299e-02       1.262056e-02       1.111902e-07

1.17 Local Getis-Ord

localG(parqs$Acidentes.y, nb2listw(dnb, style="B"), zero.policy=NULL, spChk=NULL, return_internals=FALSE)
##   [1]  1.209259676 -0.715551687  1.694994775  1.714693677  1.691947361
##   [6]  0.251517864 -0.961716763  0.424672929 -0.238803381  0.704231968
##  [11] -0.667000731 -0.715551687 -0.235976013  3.183311951  1.047319338
##  [16]  0.243884304 -0.713904240  1.692430116 -0.135659408  0.003602298
##  [21] -0.238803381 -0.721491067  0.726571990  0.003602298 -0.721491067
##  [26]  0.348998548  1.268053374 -0.718077546  0.739191874 -0.338615978
##  [31] -0.721491067  2.234266835 -0.976093857  2.076581445  0.338350609
##  [36] -0.338615978  0.248666898 -0.715551687 -0.719218394  0.010793943
##  [41]  0.744319369 -0.676409558 -1.256324082 -1.018212260 -0.238803381
##  [46] -0.389249472 -0.724725402 -0.713135704 -0.229222191  0.253217977
##  [51]  0.014409888  1.216939967  1.775179006 -0.718077546 -0.334606856
##  [56] -0.475742867  0.010793943 -0.327824954  0.003602298 -1.023052530
##  [61] -1.204703711  2.078505743  0.694410351 -0.680834254  1.743626911
##  [66]  0.624405114 -1.023052530 -0.966151455 -0.676409558 -0.721491067
##  [71]  0.243884304  0.501328509 -0.721491067  0.248404113  0.345820573
##  [76] -1.023052530 -0.718077546 -0.966151455 -1.256324082 -0.399141959
##  [81]  0.003602298  0.704231968 -0.238803381  0.003602298  0.243884304
##  [86] -0.724725402 -0.721491067 -0.238803381 -0.721491067  0.987577314
##  [91] -0.968007781 -0.721491067 -1.012294638 -0.238803381  1.691947361
##  [96]  2.690512466 -1.211290159 -0.415825576 -0.695991745  0.688038849
## [101] -0.695991745 -0.721491067 -0.680834254  0.144506761 -0.721491067
## [106] -0.721491067  4.696733972 -0.718077546 -0.961716763 -0.235976013
## [111] -0.718077546 -0.402574034 -0.334606856  2.890808914  3.658315401
## [116] -1.018212260 -0.686321003 -0.721491067 -0.715551687  0.348998548
## [121] -1.014630668  2.496775820  0.243884304 -0.718077546 -0.338615978
## [126]  3.631542149  0.694410351 -0.455842888  0.424672929  0.114834571
## [131] -0.680834254 -0.233442394 -0.122394303 -0.466100382  0.356644730
## [136] -1.256324082 -0.715551687 -1.447690659 -0.508440471 -0.238803381
## [141] -1.018212260 -0.960897309  2.296691429  2.213139927  1.719834962
## [146]  0.330068162  0.345820573 -1.012294638  2.061643165  1.032603952
## [151]  0.014409888 -0.238803381 -0.238803381 -0.415825576 -0.238803381
## [156] -0.976157913 -0.713256462 -0.680834254  0.248666898 -0.713135704
## [161] -0.721491067  0.248666898 -0.718729815 -0.976157913 -0.227526255
## [166] -0.131076365  2.805225191  0.551205943 -1.023052530  0.730776191
## [171]  1.210328584 -0.402574034  2.514247726 -0.718077546  0.734228915
## [176] -0.238803381 -0.680834254 -1.023052530  1.268053374 -0.322625354
## [181] -0.690728261  3.261175532 -0.968007781 -0.718077546 -0.676409558
## [186]  1.708098385  0.248666898  0.779939382  0.003602298  3.251426554
## [191] -0.238803381
## attr(,"gstari")
## [1] FALSE
## attr(,"call")
## localG(x = parqs$Acidentes.y, listw = nb2listw(dnb, style = "B"), 
##     zero.policy = NULL, spChk = NULL, return_internals = FALSE)
## attr(,"class")
## [1] "localG"

1.18 Local Moran's I

All of the analyses so far have been at the global scale. However, a local analysis of the study is also needed. It can be carried out with the local indicator of spatial association (LISA), which requires computing the local Moran's I.
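
As a sketch (assuming the `parqs` and `dnb` objects from above), the Ii values returned by localmoran() below can be reproduced by hand: each polygon's z-score times the spatial lag of the z-scores, scaled by the variance term m2.

# Sketch: local Moran's I_i by hand; compare with the Ii column of localmoran()
W  <- nb2mat(dnb, style = "W")
z  <- parqs$Acidentes.y - mean(parqs$Acidentes.y)
m2 <- sum(z^2) / length(z)
Ii <- (z / m2) * as.vector(W %*% z)
head(Ii)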

ShapePB.mloc <- localmoran(parqs$Acidentes.y, listw=nb2listw(dnb, style="W")) 
head(ShapePB.mloc)
##           Ii          E.Ii     Var.Ii       Z.Ii Pr(z != E(Ii))
## 1 -0.8728167 -0.0027179818 0.51772352 -1.2092597     0.22656310
## 2 -0.1782001 -0.0003236399 0.06179521 -0.7155517     0.47426821
## 3  0.4210290 -0.0003236399 0.06179521  1.6949948     0.09007641
## 4 -0.8728167 -0.0027179818 0.25749212 -1.7146937     0.08640140
## 5 -1.2201250 -0.0027179818 0.51772352 -1.6919474     0.09065601
## 6  0.1813374 -0.0028145278 0.53606179  0.2515179     0.80141375

1.19 Probability Map (Local Moran's I Significance)

From the p-values of the local Moran's I it is possible to build a probability map.

library(classInt)
# Fixed significance breaks for the local Moran p-values
INT4 <- classIntervals(ShapePB.mloc[,5], style="fixed", 
                       fixedBreaks=c(0, 0.01, 0.05, 0.10))
CORES.4 <- c(rev(brewer.pal(3, "Reds")), brewer.pal(3, "Blues"))
COL4 <- findColours(INT4, CORES.4)
parqs$COL = COL4
parqs$p_valor = ifelse(parqs$COL == "#DE2D26", "[0,0.01)",
                ifelse(parqs$COL == "#FC9272", "[0.01,0.05)", "[0.05,0.1]"))
plot(parqs, col=COL4)
title("Local Moran's I p-values by centroid distance")
TB4 <- attr(COL4, "table")
legtext <- paste(names(TB4))
legend("bottomright", fill=attr(COL4, "palette"), legend=legtext, 
       bty="n", cex=0.7, y.inter=0.7)

mapview(parqs, zcol = "p_valor", col.regions=c("red", "orange", "green"))
temp = parqs[parqs$p_valor != "[0.05,0.1]", ]
mapview(temp, zcol = "p_valor", col.regions=c("red", "orange"))

1.19.1 Building the W Neighborhood Matrix

ShapeCG.nb1.mat <- nb2mat(dnb)   # defaults to style "W" (row-standardized)

1.19.2 Standardized Accident Incidence

Acidentes_SD <- scale(parqs$Acidentes.y)   # z-scores of the accident counts

1.19.3 Neighborhood Mean of the Standardized Incidence

Acidentes_W <- ShapeCG.nb1.mat %*% Acidentes_SD   # spatial lag: mean z-score of the neighbors

2 Moran Scatterplot

plot(Acidentes_SD, Acidentes_W, xlab="Z", ylab="WZ")
abline(v=0, h=0)
title("Moran Scatterplot by Centroid Distance")

# Classify each polygon into a Moran scatterplot quadrant
Q <- vector(mode = "numeric", length = nrow(ShapePB.mloc))
Q[(Acidentes_SD>0  & Acidentes_W > 0)] <- 1    # high-high
Q[(Acidentes_SD<0  & Acidentes_W < 0)] <- 2    # low-low
Q[(Acidentes_SD>=0 & Acidentes_W < 0)] <- 3    # high-low
Q[(Acidentes_SD<0  & Acidentes_W >= 0)]<- 4    # low-high
signif=0.05   # significance cutoff for the LISA map
parqs$Q = Q

3 LISA Map

Q[ShapePB.mloc[,5] > signif] <- 5   # mark non-significant polygons
CORES.5 <- c("blue", "green", "red", "yellow", "gray", rgb(0.95,0.95,0.95))
#CORES.5 <- c(1:5, rgb(0.95,0.95,0.95))
parqs$cores5Q = CORES.5[Q]
plot(parqs, col=CORES.5[Q])
title("LISA Map by Centroid Distance")
legend("bottomright", c("Q1(+/+)", "Q2(-/-)", "Q3(+/-)", "Q4(-/+)","NS"), 
       fill=CORES.5)

CORES.5[Q][1:5]
## [1] "gray" "gray" "gray" "gray" "gray"
head(CORES.5[Q])
## [1] "gray" "gray" "gray" "gray" "gray" "gray"
#save(parqs, file = "parqsFinal-975.Rds")
# A-A = high-high, B-B = low-low, A-B = high-low, B-A = low-high, NA = not significant
parqs$cores5 = ifelse(parqs$cores5Q == "blue", "A-A", ifelse(parqs$cores5Q == "green", "B-B",
               ifelse(parqs$cores5Q == "red", "A-B", ifelse(parqs$cores5Q == "yellow", "B-A", "NA"))))
mapview(parqs, zcol = "cores5", col.regions=c("red", "orange", "green", "yellow", "grey"))
temp = parqs[parqs$cores5 == "A-A", ]
mapview(temp, zcol = "cores5", col.regions=c("red", "orange", "green", "yellow", "grey"))
mapview(temp, zcol = "Acidentes.x", col.regions=brewer.pal(9, "YlOrRd"))