GitHub Documents

This is an R Markdown format used for publishing markdown documents to GitHub. When you click the Knit button, all R code chunks are run and a markdown file (.md) suitable for publishing to GitHub is generated.
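For context, this output format is requested in the .Rmd YAML header; a minimal sketch (the title here is a placeholder):

``` yaml
---
title: "Storm Analysis"
output: github_document
---
```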

# Chunk options message = FALSE and warning = FALSE hide package startup messages and warnings
library(sf)
library(tidyverse)
library(dplyr)
library(ggmap)
library(rnoaa)
library(spData)
data(world)
data(us_states)
knitr::opts_chunk$set(cache=TRUE)  # cache the results for quick compiling
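The message and warning options mentioned above can also be set globally in the setup chunk rather than per chunk; a minimal sketch:

``` r
# Hide package startup messages and warnings for all chunks
knitr::opts_chunk$set(message = FALSE, warning = FALSE)
```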
# Step 1: Read storm data. Use read_sf() to read the storm shapefile.

# Note: setwd() with an absolute path is machine-specific; adjust as needed
setwd("/Users/Gabby Thom/Downloads/R Data Science/Week 8")
storms <- read.csv("08b_data/ibtracs.NA.list.v04r00.csv",
                   header = TRUE, stringsAsFactors = FALSE)

storms_sf <- read_sf('08b_data/IBTrACS.NA.list.v04r00.points.shp',  
                     quiet = T,  stringsAsFactors = FALSE)
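A few optional sanity checks on the imported layer (a sketch; the exact attribute columns depend on the IBTrACS release):

``` r
names(storms_sf)                                   # attribute columns
st_geometry_type(storms_sf, by_geometry = FALSE)   # expect POINT
st_crs(storms_sf)                                  # coordinate reference system
```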

# Step 2: Wrangle the data.
# Filter to storms from 1950 to the present with filter().

# Use mutate_if() from dplyr to convert the -9999.0 missing-value code to NA
# in all numeric columns:
# mutate_if(is.numeric, function(x) ifelse(x == -9999.0, NA, x))
storms_sf_filtered <- filter(storms_sf, SEASON >= 1950) %>%
  mutate_if(is.numeric, function(x) ifelse(x == -9999.0, NA, x))
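mutate_if() is superseded in newer dplyr; an equivalent sketch using across() (assuming dplyr >= 1.0):

``` r
storms_sf_filtered <- storms_sf %>%
  filter(SEASON >= 1950) %>%
  mutate(across(where(is.numeric), ~ ifelse(.x == -9999.0, NA, .x)))
```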

# Add a column for the decade value.
# mutate() derives new variables from existing variables;
# supplying a name on the left of = creates a new column with that name.

storms_wdecade <- storms_sf_filtered %>%
  mutate(decade = year - (year %% 10))
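The modulo arithmetic floors each year to the start of its decade; for example, 1957 %% 10 is 7, so 1957 - 7 = 1950:

``` r
yr <- c(1957, 1960, 2004)
yr - (yr %% 10)   # 1950 1960 2000
```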

# Use st_bbox() to identify the bounding box of the storm data and save it
# as an object called region.

region <- st_bbox(storms_wdecade)
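st_bbox() returns a named vector in the order xmin, ymin, xmax, ymax, which is why region[c(1, 3)] gives the x limits and region[c(2, 4)] the y limits in coord_sf() below:

``` r
region[c("xmin", "xmax")]   # same values as region[c(1, 3)]
region[c("ymin", "ymax")]   # same values as region[c(2, 4)]
```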

# Step 3: Make the plot.

world_new <- filter(world, name_long != "Antarctica")  # drop Antarctica from the base map

ggplot() +
  geom_sf(data = world_new) +  # puts down the base map
  stat_bin2d(data = storms_wdecade,
             aes(x = st_coordinates(storms_wdecade)[, 1],
                 y = st_coordinates(storms_wdecade)[, 2]),
             bins = 100) +
  facet_wrap(~decade) +
  scale_fill_distiller(palette = "YlOrRd", trans = "log",
                       direction = -1, breaks = c(1, 10, 100, 1000)) +
  coord_sf(ylim = region[c(2, 4)], xlim = region[c(1, 3)]) +
  xlab(label = "longitude") +
  ylab(label = "latitude") +
  ggtitle("Number of storms that hit the United States", subtitle = "after 1950")
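Calling st_coordinates() twice inside aes() recomputes the coordinate matrix; one alternative (a hypothetical refactor, not the original approach) extracts the coordinates once into plain columns:

``` r
# X and Y are the column names st_coordinates() produces
storm_coords <- storms_wdecade %>%
  st_drop_geometry() %>%
  bind_cols(as.data.frame(st_coordinates(storms_wdecade)))
# then: stat_bin2d(data = storm_coords, aes(x = X, y = Y), bins = 100)
```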

# Step 4: Calculate a table of the five states with the most storms.

storms_wdecade_crs <- st_crs(storms_wdecade)

# Reproject the states layer to match the storm data's CRS
states <- st_transform(us_states, storms_wdecade_crs)

states <- rename(states, StatesName = NAME)

# Keep only storm points that fall inside a state (left = FALSE gives an inner spatial join)
storm_states <- st_join(storms_wdecade, states, join = st_intersects, left = FALSE)
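st_join() expects both layers in the same CRS, which the st_transform() call above guarantees; a quick check (a sketch):

``` r
st_crs(storms_wdecade) == st_crs(states)   # should be TRUE
```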

# Create a table of the states with the largest number of unique storms.

storm_states %>%
  group_by(StatesName) %>%
  count(SID) %>%            # SID = storm identifier; one row per state/storm pair
  count(StatesName) %>%     # number of unique storms per state
  select(StatesName, n) %>%
  ungroup() %>%
  arrange(desc(n)) %>%      # order states from the most unique storms to the fewest
  slice(1:5) %>%            # take the top five states
  st_drop_geometry() %>%    # remove the geometry column from the table
  rename("Unique Storms" = n) %>%
  rename("State" = StatesName) %>%
  knitr::kable()
| State          | Unique Storms |
|:---------------|--------------:|
| Florida        |           131 |
| North Carolina |            84 |
| Texas          |            69 |
| Georgia        |            68 |
| Louisiana      |            63 |
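As an aside, the double count() above first collapses to one row per state/storm pair and then counts those rows per state; an equivalent, arguably more direct sketch uses distinct():

``` r
storm_states %>%
  st_drop_geometry() %>%          # drop geometry first so the grouping stays fast
  distinct(StatesName, SID) %>%   # one row per state/storm pair
  count(StatesName, sort = TRUE, name = "Unique Storms") %>%
  slice(1:5) %>%
  rename(State = StatesName) %>%
  knitr::kable()
```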