library(httr)
library(dplyr)
library(stringi)
library(stringr)
library(tidyr)
library(lubridate)
library(Nippon)
library(purrr)
library(leaflet)
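# Fetch the two chocolate-related series (チョコレート and チョコレート菓子) from the
# e-Stat API; the application ID is read from the ESTAT_KEY environment variable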
res_data <- GET(
  "http://api.e-stat.go.jp/", path = "rest/2.0/app/json/getStatsData",
  query = list(
    appId = Sys.getenv("ESTAT_KEY"),
    statsDataId = "0003103532",
    cdCat01 = "010800130,010800140"
  ))
j_data <- content(res_data)
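# Flatten the list of observation records (VALUE) into a data frame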
value_df <- j_data$GET_STATS_DATA$STATISTICAL_DATA$DATA_INF$VALUE %>%
  bind_rows()
# CLASS is a list of records for most classes but a single record for others,
# so handle both cases when turning it into a data frame
force_bind_rows <- function(x) {
  if (is.list(x[[1]])) bind_rows(x) else as_data_frame(x)
}
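# Turn each class object's CLASS metadata into a data frame, named by its @id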
class_obj <- j_data$GET_STATS_DATA$STATISTICAL_DATA$CLASS_INF$CLASS_OBJ
class_df <- lapply(class_obj, function(x) force_bind_rows(x$CLASS))
names(class_df) <- sapply(class_obj, function(x) x$"@id")
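# The observed values arrive as strings in the `$` field; non-numeric entries
# become NA, hence the coercion warning below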
value_df <- value_df %>%
  mutate(value = as.numeric(`$`))
## Warning in eval(substitute(expr), envir, enclos): NAs introduced by
## coercion
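# Lookup tables mapping the coded keys (@time, @cat01, @area) to readable labels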
df_time <- class_df$time %>%
  select(`@time` = `@code`, date = `@name`)
df_cat01 <- class_df$cat01 %>%
  select(`@cat01` = `@code`, item = `@name`)
df_area <- class_df$area %>%
  select(`@area` = `@code`, city = `@name`)
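# Attach the readable date, item and city labels to each observation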
d <-
  value_df %>%
  left_join(df_time, by = "@time") %>%
  left_join(df_cat01, by = "@cat01") %>%
  left_join(df_area, by = "@area")
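# Parse the "YYYY年MM月" labels into dates; stri_enc_tonative() first converts
# them to the native encoding (relevant where that is not UTF-8, e.g. on Windows)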
d <-
  d %>%
  mutate(date = stri_enc_tonative(date),
         date = fast_strptime(date, "%Y年%m月"))
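# Strip the leading classification codes (digits and spaces) from the item and
# city labels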
d <-
  d %>%
  mutate_each(funs(str_replace(., "^[ 0-9]+", "")), item, city)
# Keep only the data we need
d <-
  d %>%
  select(date, value, item, city) %>%
  filter(!is.na(value), city != "全国")
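# Download the local government code table from soumu.go.jp, which maps each
# city name to its prefecture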
tmp <- tempfile(fileext = ".xls")
download.file("http://www.soumu.go.jp/main_content/000318342.xls", destfile = tmp, method = "curl")
prefnames_soumu <- readxl::read_excel(tmp)
names(prefnames_soumu) <- c("code", "pref_kanji", "city_kanji", "pref_kana", "city_kana")
unlink(tmp)
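# Romanize the prefecture names so they can be matched by name later on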
prefnames <-
  prefnames_soumu %>%
  mutate(pref_alpha = str_sub(pref_kanji, end = -2), # drop the trailing 都/道/府/県
         pref_alpha = kakasi(stri_enc_tonative(pref_alpha)), # convert to roman letters
         pref_alpha = str_replace_all(pref_alpha, "o[ou]", "o"), # match choroplethr's spelling
         pref_alpha = ifelse(pref_alpha == "hokkai", "hokkaido", pref_alpha)) # only Hokkaido keeps its 道
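# Attach the romanized prefecture name to each row; 東京都区部 (the Tokyo ward
# area) is not in the code table, so it is mapped to "tokyo" by hand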
d <-
  d %>%
  left_join(prefnames, by = c("city" = "city_kanji")) %>%
  mutate(pref_alpha = ifelse(city == "東京都区部", "tokyo", pref_alpha))
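# Average the monthly values over 2012-2014 and multiply by 12 to get a yearly
# figure per prefecture and item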
d_summary <-
  d %>%
  filter(between(date, as.POSIXct("2012-01-01"), as.POSIXct("2014-12-31"))) %>%
  group_by(pref_alpha, item) %>%
  summarise(value = mean(value) * 12)
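# One column per item, plus their sum (合計)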
d_spread <-
  d_summary %>%
  spread(item, value) %>%
  mutate(合計 = チョコレート + チョコレート菓子)
library(rgdal)
library(maptools)
library(dplyr)
library(ggplot2)
library(rgeos)
# f is the path to the shapefile, downloaded from
# http://www1.gsi.go.jp/geowww/globalmap-gsi/download/data/gm-japan/gm-jpn-bnd_u_2_1.zip
l <- readOGR(f, layer = "polbnda_jpn", encoding = "UTF-8", verbose = FALSE)
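# Dissolve the municipal polygons into one polygon per prefecture (the nam
# attribute holds the prefecture name)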
l_union <- gUnaryUnion(l, id = as.character(l@data$nam))
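# Lower-case each dissolved polygon's ID, keep only the first word, and expand
# "hokkai" back to "hokkaido" so the ids match pref_alpha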
ids <- sapply(l_union@polygons, function(x) x@ID) %>%
  str_to_lower %>%
  str_split(., " ") %>%
  map(1) %>%
  map_if(. == "hokkai", ~("hokkaido")) %>%
  unlist
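# Reorder the prefecture-level figures so the rows line up with the polygon
# order in l_union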
d_spread_ordered <-
  data_frame(ids = ids) %>%
  inner_join(d_spread, by = c("ids" = "pref_alpha"))
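# Draw the choropleth: fill each prefecture on an Oranges scale according to
# its チョコレート value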
leaflet() %>%
  addTiles() %>%
  addPolygons(data = l_union,
              color = colorNumeric("Oranges", domain = d_spread_ordered$チョコレート)(d_spread_ordered$チョコレート),
              fillOpacity = 1)