
1. Generate and filter the data

set.seed(123)
era_day <- tibble(
  date = seq(as.Date("2013-01-01"), as.Date("2014-12-31"), by = "day"),
  tas = rnorm(730, mean = 1, sd = 6)  # daily temperature in °C
)

# Filter to the snow year: September 2013 through August 2014
snowmod <- era_day %>%
  filter(date >= '2013-09-01' & date <= '2014-08-31')

2. Plot temperature against time
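The code for this plot is not included here; a minimal sketch, assuming ggplot2 is loaded and using the snowmod tibble from step 1:

ggplot(snowmod) +
  geom_line(aes(x = date, y = tas)) +                  # daily temperature
  geom_hline(yintercept = 0, linetype = "dotted") +    # freezing point
  labs(x = "Date", y = "Temperature (°C)",
       title = "Daily temperature, snow year 2013-2014")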

3. Calculate degree days and group per month

# Add a column with only the positive temperatures (degree days)
snowmod <- snowmod %>%
  mutate(dd = ifelse(tas > 0, tas, 0),
         month = month(date, label = TRUE))

# Sum per month
monthly_dd <- snowmod %>%
  group_by(month) %>%
  summarise(pdd_sum = sum(dd))

4. Bar plot of monthly PDD (positive degree days)

ggplot(monthly_dd, aes(x = month, y = pdd_sum)) +
  geom_col(fill = "steelblue") +
  labs(title = "Monthly Positive Degree Day Sum",
       x = "Maand", y = "PDD (°C-dagen)")

5. Answer Question 2.1

# a. What is the PDD in May 2014?
pdd_mei <- snowmod %>%
  filter(month(date) == 5 & year(date) == 2014) %>%
  summarise(sum_mei = sum(dd)) %>%
  pull(sum_mei)

pdd_mei
## [1] 79.81767
# b. Melt in mm water equivalent
DDF <- 7.0  # mm w.e. per °C per day
density <- 0.4  # g/cm^3

# Calculate the melt in mm
snowmelt_mm <- pdd_mei * DDF

# Convert mm to cm and then to thickness in m
snowmelt_cm <- snowmelt_mm / 10  # mm to cm
snowmelt_m <- snowmelt_cm / 100  # cm to m

# How thick is this as snow (accounting for density)?
# water density = 1 g/cm^3 -> ratio is 1/0.4 = 2.5
snow_thickness_m <- snowmelt_m * (1 / density)

snow_thickness_m
## [1] 1.396809
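As a quick check of the unit chain above, the whole conversion collapses into a single expression (same result as snow_thickness_m):

# PDD (°C days) x DDF (mm w.e. per °C day) -> mm w.e.; /1000 -> m w.e.; /0.4 -> m of snow
pdd_mei * DDF / 1000 / density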
library(dplyr)
library(ggplot2)

# Add a precipitation column (simulated)
set.seed(456)
snowmod <- snowmod %>%
  mutate(pr = rgamma(n(), shape = 2, scale = 1)) # daily precipitation in mm

# Calculate snowfall (only on days with tas < 0)
snowmod <- snowmod %>%
  mutate(snowfall = ifelse(tas < 0, pr, 0))

# Make a time series plot
print(
  ggplot(snowmod) +
    geom_area(aes(x = date, y = pr), fill = "lightblue", alpha = 0.5) +
    geom_area(aes(x = date, y = snowfall), fill = "darkblue", alpha = 0.7) +
    labs(
      title = "Daily precipitation and snowfall",
      x = "Date",
      y = "Precipitation (mm)"
    )
)

# Calculate the total snowfall over the snow year (in mm)
total_snowfall <- sum(snowmod$snowfall, na.rm = TRUE)
total_snowfall
## [1] 304.6334
# Calculate the total precipitation
total_precip <- sum(snowmod$pr, na.rm = TRUE)

# Reuse the snowfall total computed above (recomputed here for completeness)
total_snowfall <- sum(snowmod$snowfall, na.rm = TRUE)

# Calculate the percentage of precipitation that falls as snow
percentage_snow <- (total_snowfall / total_precip) * 100
percentage_snow
## [1] 40.28681
# Start with initial snowpack = 0
snowmod <- snowmod %>% mutate(snowpack = 0)

# Set Degree Day Factor (DDF)
ddf <- 7.0

# Loop over each day to update snowpack
for (i in 2:nrow(snowmod)) {
  # Accumulate yesterday's snowpack with today's snowfall
  snowmod$snowpack[i] <- snowmod$snowpack[i - 1] + snowmod$snowfall[i]

  # Subtract melt if degree days > 0
  snowmod$snowpack[i] <- snowmod$snowpack[i] - ddf * snowmod$dd[i]

  # Prevent negative snowpack
  snowmod$snowpack[i] <- ifelse(snowmod$snowpack[i] < 0, 0, snowmod$snowpack[i])
}
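The same accumulation can also be written without an explicit loop; a minimal sketch using base R's Reduce, assuming the snowfall and dd columns created above:

# accumulate day by day: add snowfall, subtract degree-day melt, clamp at zero
snowpack_alt <- Reduce(
  function(prev, i) max(0, prev + snowmod$snowfall[i] - ddf * snowmod$dd[i]),
  x = 2:nrow(snowmod), init = 0, accumulate = TRUE
)
all.equal(snowpack_alt, snowmod$snowpack)  # should be TRUE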
library(ggplot2)
library(cowplot)

# Plot 1: Temperature with 0°C line
p1 = ggplot(snowmod) +
  geom_line(aes(date, tas)) +
  geom_hline(yintercept = 0, linetype = 'dotted') +
  labs(x = NULL, y = 'Temperature (°C)')

# Plot 2: Snowpack as area
p2 = ggplot(snowmod) +
  geom_area(aes(date, snowpack), fill = 'skyblue') +
  labs(x = NULL, y = 'Snow (mm w.e.)')

# Combine vertically
plot_grid(p1, p2, align = 'v', ncol = 1)

calcSnow <- function(t_offset = 0, ddf = 7, makeplot = TRUE) {
  input <- era_day %>% filter(date >= '2013-09-01' & date <= '2014-08-31')
  
  p <- input$pr
  t <- input$tas + t_offset
  
  snowfall <- snowmelt <- snowpack <- rep(0, nrow(input))
  
  for (i in 2:nrow(input)) {
    snowfall[i]  <- ifelse(t[i] < 0, p[i], 0)
    snowmelt[i]  <- ifelse(t[i] > 0, t[i], 0) * ddf
    snowpack[i]  <- snowpack[i - 1] + snowfall[i] - snowmelt[i]
    if (is.na(snowpack[i]) || snowpack[i] < 0) {
      snowpack[i] <- 0
    }
  }

  if (makeplot) {
    p1 <- ggplot() +
      geom_path(aes(input$date, t, color = t), linewidth = 2, show.legend = FALSE) +
      geom_hline(yintercept = 0) +
      labs(x = NULL, y = 'Temperature (°C)') +
      scale_color_gradient2(low = 'navyblue', mid = 'gray85', high = 'darkred')
    
    p2 <- ggplot() +
      geom_area(aes(input$date, snowpack), fill = 'steelblue') +
      labs(x = NULL, y = 'Snow (mm w.e.)')
    
    print(plot_grid(p1, p2, align = 'v', ncol = 1))
  }

  return(data.frame(
    t_offset = t_offset,
    peak_swe = ifelse(length(snowpack) == 0, NA, max(snowpack, na.rm = TRUE)),
    snowdays = ifelse(length(snowpack) == 0, NA, sum(snowpack > 0, na.rm = TRUE))
  ))
}
# Add dummy precipitation to era_day (for testing): random values between 0 and 5 mm
era_day$pr <- runif(nrow(era_day), min = 0, max = 5)

# Now run the function with the dummy precipitation data
result <- calcSnow()

print(result)
##   t_offset peak_swe snowdays
## 1        0  16.1248      152
t_seq <- seq(from = -8, to = 8, by = 0.1)
sens <- lapply(t_seq, calcSnow, makeplot = FALSE) %>% bind_rows()
sens$t_offset <- t_seq
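The lapply/bind_rows pair could equally be written with purrr; a sketch (map_dfr row-binds the one-row data frames returned by calcSnow, so the t_offset column is filled in directly):

sens <- purrr::map_dfr(t_seq, calcSnow, makeplot = FALSE)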
# Fit the linear model
model_lm <- lm(peak_swe ~ t_offset, data = sens)

# Inspect a summary of the model (optional)
summary(model_lm)
## 
## Call:
## lm(formula = peak_swe ~ t_offset, data = sens)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -22.654 -14.517   1.004  11.317  55.062 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  29.5933     1.1699   25.30   <2e-16 ***
## t_offset     -5.4280     0.2517  -21.56   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 14.84 on 159 degrees of freedom
## Multiple R-squared:  0.7452, Adjusted R-squared:  0.7436 
## F-statistic: 464.9 on 1 and 159 DF,  p-value: < 2.2e-16
# Extract the slope: the coefficient for t_offset
slope <- coef(model_lm)["t_offset"]
print(paste("dSWE/dT =", slope))
## [1] "dSWE/dT = -5.42795320730424"
# Extract the intercept and slope from the model fitted above
intercept <- coef(model_lm)["(Intercept)"]
slope <- coef(model_lm)["t_offset"]

# Temperature offset at which peak_swe = 0: setting the fitted line
# intercept + slope * t_offset to zero gives -intercept / slope ≈ 29.59 / 5.43 ≈ 5.45 °C
temp_no_snow <- -intercept / slope
print(paste("Temperature without snow (offset) =", temp_no_snow))
## [1] "Temperature without snow (offset) = 5.45202047567555"
offsets_test <- seq(-3.2, -2.3, by = 0.1)

# Run the model for each offset and store the results
results_test <- lapply(offsets_test, calcSnow, makeplot = TRUE)

# Inspect the results for each offset
for (i in seq_along(offsets_test)) {
  cat("Offset:", offsets_test[i], "\n")
  print(results_test[[i]])
  cat("\n")
}
## Offset: -3.2 
##   t_offset peak_swe snowdays
## 1     -3.2 26.88083      245
## 
## Offset: -3.1 
##   t_offset peak_swe snowdays
## 1     -3.1 25.48083      243
## 
## Offset: -3 
##   t_offset peak_swe snowdays
## 1       -3 24.08083      242
## 
## Offset: -2.9 
##   t_offset peak_swe snowdays
## 1     -2.9 22.68083      240
## 
## Offset: -2.8 
##   t_offset peak_swe snowdays
## 1     -2.8 22.67337      238
## 
## Offset: -2.7 
##   t_offset peak_swe snowdays
## 1     -2.7 22.67337      236
## 
## Offset: -2.6 
##   t_offset peak_swe snowdays
## 1     -2.6 22.67337      230
## 
## Offset: -2.5 
##   t_offset peak_swe snowdays
## 1     -2.5 22.67337      229
## 
## Offset: -2.4 
##   t_offset peak_swe snowdays
## 1     -2.4 22.67337      227
## 
## Offset: -2.3 
##   t_offset peak_swe snowdays
## 1     -2.3 22.67337      225
library(dplyr)
library(ggplot2)

# Define temperature offsets close to the jump range
jump_offsets <- seq(-3.2, -2.3, by = 0.1)

# Run calcSnow function for each offset, suppress plots
jump_results <- lapply(jump_offsets, calcSnow, makeplot = FALSE) %>% bind_rows()

# Add temperature offsets as a column
jump_results$t_offset <- jump_offsets

# Plot peak SWE vs temperature offset zoomed near jump
ggplot(jump_results, aes(x = t_offset, y = peak_swe)) +
  geom_point(size = 3, color = 'steelblue') +
  geom_line(color = 'darkblue') +
  geom_vline(xintercept = c(-3.0, -2.5), linetype = 'dashed', color = 'red') +
  labs(title = "Snowpack (Peak SWE) Near Temperature Offset Jump",
       x = "Temperature Offset (°C)",
       y = "Peak Snow Water Equivalent (mm w.e.)") +
  theme_minimal()

# 'sens' is the sensitivity data frame from above, with columns 't_offset', 'peak_swe', and 'snowdays'

# 1. Find the row where snowdays is closest to 143
calibrated_row <- sens[which.min(abs(sens$snowdays - 143)), ]

# Extract calibrated temperature offset
calibrated_t_offset <- calibrated_row$t_offset

# Extract peak SWE at this offset
peak_swe_at_calibrated <- calibrated_row$peak_swe

# Print results
print(paste("Calibrated temperature offset T:", calibrated_t_offset))
## [1] "Calibrated temperature offset T: 0.300000000000001"
print(paste("Peak SWE at calibrated offset:", peak_swe_at_calibrated))
## [1] "Peak SWE at calibrated offset: 11.8091467854944"
modout = snowModel(
  xy              = filter(stations, name=='Kyangjin AWS') %>% st_coordinates,  # kyangjin xy
  mask            = aoi,             # outline of entire study area (i.e. no mask)
  modelresolution = 0.25,            # use quarter degree ERA5 resolution
  zresolution     = 500,             # use 500 m elevation bands in the subgrid routine
  degdayfac       = 7.0,             # same as before
  startdate       = '2013-09-01',    # same as before
  enddate         = '2014-08-31',    # same as before
  zband_output    = T
)
ggplot(modout$zbands[[1]]) +
  geom_point(aes(sdays_mod,sdays, fill=factor(zmid)), pch=21, size=4) +  # plot the points
  scale_fill_manual(values=pkRamp('elevnat2',nrow(modout$zbands[[1]])),  # use custom color
                    name='Elevation band') +  
  geom_abline(linetype='dotted')                                         # add 1:1 line

# Extract the daily weighted aggregation from the model output
daily_data <- modout$daily[[1]]

# Plot SWE against date using geom_area
ggplot(daily_data) +
  geom_area(aes(x = date, y = swe), fill = "steelblue", alpha = 0.6) +
  labs(x = "Date", y = "Snow Water Equivalent (mm)") +
  ggtitle("Modeled Snow Water Equivalent (SWE) over Time")

# Compare with calibrated plot (t_offset = 5.6)
calibrated_result <- calcSnow(t_offset = 5.6)

# The function prints the calibrated plot itself (makeplot = TRUE is its default)
library(ggplot2)
library(dplyr)
library(tidyr)

# Assuming modout$daily[[1]] contains modeled daily snow data
daily_data <- modout$daily[[1]]

# Calculate modeled snow season length (number of snow days) per year or period
# Here, sum days where SWE > 0 per year or season (adjust date range if needed)
daily_data <- daily_data %>% 
  mutate(date = as.Date(date)) %>%
  mutate(year = format(date, "%Y")) %>%
  group_by(year) %>%
  summarise(
    snow_season_length = sum(swe > 0)
  )

# Load observed snow season length data (replace with your actual data)
# For example, a dataframe `obs_snow` with columns 'year' and 'snow_season_length'
# obs_snow <- read.csv('path_to_observed_snow_data.csv')

# For illustration, create dummy observed data with some differences
obs_snow <- daily_data %>%
  mutate(snow_season_length = snow_season_length + sample(-30:30, n(), replace = TRUE)) 

# Combine modeled and observed for plotting
combined <- daily_data %>%
  rename(modeled = snow_season_length) %>%
  left_join(obs_snow %>% rename(observed = snow_season_length), by = "year") %>%
  pivot_longer(cols = c(modeled, observed), names_to = "type", values_to = "snow_season_length")

# Plot modeled vs observed snow season length by year
ggplot(combined, aes(x = year, y = snow_season_length, color = type, group = type)) +
  geom_line(linewidth = 1.2) +
  geom_point(size = 2) +
  labs(
    title = "Modeled vs Observed Snow Season Length by Year",
    x = "Year",
    y = "Snow Season Length (days)",
    color = "Data Type"
  ) +
  theme_minimal() +
  theme(axis.text.x = element_text(angle = 45, hjust = 1))

# do not plot columns (elevation bands) that have no snow
keepcol = which(colSums(modout$bandswe[[1]][,-1]) > 1) + 1

# make SWE plot
pdat1 = modout$bandswe[[1]][, c(1, keepcol)] %>%
  gather('elev', 'swe', -1) %>%
  mutate(elev = factor(elev, levels = rev(unique(elev))))

p1 = ggplot(pdat1) +
  geom_area(aes(date, swe, fill = elev)) +
  labs(x = NULL, y = 'Snow (mm w.e.)') +
  scale_fill_manual(values = pkRamp('elevnat2', length(keepcol), rev = TRUE))

# make cover plot
pdat2 = modout$bandcov[[1]][, c(1, keepcol)] %>%
  gather('elev', 'cov', -1) %>%
  mutate(elev = factor(elev, levels = rev(unique(elev))))

p2 = ggplot(pdat2) +
  geom_area(aes(date, cov, fill = elev)) +
  labs(x = NULL, y = 'Fractional snow cover') +
  scale_fill_manual(values = pkRamp('elevnat2', length(keepcol), rev = TRUE))

p1

p2

modout <- snowModel(
  xy = stations %>% filter(name == 'Kyangjin AWS') %>% st_coordinates(),
  mask = aoi,                  # polygon of entire study area
  modelresolution = 0.25,      # quarter degree ERA5 resolution
  zresolution = 100,           # use 100 m elevation bands in subgrid
  degdayfac = 7.0,             # same as before
  startdate = '1979-09-01',    # start of snow year 1979
  enddate = '2018-08-31'       # end of snow year 2018
)
library(ggplot2)
library(dplyr)

# Extract the monthly statistics for SWE from modout
monthly_swe <- modout$histstat[[1]] %>% 
  dplyr::select(date, swe_max)

# Plot the peak SWE over time
ggplot(monthly_swe, aes(x = date, y = swe_max)) +
  geom_area(fill = "steelblue", alpha = 0.7) +
  labs(
    title = "Monthly Maximum Snow Pack (Peak SWE)",
    x = "Date",
    y = "Peak SWE (mm w.e.)"
  ) +
  theme_minimal()

# get annual peaks and plot
annual_peak = modout$histstat[[1]] %>%
  filter(year > 1979) %>%  # drop first year (no peak, as series starts in September)
  group_by(year) %>%       # group by year
  summarise(peak_swe = max(swe_max))  # get the largest monthly peak SWE of the year

ggplot(annual_peak, aes(year, peak_swe)) +  # initiate ggplot with correct data
  geom_line() +                             # use line geometry to plot the data
  geom_smooth(method = 'lm')                # overlay linear regression on the plot
## `geom_smooth()` using formula = 'y ~ x'

# get linear model and summary statistics
annual_peak %>%
  lm(peak_swe ~ year, data = .) %>%
  summary()
## 
## Call:
## lm(formula = peak_swe ~ year, data = .)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -91.741 -27.136   1.182  24.279 113.340 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)  
## (Intercept) 2379.1091  1291.0495   1.843   0.0734 .
## year          -1.0908     0.6458  -1.689   0.0996 .
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 45.39 on 37 degrees of freedom
## Multiple R-squared:  0.07158,    Adjusted R-squared:  0.04649 
## F-statistic: 2.853 on 1 and 37 DF,  p-value: 0.09962
# perform a regression of peak SWE against year for each calendar month
library(broom)  # for tidy() and glance() used below (if not already loaded in setup)
linreg = modout$histstat[[1]] %>%
  group_by(ymon) %>%             # group by calendar month
  do(fit=lm(swe_max~year, data=.)) %>%  # fit model for each group
  ungroup() %>% 
  # tidy up the results and remove the table rows with model intercept
  mutate(tidied=purrr::map(fit, ~tidy(.x)),
         tidied=purrr::map(tidied, ~filter(.x, term!='(Intercept)'))) %>%
  unnest(tidied)
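Since do() is superseded in recent dplyr, the same per-month fits can also be written with group_modify; a sketch that should produce the same estimate and p.value columns used in the bar plot below:

# fit one lm per calendar month and keep only the slope rows
linreg <- modout$histstat[[1]] %>%
  group_by(ymon) %>%
  group_modify(~ broom::tidy(lm(swe_max ~ year, data = .x))) %>%
  ungroup() %>%
  filter(term != '(Intercept)')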

# plot the results as a bar plot
ggplot(linreg) +
  geom_col(aes(x=factor(ymon, levels=c(9:12,1:8)), # columns per month, Sep-Aug
               y=estimate,
               fill=p.value<=0.05)) +           # colour bars by significance
  labs(x='Month', y='dSWE / dt')

modout = snowModel(
  xy              = NA,              # set to NA to run all snow pixels within the mask
  mask            = catchment,       # polygon of the langtang catchment
  modelresolution = 1/50,            # 12.5 times finer resolution than ERA5
  zresolution     = 100              # use 100 m elevation bands in subgrid routine
)
ggplot(modout) + 
  geom_tile(aes(x,y, fill=z), colour='black') +
  scale_fill_gradientn(colours=pkRamp('elevnat2')) +
  layer_spatial(catchment, fill=NA, size=1) + labs(x=NULL, y=NULL)

# function to apply to the histstat tibble of each row of modout
getMeanPeak = function(subtibble){
  subtibble %>%
  filter(year>1979) %>% 
  group_by(year) %>% 
  summarise(peak_swe=max(swe_max)) %>%
  pull(peak_swe) %>%
  mean
}

# run function for each histstat subtibble using the map function from purrr package 
modout = modout %>%
  # run the function for each row, creating column 'meanpeak' based on column histstat
  mutate(meanpeak=purrr::map_dbl(histstat, getMeanPeak)) %>%
   # keep only pixels that actually have SWE
  filter(meanpeak > 0)     
ggplot(modout) + 
  geom_tile(aes(x,y, fill=meanpeak), colour='black') +
  scale_fill_gradientn(colours=pkRamp('blues'), name='(mm w.e.)') + 
  coord_sf(datum=NA) + labs(x=NULL, y=NULL, title='Mean peak SWE')

# Calculate the maximum mean annual peak SWE in the study area
max_mean_peak_swe <- max(modout$meanpeak)

print(max_mean_peak_swe)
## [1] 1502.857

# Create elevation bands by rounding elevation to nearest 100m (adjust as needed)
modout <- modout %>%
  mutate(elev_band = round(z / 100) * 100)

# Summarize meanpeak by elevation band
peak_swe_per_band <- modout %>%
  group_by(elev_band) %>%
  summarise(mean_peak_swe = mean(meanpeak, na.rm = TRUE))

# Plot elevation band vs mean peak SWE
ggplot(peak_swe_per_band, aes(x = elev_band, y = mean_peak_swe)) +
  geom_point() +
  geom_line() +
  labs(
    x = "Elevation Band (m)",
    y = "Mean Annual Peak SWE (mm w.e.)",
    title = "Mean Annual Peak SWE by Elevation Band"
  ) +
  theme_minimal()

# function to calculate either the trend in swe or its p-value
getPeakTrend = function(subtibble, type='slope'){
  linmod = subtibble %>%
    filter(year>1979) %>% 
    group_by(year) %>% 
    summarise(peak_swe=max(swe_max)) %>%
    lm(peak_swe~year, data=.)
  if (type=='slope'){return(coef(linmod)[2])}
  if (type=='prob'){return(glance(linmod)$p.value)}
}

# run function for each histstat subtibble
modout = modout %>%
  mutate(trend = purrr::map_dbl(histstat, getPeakTrend, type='slope'),
         pval  = purrr::map_dbl(histstat, getPeakTrend, type='prob'),
         reltrend = trend / meanpeak * 100)   # also get normalized trend
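Since getPeakTrend is mapped twice, every pixel's regression is fitted twice; a sketch of a single-pass variant (getPeakStats is a hypothetical helper, not part of the course package) that returns both statistics from one fit:

# fit each pixel's peak-SWE regression once and return slope and p-value together
getPeakStats = function(subtibble){
  linmod = subtibble %>%
    filter(year>1979) %>%
    group_by(year) %>%
    summarise(peak_swe=max(swe_max)) %>%
    lm(peak_swe~year, data=.)
  data.frame(trend = unname(coef(linmod)[2]),
             pval  = broom::glance(linmod)$p.value)
}

peak_stats = purrr::map_dfr(modout$histstat, getPeakStats)  # one row per pixel
all.equal(peak_stats$trend, modout$trend)  # should match the map_dbl results above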
p1 = ggplot(modout) + 
  geom_tile(aes(x,y, fill=trend, color=pval<=0.05), linewidth=0.8) +
  scale_color_manual(values=c(NA,'gold'), name='p <= 0.05') + 
  scale_fill_gradient2(name=expression('(mm w.e. a'^'-1'*')')) + 
  coord_sf(datum=NA) + labs(x=NULL, y=NULL, title='Trend in peak SWE')
p2 = ggplot(modout) + 
  geom_tile(aes(x,y, fill=reltrend, color=pval<=0.05), linewidth=0.8) +
  scale_color_manual(values=c(NA,'gold'), name='p <= 0.05') + 
  scale_fill_gradient2(name=expression('(% a'^'-1'*')')) + 
  coord_sf(datum=NA) + labs(x=NULL, y=NULL, title='Relative trend in peak SWE')
plot_grid(p1,p2,ncol=1,align='v')

plot_grid(
  ggplot(modout) + geom_point(aes(x=trend,    y=z, shape=pval<=0.05, col=pval<=0.05)),
  ggplot(modout) + geom_point(aes(x=reltrend, y=z, shape=pval<=0.05, col=pval<=0.05)),
  align='h', nrow=1
)

# Calculate percentage of pixels with significant trends in annual peak SWE
percent_significant <- mean(modout$pval <= 0.05) * 100

# Print result with formatted message
cat(sprintf("Percentage of pixels with statistically significant trends (p ≤ 0.05): %.2f%%\n", percent_significant))
## Percentage of pixels with statistically significant trends (p ≤ 0.05): 21.54%
modout = snowModel(
  xy              = NA,             
  mask            = catchment,       
  modelresolution = 1/50,            
  zresolution     = 100,             
  add_melt        = T,            # add melt estimation to daily subtibble
  add_meteo       = T             # add model pixel P and T to daily subtibble
)
catch_mean = modout$daily %>%
  bind_rows() %>%                                 # merge all pixels in one big tibble
  select(date,pr,snow,melt) %>%                   # keep required columns only
  group_by(date) %>%                              # group by date
  summarise_all(mean) %>%                         # get mean of all pixels
  filter(format(date, '%m%d') != '0229') %>%      # remove leap days from the data
  mutate(rain=pr-snow) %>%                        # get rain fraction of precip
  mutate(melt=-melt) %>%                          # make melt a positive streamflow contribution
  mutate(doy=rep(1:365,39))                       # add day-of-snowyear as an aggregator value
# parameters required to calculate discharge
px_area    = 4.34             # pixel area is approx 4.34 km2
tot_px     = nrow(modout)
tot_area   = px_area*1e6 * tot_px 
sec_in_day = 24 * 60 * 60

# get average per day-of-snowyear over 1979-2018
catch_doyavg = catch_mean %>%
  group_by(doy) %>%                               # group by day of snowyear
  summarise_all(mean) %>%                         # get mean over entire period per doy
  mutate_at(vars(pr:rain), function(x) x*1e-3 * tot_area / sec_in_day) %>%      # convert to m3/s
  mutate(discharge=rain+melt)                     # discharge estimation
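To spell out the unit conversion done by mutate_at above: a catchment-mean water depth of x mm per day becomes a discharge in m3/s via x * 1e-3 (mm to m) * tot_area (m2) / 86400 (s per day). As a quick illustration with the parameters defined above:

# discharge equivalent of 1 mm/day of catchment-mean water input
1e-3 * tot_area / sec_in_day  # m3/s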
ggplot(catch_doyavg) +
  geom_ribbon(aes(date, ymin=0,    ymax=rain,      fill='Rain')) + 
  geom_ribbon(aes(date, ymin=rain, ymax=rain+melt, fill='Snow melt')) + 
  geom_area(aes(date, -snow, fill='Snow fall')) + 
  scale_fill_manual(values=c('steelblue','snow3','lightblue'), name='Legend') + 
  labs(x='Average year 1979-2018', y='Discharge (m3/s)')

modout = snowModel(
  xy              = NA,                
  mask            = catchment,         
  modelresolution = 1/32,            
  future_runs     = T,                 # enable the future runs
  tas_deltas      = seq(-1, 5, 1),     # vector of T deltas to evaluate (in K) 
  pr_deltas       = seq(0.8, 1.3, 0.1) # vector of P deltas to evaluate (in fractions)
)
catch_fut = modout$futstat %>%
  bind_rows() %>%                       
  group_by(t_delta,p_delta,month) %>%     
  summarise(peak_swe=mean(peak_swe))
## `summarise()` has grouped output by 't_delta', 'p_delta'. You can override
## using the `.groups` argument.
library(dplyr)

# Filter catch_fut for March (month == 3) with no temperature forcing (t_delta == 0);
# because p_delta is not filtered, this returns one value per precipitation delta
no_forcing <- catch_fut %>%
  filter(month == 3, t_delta == 0) %>%
  select(peak_swe) %>%
  pull()
## Adding missing grouping variables: `t_delta`, `p_delta`
# Filter catch_fut for March (month == 3) and dT = +4 degrees (again one value per p_delta)
plus_4deg <- catch_fut %>%
  filter(month == 3, t_delta == 4) %>%
  select(peak_swe) %>%
  pull()
## Adding missing grouping variables: `t_delta`, `p_delta`
# Calculate the percentage decrease in SWE from no forcing to +4 degrees, per precipitation delta
percentage_less <- ((no_forcing - plus_4deg) / no_forcing) * 100

# Print results (one value per precipitation delta)
cat("Catchment-wide average peak SWE for March with no forcing: ", no_forcing, "mm w.e.\n")
## Catchment-wide average peak SWE for March with no forcing:  233.7715 266.9501 299.172 333.4392 364.8418 401.7281 mm w.e.
cat("Catchment-wide average peak SWE for March with +4°C forcing: ", plus_4deg, "mm w.e.\n")
## Catchment-wide average peak SWE for March with +4°C forcing:  132.6906 150.4455 167.8393 185.7366 205.5953 225.4951 mm w.e.
cat("Percentage decrease in peak SWE: ", round(percentage_less, 2), "%\n")
## Percentage decrease in peak SWE:  43.24 43.64 43.9 44.3 43.65 43.87 %
plotdat = catch_fut %>%
  group_by(t_delta,p_delta) %>%
  summarise(peak_swe=max(peak_swe))
## `summarise()` has grouped output by 't_delta'. You can override using the
## `.groups` argument.
ggplot(plotdat) +
    geom_tile(aes(x=factor(t_delta), y=factor(p_delta), fill=peak_swe)) +
    scale_fill_gradient(low='gold',high='darkblue')

# get relative swe with respect to normal situation
catch_fut = catch_fut %>%
  group_by(month) %>%
  mutate(relswe=peak_swe / peak_swe[t_delta==0 & p_delta==1] * 100)
# get annual peak
catch_relfut = catch_fut %>%
  group_by(t_delta,p_delta) %>%
  summarise(relswe=max(relswe))
## `summarise()` has grouped output by 't_delta'. You can override using the
## `.groups` argument.
# plot the data in a heatmap
ggplot(catch_relfut) +
  geom_tile(aes(x=factor(t_delta), y=factor(p_delta), fill=relswe)) +
  scale_fill_gradient2(midpoint=100, name='Relative\nSWE (%)')

# Filter for a 5°C temperature rise and normal precipitation (p_delta == 1);
# catch_fut is grouped by month, so this yields one relative SWE value per month
future_fraction_5C <- catch_fut %>%
  filter(t_delta == 5, p_delta == 1) %>%
  summarise(avg_relswe = mean(relswe)) %>%
  pull(avg_relswe)

cat("Future fraction of snow relative to current situation at +5°C rise:", round(future_fraction_5C, 2), "%\n")
## Future fraction of snow relative to current situation at +5°C rise: 37.95 46.76 50.11 49.29 44.43 32.79 14.56 4.55 8.78 17.46 21.66 27.41 %
# Monthly percentages taken from the output above (January through December)
future_fraction <- c(37.95, 46.76, 50.11, 49.29, 44.43, 32.79, 14.56, 4.55, 8.78, 17.46, 21.66, 27.41)
months <- factor(month.abb, levels = month.abb)  # Ordered month abbreviations

# Create a data frame
df <- data.frame(
  Month = months,
  Fraction = future_fraction
)

library(ggplot2)

# Plot as bar plot
ggplot(df, aes(x = Month, y = Fraction)) +
  geom_col(fill = "steelblue") +
  labs(
    title = "Future Fraction of Snow Relative to Current Situation (+5°C Rise)",
    x = "Month",
    y = "Snow Fraction (%)"
  ) +
  theme_minimal() +
  ylim(0, 60)  # adjust y-axis limit for better visualization

ggplot(catch_fut) +
  geom_col(aes(x=month, y=peak_swe, fill=relswe, group=interaction(p_delta,t_delta))) +
  geom_line(aes(x=month, y=peak_swe, group=interaction(p_delta,t_delta))) +
  facet_grid(p_delta~t_delta, as.table=F) + 
  scale_fill_gradient2(midpoint=100, name='Relative\nSWE (%)')