The geographical response of the western North Pacific subtropical high (WNPSH) to environmental conditions such as the El Niño-Southern Oscillation (ENSO) and global warming has been one of the main concerns with respect to extreme events induced by tropical convection. By treating outgoing longwave radiation (OLR) as a measure of subtropical-high strength, this study attempts to further understand the geographical response of the WNPSH to ENSO and global warming. First, the response of OLR to ENSO clearly shows a meridional seesaw pattern of OLR anomalies, especially at 130\(^\circ\)–175\(^\circ\)E. Thus, the La Niña environment, with weaker convection at lower latitudes, is characterized by a farther westward expansion of the WNPSH but with a weaker strength. Conversely, the El Niño environment, with stronger convection at lower latitudes, leads to a shrunken WNPSH but with a greater strength. Second, the response of OLR to global warming is explored. Global warming is found to simultaneously inhibit tropical convection at lower latitudes and increase the WNPSH strength at higher latitudes. In turn, the relatively smaller modulation of WNPSH strength at higher latitudes is understood to be caused by the weaker tropical convection at lower latitudes in a warmer environment. The importance of this study lies in the fact that the statistical model of OLR effectively illustrates how ENSO variation and global warming shape the zonally nonlinear strength of the boreal-summer WNPSH.
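As a reference for what follows, the statistical model of OLR amounts to two per-longitude linear regressions of JJA-mean OLR on standardized climate indices, fitted separately for the subtropical (20\(^\circ\)–30\(^\circ\)N) and tropical (5\(^\circ\)–20\(^\circ\)N) bands. The notation below is only an assumed sketch of the `lm(y ~ x1)` and `lm(y ~ x1 + x2)` fits carried out in the code, which the figure captions later cite as Eq. (1) and Eq. (2):

\[ \mathrm{OLR}(\lambda) = \alpha_{1}(\lambda) + \beta_{1}(\lambda)\,\mathrm{SOI} + \varepsilon_{1} \qquad (1) \]

\[ \mathrm{OLR}(\lambda) = \alpha_{2}(\lambda) + \beta_{2}(\lambda)\,\mathrm{SOI} + \gamma_{2}(\lambda)\,\mathrm{GMSST} + \varepsilon_{2} \qquad (2) \]

where \(\lambda\) denotes longitude and SOI and GMSST are standardized (s.d. units).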
library(ncdf4)
library(tidyverse)
library(dplyr)
filter = dplyr::filter
# Seasonal extraction -----------------------------------------------------
# data         : lon x lat x month array read with ncvar_get()
# year         : years covered by the monthly record (e.g. 1985:2020)
# degree       : grid spacing, 2.5 or 0.25 (NCEP/NOAA)
# beforemonth  : month preceding the first month of the season (e.g. Jun-Nov -> 5)
# monthlyrange : number of months in the season (e.g. Jun-Nov -> 6)
extseason = function(data,year,degree,beforemonth,monthlyrange){
ncell = dim(data)[1]*dim(data)[2] # grid cells per monthly field
vec = as.vector(data) ; EndMon = seq(beforemonth,(length(year)*12),12) ; imsi = NULL
# collect the seasonal months of every year
for(i in EndMon){imsi = c(imsi,vec[(ncell*i+1):(ncell*(i+monthlyrange))])}
imsimat = matrix(imsi,nrow=ncell)
extraction.imsi = matrix(NA,nrow=ncell,ncol=length(year))
# seasonal mean for each grid cell and year
for(j in 1:ncell){for(k in 1:length(year)){extraction.imsi[j,k] = mean(imsimat[j,monthlyrange*(k-1)+c(1:monthlyrange)])}}
if(degree == 2.5){extraction.imsi = data.frame(rep(seq(0,357.5,2.5),73),rep(seq(90,-90,-2.5),each=144),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
if(degree == 0.25){extraction.imsi = data.frame(rep(seq(0,359.75,0.25),721),rep(seq(90,-90,-0.25),each=1440),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
return(extraction.imsi)
}
# Setting -----------------------------------------------------------------
year=1985:2020
# 1. OLR (record begins 1974/06; drop the first 7 months so the series starts Jan 1975)
OLR = ncvar_get(nc_open(paste0(getwd(),"/","olr.mon.mean.nc")),"olr")[,,-(1:7)]
OLR = OLR[,,((year[1]-1975)*12+1):((rev(year)[1]-1975)*12+12)]
OLR = extseason(OLR,year,2.5,5,3)
OLR = OLR[c(order(OLR[,1],-OLR[,2])),] ; rownames(OLR) = NULL
OLR_clim = OLR %>% mutate(Clim = rowSums(OLR[,3:(length(year)+2)])/length(year)) %>% relocate(Clim, .after=lat)
# WNP filter
OLR_clim = OLR_clim %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(0,50,2.5))
# Fig.1
library(maps)
library(akima)
library(RColorBrewer)
# Color
colist_negative = colorRampPalette(brewer.pal(8,'Blues'))(9)
colist_positive = colorRampPalette(brewer.pal(8,'YlOrRd'))(9)
colist = c(rev(colist_negative),colist_positive)
A1 = OLR_clim[,1] ; A2 = OLR_clim[,2] ; A3 = OLR_clim[,3]
par(mar=c(6,7,2,2))
plot(-999,-999,xlim=c(100,225),ylim=c(0,50),xlab='',ylab='',axes=F,xaxs='i',yaxs='i')
image(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=144*7),
yo=seq(min(A2),max(A2),length=73*7),duplicate='mean'),axes=F,
xlab='',ylab='',breaks=c(min(A3),seq(190,270,5),max(A3)),
col=colist,main='',xlim=c(100,220),ylim=c(0,50),add=T)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 144 * :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 144 * :
## success: collinearities reduced through jitter
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=length(year)*5),yo=seq(min(A2),max(A2),length=49*5),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280], labcex=1,levels=seq(190,270,10),
labels=seq(190,270,10),method='flattest')
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : success: collinearities reduced through jitter
m=map('world',col=colors()[213],interior=F,add=T,lty=0,xlim=c(100,220),ylim=c(0,50))
map(m,boundary=T,interior=F,add=T,col=colors()[213],lwd=2,xlim=c(100,220),ylim=c(0,50))
abline(v=seq(0,360,20),lty=1,lwd=0.7,col=grey(0.8))
abline(h=seq(-90,90,10),lty=1,lwd=0.7,col=grey(0.8))
axis(1,at=seq(100,220,20),
labels=expression(100~degree~E,120~degree~E,140~degree~E,160~degree~E,180~degree,
160~degree~W,140~degree~W),cex.axis=1.2)
axis(2,at=seq(0,50,10),
labels=expression(EQ.,10~degree~N,20~degree~N,30~degree~N,40~degree~N,50~degree~N),cex.axis=1.2,las=1)
rect(220,0,225,50,col='white',border='white',xpd=T)
mtext('Longitude',at=c(160,-10),side=1,line=3.5,cex=1.5)
mtext('Latitude',side=2,line=4.8,cex=1.5)
text(225,52,expression(paste(W,' ',m^-2)),cex=0.9,xpd=T,adj=0,pos=2)
rect(100,0,220,50,lty=1,lwd=1,xpd=T,lend='square',ljoin='mitre') # same as box()
rect(100,20,220,30,lty=1,lwd=4,border=grey(0.3),xpd=T,lend='square',ljoin='mitre')
rect(100,5,220,20,lty=1,lwd=4,border=grey(0.3),xpd=T,lend='square',ljoin='mitre')
Figure 1. Geographical distribution of OLR. The values are averaged over 36 years (1985–2020) during JJA. The upper and lower rectangles outlined in black indicate the main areas of the WNPSH and of tropical convection, respectively. Here, an OLR of 230\(~\)W\(~\)m\(^{-2}\) is set as the climatological boundary between the cyclonic and anticyclonic flows, in line with the stream-function boundary reported in a previous study (Yun et al., 2023).
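As a supplementary sketch (not part of the original figure code), the 230\(~\)W\(~\)m\(^{-2}\) boundary referred to in the caption can be extracted from the same interpolated climatology with contourLines(); the snippet reuses A1, A2, and A3 from the Fig. 1 chunk above and is only illustrative.
# Sketch: extract the 230 W m^-2 isoline used as the climatological WNPSH boundary in Fig. 1.
# Reuses A1 (lon), A2 (lat), A3 (climatological OLR) defined in the chunk above.
fld = interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=144*7),
yo=seq(min(A2),max(A2),length=73*7),duplicate='mean')
iso230 = contourLines(fld$x,fld$y,fld$z,levels=230)
# Each list element holds the lon/lat vertices of one segment of the 230 W m^-2 isoline;
# e.g. lines(iso230[[1]]$x,iso230[[1]]$y,lwd=2) would overlay it on Fig. 1.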
library(ncdf4)
library(tidyverse)
library(dplyr)
filter = dplyr::filter
# Seasonal extraction -----------------------------------------------------
# data         : lon x lat x month array read with ncvar_get()
# year         : years covered by the monthly record (e.g. 1985:2020)
# degree       : grid spacing, 2.5 or 0.25 (NCEP/NOAA)
# beforemonth  : month preceding the first month of the season (e.g. Jun-Nov -> 5)
# monthlyrange : number of months in the season (e.g. Jun-Nov -> 6)
extseason = function(data,year,degree,beforemonth,monthlyrange){
ncell = dim(data)[1]*dim(data)[2] # grid cells per monthly field
vec = as.vector(data) ; EndMon = seq(beforemonth,(length(year)*12),12) ; imsi = NULL
# collect the seasonal months of every year
for(i in EndMon){imsi = c(imsi,vec[(ncell*i+1):(ncell*(i+monthlyrange))])}
imsimat = matrix(imsi,nrow=ncell)
extraction.imsi = matrix(NA,nrow=ncell,ncol=length(year))
# seasonal mean for each grid cell and year
for(j in 1:ncell){for(k in 1:length(year)){extraction.imsi[j,k] = mean(imsimat[j,monthlyrange*(k-1)+c(1:monthlyrange)])}}
if(degree == 2.5){extraction.imsi = data.frame(rep(seq(0,357.5,2.5),73),rep(seq(90,-90,-2.5),each=144),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
if(degree == 0.25){extraction.imsi = data.frame(rep(seq(0,359.75,0.25),721),rep(seq(90,-90,-0.25),each=1440),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
return(extraction.imsi)
}
# Setting -----------------------------------------------------------------
year=1985:2020
# 1. OLR (record begins 1974/06; drop the first 7 months so the series starts Jan 1975)
OLR = ncvar_get(nc_open(paste0(getwd(),"/","olr.mon.mean.nc")),"olr")[,,-(1:7)]
OLR = OLR[,,((year[1]-1975)*12+1):((rev(year)[1]-1975)*12+12)]
OLR = extseason(OLR,year,2.5,5,3)
OLR = OLR[c(order(OLR[,1],-OLR[,2])),] ; rownames(OLR) = NULL
OLR_2030 = OLR %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(20,30,2.5))
OLR_2030 = as.data.frame(pivot_longer(OLR_2030, cols=-c(lon,lat),names_to='year',values_to='OLR'))
OLR_2030 = OLR_2030 %>% group_by(lon,year) %>% summarize(Mean = mean(OLR))
## `summarise()` has grouped output by 'lon'. You can override using the `.groups`
## argument.
OLR_2030 = pivot_wider(OLR_2030,names_from=year,values_from=Mean)
OLR_520 = OLR %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(5,20,2.5))
OLR_520 = as.data.frame(pivot_longer(OLR_520, cols=-c(lon,lat),names_to='year',values_to='OLR'))
OLR_520 = OLR_520 %>% group_by(lon,year) %>% summarize(Mean = mean(OLR))
## `summarise()` has grouped output by 'lon'. You can override using the `.groups`
## argument.
OLR_520 = pivot_wider(OLR_520,names_from=year,values_from=Mean)
library(maps)
library(akima)
library(RColorBrewer)
# Fig.2 a
par(mar=c(4,4.5,3,2),oma=c(1,1,1,1))
# Color
colist_negative = colorRampPalette(brewer.pal(8,'Blues'))(9)
colist_positive = colorRampPalette(brewer.pal(8,'YlOrRd'))(9)
colist = c(rev(colist_negative),colist_positive)
A1 = rep(seq(100,220,2.5),each=length(year))
A2 = rep(1:length(year),length(seq(100,220,2.5)))
A3 = as.data.frame(pivot_longer(OLR_2030, cols=-c(lon),names_to='year',values_to='OLR'))[,3]
image(interp(A1,A2,A3, xo=seq(min(A1),max(A1),length=length(year)*7),yo=seq(min(A2),max(A2),length=49*7),duplicate='mean'),
xlim=c(100,220),ylim=c(length(year),1),breaks=c(min(A3),seq(190,270,5),max(A3)),axes=F,xlab='',ylab='',col=colist)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : success: collinearities reduced through jitter
## Warning in image.default(interp(A1, A2, A3, xo = seq(min(A1), max(A1), length =
## length(year) * : unsorted 'breaks' will be sorted before use
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=length(year)*9),yo=seq(min(A2),max(A2),length=49*9),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280], breaks=seq(190,270,10), labcex=1,levels=seq(190,270,10),
labels=seq(190,270,10))
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : success: collinearities reduced through jitter
axis(1,at=seq(100,220,40),labels=expression(100~degree~E,140~degree~E,180~degree,140~degree~W),cex.axis=1.2)
axis(2, at = c(1,6,11,16,21,26,31,36),labels= c(1985,1990,1995,2000,2005,2010,2015,2020),las=1)
axis(4, at = c(1,6,11,16,21,26,31,36),labels= c('','','','','','','',''),las=1)
mtext('Longitude',at=c(160,-5),side=1,line=3,cex=1.5)
mtext('Year',side=2,line=4, cex=1.5)
text(230,-0.2,expression(paste(W,' ',m^-2)),cex=1.2,xpd=T,adj=0,pos=2)
text(75,-1.5,'a',cex=2.8,font=2,xpd=T)
box()
library(maps)
library(akima)
library(RColorBrewer)
# Fig.2 b
par(mar=c(4,4.5,3,2),oma=c(1,1,1,1))
colist_negative = colorRampPalette(brewer.pal(8,'Blues'))(9)
colist_positive = colorRampPalette(brewer.pal(8,'YlOrRd'))(9)
colist = c(rev(colist_negative),colist_positive)
A1 = rep(seq(100,220,2.5),each=length(year))
A2 = rep(1:length(year),length(seq(100,220,2.5)))
A3 = as.data.frame(pivot_longer(OLR_520, cols=-c(lon),names_to='year',values_to='OLR'))[,3]
image(interp(A1,A2,A3, xo=seq(min(A1),max(A1),length=length(year)*7),yo=seq(min(A2),max(A2),length=49*7),duplicate='mean'),
xlim=c(100,220),ylim=c(length(year),1),breaks=c(min(A3),seq(190,270,5),max(A3)),axes=F,xlab='',ylab='',col=colist)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : success: collinearities reduced through jitter
## Warning in image.default(interp(A1, A2, A3, xo = seq(min(A1), max(A1), length =
## length(year) * : unsorted 'breaks' will be sorted before use
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=length(year)*9),yo=seq(min(A2),max(A2),length=49*9),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280], breaks=seq(190,270,10), labcex=1,levels=seq(190,270,10),
labels=seq(190,270,10))
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = length(year)
## * : success: collinearities reduced through jitter
axis(1,at=seq(100,220,40),labels=expression(100~degree~E,140~degree~E,180~degree,140~degree~W),cex.axis=1.2)
axis(2, at = c(1,6,11,16,21,26,31,36),labels= c(1985,1990,1995,2000,2005,2010,2015,2020),las=1)
axis(4, at = c(1,6,11,16,21,26,31,36),labels= c('','','','','','','',''),las=1)
mtext('Longitude',at=c(160,-5),side=1,line=3,cex=1.5)
mtext('Year',side=2,line=4, cex=1.5)
text(230,-0.2,expression(paste(W,' ',m^-2)),cex=1.2,xpd=T,adj=0,pos=2)
text(75,-1.5,'b',cex=2.8,font=2,xpd=T)
box()
Figure 2. Hovmöller diagrams of the zonally distributed OLR. The yearly JJA values are meridionally averaged over (a) 20\(^\circ\)–30\(^\circ\)N and (b) 5\(^\circ\)–20\(^\circ\)N.
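For reference, the meridional averaging feeding these Hovmöller panels can be written as a single pipeline; the sketch below is equivalent to the chunk above for panel (a) (swap the latitude range for panel (b)) and assumes OLR from extseason() as defined earlier.
# Sketch: compact form of the meridional averaging used for Fig. 2a.
hov_2030 = OLR %>%
filter(lon %in% seq(100,220,2.5), lat %in% seq(20,30,2.5)) %>%
pivot_longer(-c(lon,lat),names_to='year',values_to='OLR') %>%
group_by(lon,year) %>%
summarize(Mean = mean(OLR), .groups='drop') %>%
pivot_wider(names_from=year,values_from=Mean)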
library(ncdf4)
library(tidyverse)
library(dplyr)
filter = dplyr::filter
# Seasonal extraction -----------------------------------------------------
# data         : lon x lat x month array read with ncvar_get()
# year         : years covered by the monthly record (e.g. 1985:2020)
# degree       : grid spacing, 2.5 or 0.25 (NCEP/NOAA)
# beforemonth  : month preceding the first month of the season (e.g. Jun-Nov -> 5)
# monthlyrange : number of months in the season (e.g. Jun-Nov -> 6)
extseason = function(data,year,degree,beforemonth,monthlyrange){
ncell = dim(data)[1]*dim(data)[2] # grid cells per monthly field
vec = as.vector(data) ; EndMon = seq(beforemonth,(length(year)*12),12) ; imsi = NULL
# collect the seasonal months of every year
for(i in EndMon){imsi = c(imsi,vec[(ncell*i+1):(ncell*(i+monthlyrange))])}
imsimat = matrix(imsi,nrow=ncell)
extraction.imsi = matrix(NA,nrow=ncell,ncol=length(year))
# seasonal mean for each grid cell and year
for(j in 1:ncell){for(k in 1:length(year)){extraction.imsi[j,k] = mean(imsimat[j,monthlyrange*(k-1)+c(1:monthlyrange)])}}
if(degree == 2.5){extraction.imsi = data.frame(rep(seq(0,357.5,2.5),73),rep(seq(90,-90,-2.5),each=144),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
if(degree == 0.25){extraction.imsi = data.frame(rep(seq(0,359.75,0.25),721),rep(seq(90,-90,-0.25),each=1440),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
return(extraction.imsi)
}
# Setting -----------------------------------------------------------------
year=1985:2020
# 1. Base data
gmsst=read.table('gmsst_jja_1854to2022.dat',T)[(year[1]-1853):(rev(year)[1]-1853),1]
soi=read.table('soi_jja_1951to2022.dat',T)[(year[1]-1950):(rev(year)[1]-1950),1]
# 2. OLR (record begins 1974/06; drop the first 7 months so the series starts Jan 1975)
OLR = ncvar_get(nc_open(paste0(getwd(),"/","olr.mon.mean.nc")),"olr")[,,-(1:7)]
OLR = OLR[,,((year[1]-1975)*12+1):((rev(year)[1]-1975)*12+12)]
OLR = extseason(OLR,year,2.5,5,3)
OLR = OLR[c(order(OLR[,1],-OLR[,2])),] ; rownames(OLR) = NULL
OLR_2030 = OLR %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(20,30,2.5))
OLR_2030 = as.data.frame(pivot_longer(OLR_2030, cols=-c(lon,lat),names_to='year',values_to='OLR'))
OLR_2030 = OLR_2030 %>% group_by(lon,year) %>% summarize(Mean = mean(OLR))
## `summarise()` has grouped output by 'lon'. You can override using the `.groups`
## argument.
OLR_2030 = pivot_wider(OLR_2030,names_from=year,values_from=Mean)
OLR_520 = OLR %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(5,20,2.5))
OLR_520 = as.data.frame(pivot_longer(OLR_520, cols=-c(lon,lat),names_to='year',values_to='OLR'))
OLR_520 = OLR_520 %>% group_by(lon,year) %>% summarize(Mean = mean(OLR))
## `summarise()` has grouped output by 'lon'. You can override using the `.groups`
## argument.
OLR_520 = pivot_wider(OLR_520,names_from=year,values_from=Mean)
x = scale(soi)[,1]
yS = OLR_2030 # Subtropical
yT = OLR_520 # Tropical
sigma = round(seq(2,-2,-0.1),1) ; imsipred = NULL
for(i in 1:49){imsipred = cbind(imsipred,
predict(lm(as.numeric(yS[i,2:(length(year)+1)])~x),
data.frame(x=sigma)))}
par(mar=c(4,7,4,2))
library(maps)
library(akima)
library(RColorBrewer)
# Color
colist_negative = colorRampPalette(brewer.pal(8,'Blues'))(9)
colist_positive = colorRampPalette(brewer.pal(8,'YlOrRd'))(9)
colist = c(rev(colist_negative),colist_positive)
A1 = rep(seq(100,220,2.5),each=41)
A2 = rep(1:41,length(seq(100,220,2.5)))
A3 = imsipred
image(interp(A1,A2,A3, xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
xlim=c(100,220),ylim=c(41,1),breaks=c(min(A3),seq(190,270,5),max(A3)),axes=F,xlab='',ylab='',col=colist)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
## Warning in image.default(interp(A1, A2, A3, xo = seq(min(A1), max(A1), length =
## 49 * : unsorted 'breaks' will be sorted before use
label = c(190,'',200,'',210,'',220,'',230,'',240,'',250,'',260,'',270)
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280], labcex=1,levels=seq(190,270,5), labels=label)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
axis(1,at=seq(100,220,10),labels=expression(100~degree~E,'',120~degree~E,'',140~degree~E,'',160~degree~E,'',180~degree,
'',160~degree~W,'',140~degree~W),cex.axis=0.8)
axis(2, at = c(1,6,11,16,21,26,31,36,41),labels= c('2',' ',1,' ',0,' ',-1,' ','-2'),las=1)
axis(4, at = c(1,6,11,16,21,26,31,36,41),labels= c('','','','','','','','',''),las=1)
mtext('Longitude',side=1,line=2.5, cex=1)
mtext('Standardized SOI (s.d.)',side=2,line=2.5, cex=1)
text(225,-2.5,expression(paste(W,' ',m^-2)),cex=0.9,xpd=T,adj=0,pos=2)
text(80,-5.5,'a',cex=2.5,font=2,xpd=T)
box()
sigma = round(seq(2,-2,-0.1),1) ; imsipred = NULL
for(i in 1:49){imsipred = cbind(imsipred,predict(lm(as.numeric(yS[i,2:(length(year)+1)])~x),data.frame(x=sigma)))}
Clim = imsipred[21,] # row 21 of the sigma grid: prediction at SOI = 0 s.d.
dev = NULL ; for(i in 1:41){dev = rbind(dev,imsipred[i,]-Clim)} # anomaly from the SOI = 0 prediction
par(mar=c(4,7,4,2))
colist_negative = colorRampPalette(brewer.pal(7,'Blues'))(8)
colist_positive = colorRampPalette(brewer.pal(7,'YlOrRd'))(8)
colist = c(rev(colist_negative),c('#FFFFFF','#FFFFFF','#FFFFFF'),colist_positive)
A1 = rep(seq(100,220,2.5),each=41)
A2 = rep(1:41,length(seq(100,220,2.5)))
A3 = dev
image(interp(A1,A2,A3, xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
xlim=c(100,220),ylim=c(41,1),breaks=c(seq(-9,-1,1),c(0.01,0.01),seq(1,9,1)),axes=F,xlab='',ylab='',col=colist)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280],labcex=1,levels=c(seq(-9,-1,1),seq(1,9,1)),
labels=c(seq(-9,-1,1),seq(1,9,1)))
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
axis(1,at=seq(100,220,10),labels=expression(100~degree~E,'',120~degree~E,'',140~degree~E,'',160~degree~E,'',180~degree,
'',160~degree~W,'',140~degree~W),cex.axis=0.8)
axis(2, at = c(1,6,11,16,21,26,31,36,41),labels= c('2',' ',1,' ',0,' ',-1,' ','-2'),las=1)
axis(4, at = c(1,6,11,16,21,26,31,36,41),labels= c('','','','','','','','',''),las=1)
mtext('Longitude',side=1,line=2.5, cex=1)
mtext('Standardized SOI (s.d.)',side=2,line=2.5, cex=1)
text(225,-2.5,expression(paste(W,' ',m^-2)),cex=0.9,xpd=T,adj=0,pos=2)
text(80,-5.5,'b',cex=2.5,font=2,xpd=T)
box()
sigma = round(seq(2,-2,-0.1),1) ; imsipred = NULL
for(i in 1:49){imsipred = cbind(imsipred,predict(lm(as.numeric(yT[i,2:(length(year)+1)])~x),data.frame(x=sigma)))}
par(mar=c(4,7,4,2))
# Color
colist_negative = colorRampPalette(brewer.pal(8,'Blues'))(9)
colist_positive = colorRampPalette(brewer.pal(8,'YlOrRd'))(9)
colist = c(rev(colist_negative),colist_positive)
A1 = rep(seq(100,220,2.5),each=41)
A2 = rep(1:41,length(seq(100,220,2.5)))
A3 = imsipred
image(interp(A1,A2,A3, xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
xlim=c(100,220),ylim=c(41,1),breaks=c(min(A3),seq(190,270,5),max(A3)),axes=F,xlab='',ylab='',col=colist)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
## Warning in image.default(interp(A1, A2, A3, xo = seq(min(A1), max(A1), length =
## 49 * : unsorted 'breaks' will be sorted before use
label = c(190,'',200,'',210,'',220,'',230,'',240,'',250,'',260,'',270)
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280], labcex=1,levels=seq(190,270,5), labels=label)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
axis(1,at=seq(100,220,10),labels=expression(100~degree~E,'',120~degree~E,'',140~degree~E,'',160~degree~E,'',180~degree,
'',160~degree~W,'',140~degree~W),cex.axis=0.8)
axis(2, at = c(1,6,11,16,21,26,31,36,41),labels= c('2',' ',1,' ',0,' ',-1,' ','-2'),las=1)
axis(4, at = c(1,6,11,16,21,26,31,36,41),labels= c('','','','','','','','',''),las=1)
mtext('Longitude',side=1,line=2.5, cex=1)
mtext('Standardized SOI (s.d.)',side=2,line=2.5, cex=1)
text(225,-2.5,expression(paste(W,' ',m^-2)),cex=0.9,xpd=T,adj=0,pos=2)
text(80,-5.5,'c',cex=2.5,font=2,xpd=T)
box()
sigma = round(seq(2,-2,-0.1),1) ; imsipred = NULL
for(i in 1:49){imsipred = cbind(imsipred,predict(lm(as.numeric(yT[i,2:(length(year)+1)])~x),data.frame(x=sigma)))}
Clim = imsipred[21,] # row 21 of the sigma grid: prediction at SOI = 0 s.d.
dev = NULL ; for(i in 1:41){dev = rbind(dev,imsipred[i,]-Clim)} # anomaly from the SOI = 0 prediction
par(mar=c(4,7,4,2))
colist_negative = colorRampPalette(brewer.pal(7,'Blues'))(8)
colist_positive = colorRampPalette(brewer.pal(7,'YlOrRd'))(8)
colist = c(rev(colist_negative),c('#FFFFFF','#FFFFFF','#FFFFFF'),colist_positive)
A1 = rep(seq(100,220,2.5),each=41)
A2 = rep(1:41,length(seq(100,220,2.5)))
A3 = dev
image(interp(A1,A2,A3, xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
xlim=c(100,220),ylim=c(41,1),breaks=c(min(A3),seq(-8,-1,1),c(0.01,0.01),seq(1,8,1),max(A3)),axes=F,xlab='',ylab='',col=colist)
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
## Warning in image.default(interp(A1, A2, A3, xo = seq(min(A1), max(A1), length =
## 49 * : unsorted 'breaks' will be sorted before use
contour(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=49*7),yo=seq(min(A2),max(A2),length=41*7),duplicate='mean'),
lwd=0.5, add = TRUE, col=colors()[280],labcex=1,levels=c(seq(-9,-1,1),seq(1,9,1)),
labels=c(seq(-9,-1,1),seq(1,9,1)))
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## collinear points, trying to add some jitter to avoid colinearities!
## Warning in interp(A1, A2, A3, xo = seq(min(A1), max(A1), length = 49 * 7), :
## success: collinearities reduced through jitter
axis(1,at=seq(100,220,10),labels=expression(100~degree~E,'',120~degree~E,'',140~degree~E,'',160~degree~E,'',180~degree,
'',160~degree~W,'',140~degree~W),cex.axis=0.8)
axis(2, at = c(1,6,11,16,21,26,31,36,41),labels= c('2',' ',1,' ',0,' ',-1,' ','-2'),las=1)
axis(4, at = c(1,6,11,16,21,26,31,36,41),labels= c('','','','','','','','',''),las=1)
mtext('Longitude',side=1,line=2.5, cex=1)
mtext('Standardized SOI (s.d.)',side=2,line=2.5, cex=1)
text(225,-2.5,expression(paste(W,' ',m^-2)),cex=0.9,xpd=T,adj=0,pos=2)
text(80,-5.5,'d',cex=2.5,font=2,xpd=T)
box()
Figure 3. Response of OLR to ENSO variation. (a) and (c) Predicted values of OLR; (b) and (d) anomalies from the respective longitudinal OLR means. (a) and (b) are for 20\(^\circ\)–30\(^\circ\)N, while (c) and (d) are for 5\(^\circ\)–20\(^\circ\)N. Here, OLR represents the WNPSH strength, and the explanatory ENSO status is indicated by the SOI.
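The per-longitude fits behind this figure can also be written more compactly; the sketch below reproduces the prediction loop of panel (a) with sapply(), assuming x (standardized SOI), yS, sigma, and year as defined in the chunk above, and is illustrative rather than a replacement.
# Sketch: vectorized form of the Fig. 3a prediction loop.
# Columns of 'pred' are the 49 longitudes (100E-140W, 2.5-degree steps),
# rows follow the standardized SOI grid 'sigma' (+2 to -2 s.d.).
pred = sapply(1:49,function(i){predict(lm(as.numeric(yS[i,2:(length(year)+1)])~x),data.frame(x=sigma))})
anom = sweep(pred,2,pred[which(sigma == 0),]) # anomaly from the SOI = 0 prediction, as in Fig. 3b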
library(ncdf4)
library(tidyverse)
library(dplyr)
filter = dplyr::filter
# Seasonal extraction -----------------------------------------------------
# data         : lon x lat x month array read with ncvar_get()
# year         : years covered by the monthly record (e.g. 1985:2020)
# degree       : grid spacing, 2.5 or 0.25 (NCEP/NOAA)
# beforemonth  : month preceding the first month of the season (e.g. Jun-Nov -> 5)
# monthlyrange : number of months in the season (e.g. Jun-Nov -> 6)
extseason = function(data,year,degree,beforemonth,monthlyrange){
ncell = dim(data)[1]*dim(data)[2] # grid cells per monthly field
vec = as.vector(data) ; EndMon = seq(beforemonth,(length(year)*12),12) ; imsi = NULL
# collect the seasonal months of every year
for(i in EndMon){imsi = c(imsi,vec[(ncell*i+1):(ncell*(i+monthlyrange))])}
imsimat = matrix(imsi,nrow=ncell)
extraction.imsi = matrix(NA,nrow=ncell,ncol=length(year))
# seasonal mean for each grid cell and year
for(j in 1:ncell){for(k in 1:length(year)){extraction.imsi[j,k] = mean(imsimat[j,monthlyrange*(k-1)+c(1:monthlyrange)])}}
if(degree == 2.5){extraction.imsi = data.frame(rep(seq(0,357.5,2.5),73),rep(seq(90,-90,-2.5),each=144),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
if(degree == 0.25){extraction.imsi = data.frame(rep(seq(0,359.75,0.25),721),rep(seq(90,-90,-0.25),each=1440),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
return(extraction.imsi)
}
# Setting -----------------------------------------------------------------
year=1985:2020
# 1. OLR (record begins 1974/06; drop the first 7 months so the series starts Jan 1975)
OLR = ncvar_get(nc_open(paste0(getwd(),"/","olr.mon.mean.nc")),"olr")[,,-(1:7)]
OLR = OLR[,,((year[1]-1975)*12+1):((rev(year)[1]-1975)*12+12)]
OLR = extseason(OLR,year,2.5,5,3)
OLR = OLR[c(order(OLR[,1],-OLR[,2])),] ; rownames(OLR) = NULL
OLR_clim = OLR %>% mutate(Clim = rowSums(OLR[,3:(length(year)+2)])/length(year)) %>% relocate(Clim, .after=lat)
# WNP filter
OLR_clim = OLR_clim %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(0,50,2.5))
# ABtrack
gmsst=read.table('gmsst_jja_1854to2022.dat',T)[(year[1]-1853):(rev(year)[1]-1853),1]
soi=read.table('soi_jja_1951to2022.dat',T)[(year[1]-1950):(rev(year)[1]-1950),1]
soi = scale(soi)[,1] ; gmsst = scale(gmsst)[,1]
soidf = data.frame(year,soi) ; names(soidf) = c('yr','soi')
gmsstdf = data.frame(year,gmsst) ; names(gmsstdf) = c('yr','gmsst')
AB = read.table('ABtrack2020RIeq.dat')
ABjt = AB[AB$bst=='JT' & AB$yr %in% year & AB$mo %in% c(6,7,8),-c(4,7:19)]
ABjt = ABjt[ABjt$ln1 <= 175,]
ABjt = left_join(ABjt,soidf)
## Joining with `by = join_by(yr)`
# 20-30N, 100-130E : WNS (western North seesaw)
# 20-30N, 130E-175E : ENS (eastern North seesaw)
# 5-19.9N, 100-130E : WSS (western South seesaw)
# 5-19.9N, 130E-175E : ESS (eastern South seesaw)
ABjt = ABjt %>% mutate(Section = ifelse(ln1 <= 130 & (lt1 <= 30 & lt1 >= 20), 'WNS',
ifelse((ln1 <= 175 & ln1 >= 130) & (lt1 <= 30 & lt1 >= 20),'ENS',
ifelse((ln1 <= 130 & (lt1 < 20 & lt1 >= 5)),'WSS',
ifelse((ln1 <= 175 & ln1 >= 130) & (lt1 < 20 & lt1 >= 5), 'ESS',
'None')))))
ABjt = ABjt %>% mutate(ENSOphase = ifelse(soi >= 1,'La',ifelse(soi <= -1,'El','Normal')))
ABjt = ABjt[!(ABjt$Section == 'None' | ABjt$ENSOphase == 'Normal'),]
Section = ABjt %>% group_by(Section,ENSOphase) %>% summarise(count = n())
## `summarise()` has grouped output by 'Section'. You can override using the
## `.groups` argument.
CountEast = Section[1:4,] %>% group_by(ENSOphase) %>% mutate(sum = sum(count))
CountWest = Section[5:8,] %>% group_by(ENSOphase) %>% mutate(sum = sum(count))
CountEast = CountEast %>% mutate(pro = round(count/sum,2)*100)
CountWest = CountWest %>% mutate(pro = round(count/sum,2)*100)
Section = rbind(CountEast,CountWest)
# Plot Result
library(maps)
library(akima)
library(RColorBrewer)
# Color
colist_negative = colorRampPalette(brewer.pal(6,'Blues'))(7)
colist_positive = colorRampPalette(brewer.pal(6,'YlOrRd'))(7)
colist = c(rev(colist_negative),colist_positive)
A1 = OLR_clim[,1] ; A2 = OLR_clim[,2] ; A3 = OLR_clim[,3]
par(mar=c(6,7,2,2))
plot(-999,-999,xlim=c(100,225),ylim=c(0,50),xlab='',ylab='',axes=F,xaxs='i',yaxs='i')
m=map('world',col=colors()[213],interior=F,add=T,lty=0,xlim=c(100,220),ylim=c(0,50))
map(m,boundary=T,interior=F,add=T,col=colors()[213],lwd=1.5,xlim=c(100,220),ylim=c(0,50))
abline(v=seq(0,360,20),lty=1,lwd=0.7,col=grey(0.8))
abline(h=seq(-90,90,10),lty=1,lwd=0.7,col=grey(0.8))
Section = as.data.frame(Section)
rect(100,5,130,30,lty=1,lwd=1.5,border=grey(0.5),xpd=T,lend='square',ljoin='mitre',
col=adjustcolor('white',alpha.f=0.7))
rect(130,5,175,30,lty=1,lwd=1.5,border=grey(0.5),xpd=T,lend='square',ljoin='mitre',
col=adjustcolor('white',alpha.f=0.7))
# 1. ENS El Nino
rect(median(seq(130,175,2.5))+10,20,
median(seq(130,175,2.5))+4,20+seq(1,10,length=100)[Section[1,5]],
xpd=T,col='#FA6A59',border='#FA6A59') # ENS_El Nino
rect(median(seq(130,175,2.5))+10,20,
median(seq(130,175,2.5))+4,30,
xpd=T,border='#FA6A59',lwd=1.5) # ENS_El Nino
text(median(seq(130,175,2.5))+7,32,
paste0(Section[1,5],' %'),cex=1,xpd=T,adj=0.5)
# 2. ENS La Nina
rect(median(seq(130,175,2.5))-10,20,
median(seq(130,175,2.5))-4,20+seq(1,10,length=100)[Section[2,5]],
xpd=T,col='darkblue',border='darkblue') # ENS_La Nina
rect(median(seq(130,175,2.5))-10,20,
median(seq(130,175,2.5))-4,30,
xpd=T,border='darkblue',lwd=1.5) # ENS_La Nina
text(median(seq(130,175,2.5))-7,32,
paste0(Section[2,5],' %'),cex=1,xpd=T,adj=0.5)
# 3. ESS El Nino
rect(median(seq(130,175,2.5))+10,20,
median(seq(130,175,2.5))+4,20-seq(1,10,length=100)[Section[3,5]],
xpd=T,col='#FA6A59',border='#FA6A59') # ESS_El Nino
rect(median(seq(130,175,2.5))+10,20,
median(seq(130,175,2.5))+4,10,
xpd=T,border='#FA6A59',lwd=1.5) # ESS_El Nino
text(median(seq(130,175,2.5))+7,8,
paste0(Section[3,5],' %'),cex=1,xpd=T,adj=0.5)
# 4. ESS La Nina
rect(median(seq(130,175,2.5))-10,20,
median(seq(130,175,2.5))-4,20-seq(1,10,length=100)[Section[4,5]],
xpd=T,col='darkblue',border='darkblue') # ESS_La Nina
rect(median(seq(130,175,2.5))-10,20,
median(seq(130,175,2.5))-4,10,
xpd=T,border='darkblue',lwd=1.5) # ESS_La Nina
text(median(seq(130,175,2.5))-7,8,
paste0(Section[4,5],' %'),cex=1,xpd=T,adj=0.5)
# 5. WNS El Nino
rect(median(seq(100,130,2.5))+10,20,
median(seq(100,130,2.5))+4,20+seq(1,10,length=100)[Section[5,5]],
xpd=T,col='#FA6A59',border='#FA6A59') # WNS_El Nino
rect(median(seq(100,130,2.5))+10,20,
median(seq(100,130,2.5))+4,30,
xpd=T,border='#FA6A59',lwd=1.5) # WNS_El Nino
text(median(seq(100,130,2.5))+7,32,
paste0(Section[5,5],' %'),cex=1,xpd=T,adj=0.5)
# 6. WNS La Nina
rect(median(seq(100,130,2.5))-10,20,
median(seq(100,130,2.5))-4,20+seq(1,10,length=100)[Section[6,5]],
xpd=T,col='darkblue',border='darkblue') # WNS_La Nina
rect(median(seq(100,130,2.5))-10,20,
median(seq(100,130,2.5))-4,30,
xpd=T,border='darkblue',lwd=1.5) # WNS_La Nina
text(median(seq(100,130,2.5))-7,32,
paste0(Section[6,5],' %'),cex=1,xpd=T,adj=0.5)
# 7. WSS El Nino
rect(median(seq(100,130,2.5))+10,20,
median(seq(100,130,2.5))+4,20-seq(1,10,length=100)[Section[7,5]],
xpd=T,col='#FA6A59',border='#FA6A59') # WSS_El Nino
rect(median(seq(100,130,2.5))+10,20,
median(seq(100,130,2.5))+4,10,
xpd=T,border='#FA6A59',lwd=1.5) # WSS_El Nino
text(median(seq(100,130,2.5))+7,8,
paste0(Section[7,5],' %'),cex=1,xpd=T,adj=0.5)
# 8. WSS La Nina
rect(median(seq(100,130,2.5))-10,20,
median(seq(100,130,2.5))-4,20-seq(1,10,length=100)[Section[8,5]],
xpd=T,col='darkblue',border='darkblue') # WSS_La Nina
rect(median(seq(100,130,2.5))-10,20,
median(seq(100,130,2.5))-4,10,
xpd=T,border='darkblue',lwd=1.5) # WSS_La Nina
text(median(seq(100,130,2.5))-7,8,
paste0(Section[8,5],' %'),cex=1,xpd=T,adj=0.5)
lines(c(median(seq(130,175,2.5))+2,median(seq(130,175,2.5))+12),c(20,20),lwd=2.0,col='green')
lines(c(median(seq(130,175,2.5))-2,median(seq(130,175,2.5))-12),c(20,20),lwd=2.0,col='green')
lines(c(median(seq(100,130,2.5))+2,median(seq(100,130,2.5))+12),c(20,20),lwd=2.0,col='green')
lines(c(median(seq(100,130,2.5))-2,median(seq(100,130,2.5))-12),c(20,20),lwd=2.0,col='green')
axis(1,at=seq(100,220,20),
labels=expression(100~degree~E,120~degree~E,140~degree~E,160~degree~E,180~degree,
160~degree~W,140~degree~W),cex.axis=1.2)
axis(2,at=seq(0,50,10),
labels=expression(EQ.,10~degree~N,20~degree~N,30~degree~N,40~degree~N,50~degree~N),cex.axis=1.2,las=1)
rect(220,0,225,50,col='white',border='white',xpd=T)
mtext('Longitude',at=c(160,-10),side=1,line=3.5,cex=1.5)
mtext('Latitude',side=2,line=4.8,cex=1.5)
rect(100,0,220,50,lty=1,lwd=1,xpd=T,lend='square',ljoin='mitre') # same as box()
Figure 4. Meridional comparison of the proportions of tropical storms. Tropical-storm occurrences are compared between the lower and higher latitudes under (blue) La Niña and (red) El Niño environments. A tropical storm is defined as a TC whose maximum sustained wind speed exceeds 17\(~\)m\(~\)s\(^{-1}\). The 100\(~\)% value corresponds to the total number of occurrences under each of the La Niña and El Niño environments. The cases under the La Niña environment are chosen from the years when the standardized SOI is equal to or greater than 1.5, whereas those under the El Niño environment are chosen from the years when the standardized SOI is equal to or less than -1.5.
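The 17\(~\)m\(~\)s\(^{-1}\) tropical-storm criterion in the caption is not applied explicitly in the chunk above, so the track file is presumably already restricted to such records; if a maximum-sustained-wind column were available, the filter would look like the sketch below, where the column name wind is hypothetical.
# Sketch: tropical-storm definition from the caption.
# 'wind' is a hypothetical column of maximum sustained wind speed (m/s);
# ABtrack2020RIeq.dat as read above may already be limited to such cases.
ABjt_ts = ABjt %>% filter(wind > 17)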
library(ncdf4)
library(tidyverse)
library(dplyr)
library(rsq)
filter = dplyr::filter
# Seasonal extraction -----------------------------------------------------
# data         : lon x lat x month array read with ncvar_get()
# year         : years covered by the monthly record (e.g. 1985:2020)
# degree       : grid spacing, 2.5 or 0.25 (NCEP/NOAA)
# beforemonth  : month preceding the first month of the season (e.g. Jun-Nov -> 5)
# monthlyrange : number of months in the season (e.g. Jun-Nov -> 6)
extseason = function(data,year,degree,beforemonth,monthlyrange){
ncell = dim(data)[1]*dim(data)[2] # grid cells per monthly field
vec = as.vector(data) ; EndMon = seq(beforemonth,(length(year)*12),12) ; imsi = NULL
# collect the seasonal months of every year
for(i in EndMon){imsi = c(imsi,vec[(ncell*i+1):(ncell*(i+monthlyrange))])}
imsimat = matrix(imsi,nrow=ncell)
extraction.imsi = matrix(NA,nrow=ncell,ncol=length(year))
# seasonal mean for each grid cell and year
for(j in 1:ncell){for(k in 1:length(year)){extraction.imsi[j,k] = mean(imsimat[j,monthlyrange*(k-1)+c(1:monthlyrange)])}}
if(degree == 2.5){extraction.imsi = data.frame(rep(seq(0,357.5,2.5),73),rep(seq(90,-90,-2.5),each=144),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
if(degree == 0.25){extraction.imsi = data.frame(rep(seq(0,359.75,0.25),721),rep(seq(90,-90,-0.25),each=1440),extraction.imsi)
names(extraction.imsi) = c('lon','lat',year)}
return(extraction.imsi)
}
# Setting -----------------------------------------------------------------
year=1985:2020
# 1. Base data
gmsst=read.table('gmsst_jja_1854to2022.dat',T)[(year[1]-1853):(rev(year)[1]-1853),1]
soi=read.table('soi_jja_1951to2022.dat',T)[(year[1]-1950):(rev(year)[1]-1950),1]
# 2. OLR (record begins 1974/06; drop the first 7 months so the series starts Jan 1975)
OLR = ncvar_get(nc_open(paste0(getwd(),"/","olr.mon.mean.nc")),"olr")[,,-(1:7)]
OLR = OLR[,,((year[1]-1975)*12+1):((rev(year)[1]-1975)*12+12)]
OLR = extseason(OLR,year,2.5,5,3)
OLR = OLR[c(order(OLR[,1],-OLR[,2])),] ; rownames(OLR) = NULL
OLR_2030 = OLR %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(20,30,2.5))
OLR_2030 = as.data.frame(pivot_longer(OLR_2030, cols=-c(lon,lat),names_to='year',values_to='OLR'))
OLR_Clim_OS = OLR_2030 %>% group_by(lon) %>% summarize(Clim = mean(OLR))
OLR_Clim_OS = as.data.frame(OLR_Clim_OS)
OLR_2030 = OLR_2030 %>% group_by(lon,year) %>% summarize(Mean = mean(OLR))
## `summarise()` has grouped output by 'lon'. You can override using the `.groups`
## argument.
OLR_2030 = pivot_wider(OLR_2030,names_from=year,values_from=Mean)
OLR_520 = OLR %>% filter(lon %in% seq(100,220,2.5) & lat %in% seq(5,20,2.5))
OLR_520 = as.data.frame(pivot_longer(OLR_520, cols=-c(lon,lat),names_to='year',values_to='OLR'))
OLR_Clim_OT = OLR_520 %>% group_by(lon) %>% summarize(Clim = mean(OLR))
OLR_Clim_OT = as.data.frame(OLR_Clim_OT)
OLR_520 = OLR_520 %>% group_by(lon,year) %>% summarize(Mean = mean(OLR))
## `summarise()` has grouped output by 'lon'. You can override using the `.groups`
## argument.
OLR_520 = pivot_wider(OLR_520,names_from=year,values_from=Mean)
# 3. Regression
x1 = scale(soi)[,1] ; x2 = scale(gmsst)[,1]
yS = OLR_2030 # Subtropical
yT = OLR_520 # Tropical
# SOI, Subtropical
sigma = round(seq(2,-2,-0.1),1) ; SOIpred_S = NULL
for(i in 1:49)
{
y = as.numeric(yS[i,2:(length(year)+1)])
model = lm(y~x1)
SOIpred_S = cbind(SOIpred_S,predict(model,data.frame(x1=sigma)))
}
# SOI + GMSST, Subtropical
sigma = round(seq(2,-2,-0.1),1) ; MLpred_S = NULL
for(i in 1:49)
{
y = as.numeric(yS[i,2:(length(year)+1)])
model = lm(y~x1+x2)
MLpred_S = cbind(MLpred_S,predict(model,data.frame(x1=sigma,x2=1.5)))
}
# SOI, tropical
sigma = round(seq(2,-2,-0.1),1) ; SOIpred_T = NULL
for(i in 1:49)
{
y = as.numeric(yT[i,2:(length(year)+1)])
model = lm(y~x1)
SOIpred_T = cbind(SOIpred_T,predict(model,data.frame(x1=sigma)))
}
# SOI + GMSST, tropical
sigma = round(seq(2,-2,-0.1),1) ; MLpred_T = NULL
for(i in 1:49)
{
y = as.numeric(yT[i,2:(length(year)+1)])
model = lm(y~x1+x2)
MLpred_T = cbind(MLpred_T,predict(model,data.frame(x1=sigma,x2=1.5)))
}
# Anomalies from the SOI = 0 prediction -----------------------------------
ClimS = SOIpred_S[which(seq(2,-2,-0.1) == 0),] # Eq.(1) prediction at SOI = 0, subtropical band
ClimT = SOIpred_T[which(seq(2,-2,-0.1) == 0),] # Eq.(1) prediction at SOI = 0, tropical band
# Eq.(1) anomalies (SOI only)
devS = NULL ; for(i in 1:41){devS = rbind(devS,SOIpred_S[i,]-ClimS)}
devT = NULL ; for(i in 1:41){devT = rbind(devT,SOIpred_T[i,]-ClimT)}
devSL = devS[which(seq(2,-2,-0.1) == 1.5),]  # +1.5 s.d. SOI (La Nina), subtropical
devSE = devS[which(seq(2,-2,-0.1) == -1.5),] # -1.5 s.d. SOI (El Nino), subtropical
devTL = devT[which(seq(2,-2,-0.1) == 1.5),]  # +1.5 s.d. SOI (La Nina), tropical
devTE = devT[which(seq(2,-2,-0.1) == -1.5),] # -1.5 s.d. SOI (El Nino), tropical
# Eq.(2) anomalies (SOI + GMSST, with GMSST fixed at +1.5 s.d.)
diffS = NULL ; for(i in 1:41){diffS = rbind(diffS,MLpred_S[i,]-ClimS)}
diffT = NULL ; for(i in 1:41){diffT = rbind(diffT,MLpred_T[i,]-ClimT)}
diffSL = diffS[which(seq(2,-2,-0.1) == 1.5),]
diffSE = diffS[which(seq(2,-2,-0.1) == -1.5),]
diffTL = diffT[which(seq(2,-2,-0.1) == 1.5),]
diffTE = diffT[which(seq(2,-2,-0.1) == -1.5),]
# Observation of Subtropics -----------------------------------------------
OLR_dev_S = data.frame(OLR_2030[,1])
# Deviation : y - y bar
for(i in 2:length(OLR_2030[1,])){OLR_dev_S = cbind(OLR_dev_S,OLR_2030[,i] - OLR_Clim_OS[,2])}
names(OLR_dev_S) = c('lon',year)
sd = scale(OLR_dev_S[,2:length(OLR_dev_S[1,])])
Fit1 = NULL
for(i in 1:length(year))
{
imsi = NULL
for(j in 1:49){imsi = c(imsi,predict(lm(as.numeric(sd[j,])~x1+x2),data.frame(x1=x1[i], x2=1.5)))}
Fit1 = cbind(Fit1,imsi)
}
Fit1 = data.frame(OLR_2030[,1],Fit1) ; names(Fit1) = c('lon',year) ; rownames(Fit1) = NULL
rsq1 = NULL
for(j in 1:49){model = lm(sd[j,]~x1+x2) ; rsq1 = c(rsq1,rsq(model))}
pval1 = NULL
for(j in 1:49){pval1 = c(pval1,cor.test(as.numeric(sd[j,]),as.numeric(Fit1[j,2:length(OLR_dev_S[1,])]))$p.value)}
# Observation of Tropics --------------------------------------------------
OLR_dev_T = data.frame(OLR_520[,1])
# Deviation : y - y bar
for(i in 2:length(OLR_520[1,])){OLR_dev_T = cbind(OLR_dev_T,OLR_520[,i] - OLR_Clim_OT[,2])}
names(OLR_dev_T) = c('lon',year)
sd = scale(OLR_dev_T[,2:length(OLR_dev_T[1,])])
Fit2 = NULL
for(i in 1:length(year))
{
imsi = NULL
for(j in 1:49){imsi = c(imsi,predict(lm(as.numeric(sd[j,])~x1+x2),data.frame(x1=x1[i], x2=1.5)))}
Fit2 = cbind(Fit2,imsi)
}
Fit2 = data.frame(OLR_520[,1],Fit2) ; names(Fit2) = c('lon',year) ; rownames(Fit2) = NULL
rsq2 = NULL
for(j in 1:49){model = lm(sd[j,]~x1+x2) ; rsq2 = c(rsq2,rsq(model))}
pval2 = NULL
for(j in 1:49){pval2 = c(pval2,cor.test(as.numeric(sd[j,]),as.numeric(Fit2[j,2:length(OLR_dev_T[1,])]))$p.value)}
library(maps)
library(akima)
library(RColorBrewer)
par(mar=c(4,4.5,3,2))
colist_negative = colorRampPalette(brewer.pal(9,'Blues'))(10)
colist_positive = colorRampPalette(brewer.pal(9,'YlOrRd'))(10)
colist = c(rev(colist_negative),colist_positive)
pval = data.frame(lon = seq(100,220,2.5),Subtropics = pval1,Tropics=pval2)
pval_S = filter(pval,Subtropics <= 0.1)
pval_T = filter(pval,Tropics <= 0.1)
endS = which(diff(pval_S[,1]) != 2.5)
startS = which(diff(pval_S[,1]) != 2.5)+1
endT = which(diff(pval_T[,1]) != 2.5)
startT = which(diff(pval_T[,1]) != 2.5)+1
plot(-999,-999,xlim=c(100,220),ylim=c(-5.5,7.5),xlab='',ylab='',axes=F,xaxs='i',yaxs='i')
lonlev = pval_S[,1]
lines(seq(100,220,2.5),devSL,lwd=2,col='darkblue') # subtropics
lines(seq(100,220,2.5),devTL,lwd=4,col='darkblue') # tropics
polygon(c(seq(100,220,2.5),seq(220,100,-2.5)),c(diffSL,rev(devSL)),border=NA,col=adjustcolor('darkblue',alpha=0.3))
polygon(c(seq(100,220,2.5),seq(220,100,-2.5)),c(diffTL,rev(devTL)),border=NA,col=adjustcolor('darkblue',alpha=0.6))
polygon(c(100,130,130,100),c(10,10,-10,-10),border=NA,col=adjustcolor('white',alpha.f=0.5))
polygon(c(175,220,220,175),c(10,10,-10,-10),border=NA,col=adjustcolor('white',alpha.f=0.5))
abline(v=c(130,175),col='black',lwd=1)
abline(h=0,lty=2)
axis(1,at=seq(100,220,10),labels=expression(100~degree~E,'',120~degree~E,'',140~degree~E,'',160~degree~E,'',180~degree,
'',160~degree~W,'',140~degree~W),cex.axis=0.8)
axis(2, at = seq(-8,8,2),labels = seq(-8,8,2),las=1)
mtext('Longitude',side=1,line=2.5, cex=1)
mtext(expression(paste('OLR (',W,' ',m^-2,')')),side=2,line=2.5, cex=0.8)
text(90,9,'a',cex=2.5,font=2,xpd=T)
box()
par(mar=c(4,4.5,3,2))
colist_negative = colorRampPalette(brewer.pal(9,'Blues'))(10)
colist_positive = colorRampPalette(brewer.pal(9,'YlOrRd'))(10)
colist = c(rev(colist_negative),colist_positive)
pval = data.frame(lon = seq(100,220,2.5),Subtropics = pval1,Tropics=pval2)
pval_S = filter(pval,Subtropics <= 0.1)
pval_T = filter(pval,Tropics <= 0.1)
endS = which(diff(pval_S[,1]) != 2.5)
startS = which(diff(pval_S[,1]) != 2.5)+1
endT = which(diff(pval_T[,1]) != 2.5)
startT = which(diff(pval_T[,1]) != 2.5)+1
plot(-999,-999,xlim=c(100,220),ylim=c(-5.5,7.5),xlab='',ylab='',axes=F,xaxs='i',yaxs='i')
lonlev = pval_S[,1]
lines(seq(100,220,2.5),devSE,lwd=2,col='#FA8072') # subtropics
lines(seq(100,220,2.5),devTE,lwd=4,col='#FA8072') # tropics
polygon(c(seq(100,220,2.5),seq(220,100,-2.5)),c(diffSE,rev(devSE)),border=NA,col=adjustcolor('darkorange',alpha=0.3))
polygon(c(seq(100,220,2.5),seq(220,100,-2.5)),c(diffTE,rev(devTE)),border=NA,col=adjustcolor('darkorange',alpha=0.6))
polygon(c(100,130,130,100),c(10,10,-10,-10),border=NA,col=adjustcolor('white',alpha.f=0.5))
polygon(c(175,220,220,175),c(10,10,-10,-10),border=NA,col=adjustcolor('white',alpha.f=0.5))
abline(v=c(130,175),col='black',lwd=1)
abline(h=0,lty=2)
axis(1,at=seq(100,220,10),labels=expression(100~degree~E,'',120~degree~E,'',140~degree~E,'',160~degree~E,'',180~degree,
'',160~degree~W,'',140~degree~W),cex.axis=0.8)
axis(2, at = seq(-8,8,2),labels = seq(-8,8,2),las=1)
mtext('Longitude',side=1,line=2.5, cex=1)
mtext(expression(paste('OLR (',W,' ',m^-2,')')),side=2,line=2.5, cex=0.8)
text(90,9,'b',cex=2.5,font=2,xpd=T)
box()
Figure 5. Response of OLR as modulated by global warming. Modulation of OLR in (a) a warmer La Niña environment and (b) a warmer El Niño environment, shown by the transparent shading. The lines in (a) and (b) denote the zonal anomalies of OLR predicted from Eq.(1) at 1.5 and -1.5 standardized SOI, respectively [see also Fig. 3(b)]. The thick and thin lines represent the respective cases for 5\(^\circ\)–20\(^\circ\)N and 20\(^\circ\)–30\(^\circ\)N. According to Eq.(2), each edge of the shaded area indicates the zonal anomalies of predicted OLR at 1.5 standardized SOI and 1.5 standardized GMSST for the warmer La Niña environment, and at -1.5 standardized SOI and 1.5 standardized GMSST for the warmer El Niño environment. Longitudes outside 130\(^\circ\)–175\(^\circ\)E, the band where the meridional seesaw pattern is clearest, are shown with additional transparency.
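The shaded modulation in this figure is simply the gap, at a fixed SOI, between the Eq. (1) and Eq. (2) anomalies computed above; as an illustrative restatement using SOIpred_S, MLpred_S, ClimS, and sigma from the preceding chunk, the warmer La Niña case of panel (a) for the subtropical band (the thin line) is:
# Sketch: warming modulation of subtropical OLR at +1.5 s.d. SOI (Fig. 5a).
iLa = which(sigma == 1.5)
band_a = data.frame(lon = seq(100,220,2.5),
enso_only = SOIpred_S[iLa,] - ClimS, # Eq.(1) anomaly (thin line in Fig. 5a)
with_warming = MLpred_S[iLa,] - ClimS) # Eq.(2) anomaly at +1.5 s.d. GMSST (edge of the shading)
# The shading in panel (a) spans enso_only to with_warming at each longitude.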