Violent typhoons continue to have catastrophic impacts on economies and welfare, but how they are responding to global warming has yet to be fully understood. Here we use an empirical framework to explain physically why observations support a strong connection between increasing ocean warmth and the increasing intensity of super typhoons in the western North Pacific. We show that the energy needed for deep convection is on the rise, with greater heat and moisture in the lower tropical troposphere, but that this energy remains untapped when air pressure is high over the same region. Accordingly, tropical cyclone formation is becoming less common, but those storms that do form are likely to reach extreme intensities from the sudden discharge of the stored energy. These thermodynamic changes in the environment are most pronounced at the extreme end of the intensity range, indicating that super typhoons are likely to become stronger at the expense of overall tropical cyclone occurrences in the western North Pacific.
par(mar=c(0,0,0,0))  # no margins; the image fills the device
require(png)         # readPNG()
## Loading required package: png
require(grid)        # grid.raster()
## Loading required package: grid
img <- readPNG('f_TCI.png')  # schematic of the TC climate framework (Fig. 1)
grid.raster(img)
Fig. 1 Schematic of our TC climate framework. The positive diagonal (positive slope) captures the level of coherence between FRQ and INT. The negative diagonal (negative slope) captures the level of discordance. The state of the TC climate can be represented as a single point in the two-dimensional variability space. A variability direction of TC climate is shown as a thick black line. \(\theta\) is the angle measured from the INT axis. ACT and EINT are the special cases when \(\theta\) is \(+45^{\circ}\) and \(-45^{\circ}\), respectively; both are formed by equal weighting of INT and FRQ.
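As a minimal sketch of this framework (with placeholder standardized series, not the observed records read in below), a TC climate index in the direction \(\theta\) is \(\cos\theta\cdot\mathrm{INT}+\sin\theta\cdot\mathrm{FRQ}\), so ACT and EINT are simply the equally weighted sum and difference:
set.seed(1)
INT  <- as.numeric(scale(rnorm(30)))   # placeholder standardized intensity series
FRQ  <- as.numeric(scale(rnorm(30)))   # placeholder standardized frequency series
tcclim <- function(theta_deg) cos(theta_deg*pi/180)*INT + sin(theta_deg*pi/180)*FRQ
ACT  <- tcclim(+45)                    # equals (INT + FRQ)/sqrt(2)
EINT <- tcclim(-45)                    # equals (INT - FRQ)/sqrt(2)
all.equal(ACT,  (INT + FRQ)/sqrt(2))   # TRUE
all.equal(EINT, (INT - FRQ)/sqrt(2))   # TRUE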
rm(list=ls())
#1.set option
#------------------------------------------------
Year=1984:2013
signif = 0.05 # significance level (alpha = 0.05, i.e. 95% confidence)
#2.read files
#------------------------------------------------
FRQjt=read.table('Data/frqwnp.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]   # JTWC annual TC frequency
INTjt=read.table('Data/intwnp10.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1] # JTWC intensity of the strongest 10% of TCs
FRQjm=read.table('Data/frqjm.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]    # JMA annual TC frequency
INTjm=read.table('Data/intjm10.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]  # JMA intensity of the strongest 10% of TCs
FRQ=(scale(FRQjt)+scale(FRQjm))/sqrt(2)  # composite standardized frequency (JTWC + JMA)
INT=(scale(INTjt)+scale(INTjm))/sqrt(2)  # composite standardized intensity (JTWC + JMA)
gmsst=read.table('Data/gmsst_jjason_1854to2013.dat',TRUE)[(Year[1]-1853):(rev(Year)[1]-1853),1] # global mean SST (JJASON)
#3. compute correlations & p-values
#------------------------------------------------
theta = seq(1, 180, by=1) # variability directions to scan (degrees from the INT axis)
#Compute the correlation between TC climate and global SST
cGMSST = NULL ; pGMSST = NULL
for (k in theta){
C1 = cos(k * pi/180)
C2 = sin(k * pi/180)
TCclim = C1 * scale(INT) + C2 * scale(FRQ)
ctest = cor.test(TCclim, gmsst)
cGMSST = c(cGMSST, as.numeric(ctest$estimate))
pGMSST = c(pGMSST, as.numeric(ctest$p.value))
}
cGMSSTjt = NULL
for (k in theta){
C1 = cos(k * pi/180)
C2 = sin(k * pi/180)
TCclim = C1 * scale(INTjt) + C2 * scale(FRQjt)
ctest = cor.test(TCclim, gmsst)
cGMSSTjt = c(cGMSSTjt, as.numeric(ctest$estimate))
}
cGMSSTjm = NULL
for (k in theta){
C1 = cos(k * pi/180)
C2 = sin(k * pi/180)
TCclim = C1 * scale(INTjm) + C2 * scale(FRQjm)
ctest = cor.test(TCclim, gmsst)
cGMSSTjm = c(cGMSSTjm, as.numeric(ctest$estimate))
}
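The three angle-sweep loops above differ only in which intensity/frequency pair is correlated with gmsst. As a sketch (a hypothetical refactoring, not part of the original script), a single helper covers all three cases; it returns only the correlation estimates, so the p-values used for the significance arc would still come from cor.test:
# Hypothetical helper: correlation of a target series with the TC climate
# index at every direction theta (degrees from the INT axis)
corr_screen <- function(intensity, frequency, target, theta = 1:180) {
  sapply(theta, function(k) {
    tcclim <- cos(k*pi/180)*scale(intensity) + sin(k*pi/180)*scale(frequency)
    cor(tcclim, target)
  })
}
# e.g. cGMSSTjt <- corr_screen(INTjt, FRQjt, gmsst)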
#6.plot
#------------------------------------------------
plot(-10, -10, xlim=c(-1, 1.3), ylim=c(-1, 1.3), axes=FALSE, xlab='', ylab='', main='')
i = seq(0, 360, .5)
Outl = cbind(cos(i * pi/180), sin(i * pi/180))
Innl = cbind(.5 * cos(i * pi/180), .5 * sin(i * pi/180))
polygon(Outl[, 1], Outl[, 2], border=colors()[229], col='white', lwd=2)
polygon(Innl[, 1], Innl[, 2], border=colors()[229], col=NULL, lwd=2)
Line.xcord = c(-1, 1, NA, 0, 0, NA, -cos(pi/4), cos(pi/4), NA, -cos(pi/4), cos(pi/4))
Line.ycord = c(0, 0, NA, -1, 1, NA, sin(pi/4), -sin(pi/4), NA, -sin(pi/4), sin(pi/4))
lines(Line.xcord, Line.ycord, col=colors()[229], lwd=1)
text(par('usr')[2] - 0.29, 0.0, srt=0, adj = 0, labels = 'INT', xpd = TRUE, cex=2)
text(par('usr')[2] - 0.6, 0.81, srt=0, adj = 0, labels = 'ACT', xpd = TRUE, cex=2)
text(par('usr')[2] - 1.52, 1.17, srt=0, adj = 0, labels = 'FRQ', xpd = TRUE, cex=2)
text(par('usr')[2] - 0.6, -0.81, srt=0, adj = 0, labels = 'EINT', xpd = TRUE, cex=2)
text(par('usr')[2] - 2.85, 0.0, srt=0, adj = 0, labels = '-INT', xpd = TRUE, cex=2, col=colors()[229])
text(par('usr')[2] - 2.55, -0.81, srt=0, adj = 0, labels = '-ACT', xpd = TRUE, cex=2, col=colors()[229])
text(par('usr')[2] - 1.6, -1.17, srt=0, adj = 0, labels = '-FRQ', xpd = TRUE, cex=2, col=colors()[229])
text(par('usr')[2] - 2.55, 0.81, srt=0, adj = 0, labels = '-EINT', xpd = TRUE, cex=2, col=colors()[229])
text(0,0.55, '0.5', cex=1.4, col=colors()[229])
text(0,1.05, '1.0', cex=1.4, col=colors()[229])
dg = theta
polygon(cGMSSTjt * cos(dg * pi/180), cGMSSTjt * sin(dg * pi/180), border=colors()[453], lty=1,lwd=2, col=NULL)
polygon(cGMSSTjm * cos(dg * pi/180), cGMSSTjm * sin(dg * pi/180), border=colors()[257], lty=1,lwd=2, col=NULL)
polygon(cGMSST * cos(dg * pi/180), cGMSST * sin(dg * pi/180), border="#ff9900", lwd=7, col=NULL)
cGMSST2 = c(cGMSST[which.max(pGMSST):length(pGMSST)], cGMSST[1:(which.max(pGMSST) - 1)])
pGMSST2 = c(pGMSST[which.max(pGMSST):length(pGMSST)], pGMSST[1:(which.max(pGMSST) - 1)])
dg2 = c(dg[which.max(pGMSST):length(pGMSST)], dg[1:(which.max(pGMSST) - 1)])
lines(cGMSST2[pGMSST2 <= signif] * cos(dg2[pGMSST2 <= signif] * pi/180),
cGMSST2[pGMSST2 <= signif] * sin(dg2[pGMSST2 <= signif] * pi/180), col="#cc3300", lwd=7)
cGMSST3 = cGMSST[which.max(abs(cGMSST))]
dg3 = dg[which.max(abs(cGMSST))]
points(cGMSST3 * cos(dg3 * pi/180),
cGMSST3 * sin(dg3 * pi/180), col="#cc3300", pch=16, cex=3)
text((cGMSST3 * cos(dg3 * pi/180) + 0.1),
(cGMSST3 * sin(dg3 * pi/180) - 0.14), abs(round(cGMSST3, 2)), cex=1.6, col="#cc3300")
Fig. 2 Correlation screen of global ocean warmth against TC climate variability directions using the strongest 10% of all TCs in the western North Pacific. Global mean SST is used as the indicator of global ocean warmth. All indicators used for the correlations are annual JJASON-averaged values for the 30 years 1984–2013. The center, inner circle and outer circle mark correlation levels of 0, 0.5 and 1, respectively. Correlations for JTWC and JMA are shown as purple and green loops, respectively. The orange line shows the correlation of global ocean warmth with the composite TC climate from JTWC and JMA. The red portion of the line marks the range of directions with significant correlations (\(p \leq 0.05\)). The dot indicates the direction of the largest correlation.
rm(list=ls())
# 1. set option
#------------------------------------------------
Year=1984:2013
# 2. read files
# (t, q, z, rh)
#------------------------------------------------
t=read.table('Data/temp.1984to2013.wnp.txt',FALSE) # air temperature (deg C, i.e. K - 273.15)
q=read.table('Data/shum.1984to2013.wnp.txt',FALSE) # specific humidity (g/kg)
z=read.table('Data/hgt.1984to2013.wnp.txt',FALSE)  # geopotential height (m)
gmsst=read.table('Data/gmsst_jjason_1854to2013.dat',TRUE)[(Year[1]-1853):(rev(Year)[1]-1853),1]
nsoi=-1*read.table('Data/SOIann_jjason_1951to2013.dat',TRUE)[(Year[1]-1950):(rev(Year)[1]-1950),1]
partial1=lm(gmsst~nsoi)$res # to use in the calculation of partial correlations
# 3.calculate annual MSE (Moist Static Energy)
#------------------------------------------------
Cp= 1004 # specific heat capacity of air (J/(kg.K))
L= LHV=2.5*10^6 # latent heat of vaporization (J/kg)
g= 9.81 # acceleration due to gravity (m/s2)
SHann=Cp*(t+273.15) # sensible heat energy (J/kg)
LHann=L*q/1000 # latent heat energy (J/kg); q above 300 hPa is set to 0
PEann=g*z # potential energy (m2/s2)=(J/kg)
MSEann=SHann+PEann+LHann #annual Moist static energy
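As a quick sanity check of the moist static energy definition used here, \(\mathrm{MSE} = c_p T + g z + L q\), with illustrative near-surface tropical values (hypothetical numbers, not taken from the data files above):
T_C <- 27 ; z_m <- 10 ; q_gkg <- 18                 # hypothetical near-surface values
1004*(T_C + 273.15) + 9.81*z_m + 2.5e6*q_gkg/1000   # ~ 3.46e5 J/kg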
# 4.make each profile (0~30N area-weighted mean values)
#------------------------------------------------
Lat=seq(-90,90,2.5)
n1=which(Lat==0) ; n2=which(Lat==30) # tropics
var=t
profileset=NULL #dim[time,lev]
for (lev in 1:17) # for each lev (1000 hPa to 10 hPa)
{tmsrs=NULL
annualnumset= seq(lev,length(Year)*17,17)[1:length(Year)]
for (ann in annualnumset) # time-series of area weight mean (0~30N)
{area_weighted_value=var[ann,(n1:n2)]*cos(Lat[n1:n2]*pi/180)/sum(cos(Lat[n1:n2]*pi/180))
tmsrs=c(tmsrs, sum(as.numeric(area_weighted_value)))}
profileset=rbind(profileset,tmsrs)
}
profileset_t=profileset
var=q
profileset=NULL #dim[time,lev]
for (lev in 1:8) # for each lev (1000 hPa to 300 hPa; q is zero above 300 hPa)
{tmsrs=NULL
annualnumset= seq(lev,length(Year)*17,17)[1:length(Year)]
for (ann in annualnumset) # time-series of area weight mean (0~30N)
{area_weighted_value=var[ann,(n1:n2)]*cos(Lat[n1:n2]*pi/180)/sum(cos(Lat[n1:n2]*pi/180))
tmsrs=c(tmsrs, sum(as.numeric(area_weighted_value)))}
profileset=rbind(profileset,tmsrs)
}
profileset_q=profileset
var=z
profileset=NULL #dim[time,lev]
for (lev in 1:17) # for each lev (1000 hPa to 10 hPa)
{tmsrs=NULL
annualnumset= seq(lev,length(Year)*17,17)[1:length(Year)]
for (ann in annualnumset) # time-series of area weight mean (0~30N)
{area_weighted_value=var[ann,(n1:n2)]*cos(Lat[n1:n2]*pi/180)/sum(cos(Lat[n1:n2]*pi/180))
tmsrs=c(tmsrs, sum(as.numeric(area_weighted_value)))}
profileset=rbind(profileset,tmsrs)
}
profileset_z=profileset
var=MSEann
profileset=NULL #dim[time,lev]
for (lev in 1:17) # for each lev (1000 hPa to 10 hPa)
{tmsrs=NULL
annualnumset= seq(lev,length(Year)*17,17)[1:length(Year)]
for (ann in annualnumset) # time-series of area weight mean (0~30N)
{area_weighted_value=var[ann,(n1:n2)]*cos(Lat[n1:n2]*pi/180)/sum(cos(Lat[n1:n2]*pi/180))
tmsrs=c(tmsrs, sum(as.numeric(area_weighted_value)))}
profileset=rbind(profileset,tmsrs)
}
profileset_MSE=profileset
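The four profile blocks above repeat the same area-weighted averaging. As a sketch (a hypothetical refactoring, assuming every input table stores 17 pressure levels per year in the same row layout), one helper could produce each profile:
# Hypothetical helper: time series of the 0-30N area-weighted mean at each level
make_profile <- function(var, nlev, years, lat = seq(-90, 90, 2.5)) {
  n1 <- which(lat == 0) ; n2 <- which(lat == 30)
  w  <- cos(lat[n1:n2]*pi/180) / sum(cos(lat[n1:n2]*pi/180))   # area weights
  t(sapply(1:nlev, function(lev) {
    rows <- seq(lev, length(years)*17, 17)[1:length(years)]    # rows for this level
    sapply(rows, function(ann) sum(as.numeric(var[ann, n1:n2]) * w))
  }))
}
# e.g. profileset_MSE <- make_profile(MSEann, 17, Year) ; profileset_q <- make_profile(q, 8, Year)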
# 5.calculate correlations
#------------------------------------------------
prof=profileset_z ; corr=NULL
for (lev in 1:17)
{partial2=lm(prof[lev,]~nsoi)$res
ctest=cor.test(partial1,partial2)
corr=rbind(corr,c(ctest$estimate,as.numeric(ctest$conf.int)))}
corr_z=corr
prof=profileset_MSE ; corr=NULL
for (lev in 1:17)
{partial2=lm(prof[lev,]~nsoi)$res
ctest=cor.test(partial1,partial2)
corr=rbind(corr,c(ctest$estimate,as.numeric(ctest$conf.int)))}
corr_MSE=corr
# 6. plot------------------------------------------------
p=log(c(1000,925,850,700,600,500,400,300,250,200,150,100,70,50,30,20,10))
p2=c(1000,925,850,700,600,500,400,300,250,200,150,100,70,50,30,20,10)
collist1=c('#ff0000','#330099')
collist2=c('#ff000066','#33009966')
par(mar=c(7,6,5,3))
plot(-999,-999,xlim=c(-1,1),ylim=c(log(1000),log(150)),xlab='',ylab='',axes=FALSE)
polygon(c(corr_MSE[,2],rev(corr_MSE[,3])),c(p,rev(p)),border=NA,col=collist2[1])
polygon(c(corr_z[,2],rev(corr_z[,3])),c(p,rev(p)),border=NA,col=collist2[2])
lines(corr_MSE[,1],p,lwd=5,col=collist1[1])
lines(corr_z[,1],p,lwd=5,col=collist1[2])
abline(v=0,col=1,lty=2)
axis(1,at=seq(-1,1,0.2),labels=seq(-1,1,0.2),cex.axis=1)
axis(2,at=p,labels=p2,cex.axis=1,las=1)
axis(3,at =seq(-1,1,0.2),labels=rep('',length=length(seq(-1,1,0.2))))
axis(4, at=p,labels=rep('',length(p)))
mtext('Correlation Coefficient',side=1,line=3, cex=1.4)
mtext('Pressure (hPa)',side=2,line=3.5, cex=1.4)
legend(-1, log(600), bty='n', c('Moist Static Energy','Geopotential Height') # y in log(hPa) so the legend falls inside the plot region
,text.col=collist1,cex=1 ,lty=1,lwd=4,col=collist1, ncol=1)
box()
Fig. 3 Correlation profile of global ocean warmth with moist static energy and geopotential height in the tropical region (0–30N) of the western North Pacific. The shading shows the 95% confidence interval (\(\alpha = 0.05\)) of the correlation at each pressure level. All values are averaged over JJASON for each of the 30 years (1984–2013) before the correlations are computed. The ENSO influence is removed separately from each variable, so the values are partial correlations. The profile therefore shows the thermodynamics of a globally warm environment under the same ENSO conditions.
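The partial correlations used here (and in Fig. 4) remove ENSO by regressing both series on the negative SOI and correlating the residuals. A minimal synthetic illustration of that residualization step (random numbers, not the paper's data):
set.seed(42)
n    <- 30
enso <- rnorm(n)              # stands in for nsoi
x    <- 0.8*enso + rnorm(n)   # stands in for gmsst
y    <- 0.8*enso + rnorm(n)   # stands in for, e.g., MSE at one level
cor(x, y)                                              # raw correlation, inflated by the shared ENSO signal
cor(residuals(lm(x ~ enso)), residuals(lm(y ~ enso)))  # partial correlation, ENSO removed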
rm(list=ls())
par(mfrow=c(1,2),mar=c(3,4,3.5,2),cex.axis=1.4)
#1.set option
#------------------------------------------------
Year=1984:2013
#2.read files
#------------------------------------------------
hgt=read.table('Data/hgt500WNP_jjason_1984to2013.dat',TRUE)
sst=read.table('Data/sstann_jjason_1984to2013.dat',TRUE)
gmsst=read.table('Data/gmsst_jjason_1854to2013.dat',TRUE)[(Year[1]-1853):(rev(Year)[1]-1853),1]
nsoi=-1*read.table('Data/SOIann_jjason_1951to2013.dat',TRUE)[(Year[1]-1950):(rev(Year)[1]-1950),1]
partial1=lm(gmsst~nsoi)$res # to use in the calculation of partial correlations
#3.Loop for GPH500 and SST
#------------------------------------------------
for(k in c('sst','hgt'))
{if(k=='sst') {var=sst}
if(k=='hgt') {var=hgt}
# Compute correlations
Lon=var[,1] ; Lat=var[,2]
corr=NULL
for (i in 1:length(Lon))
{vect=as.numeric(var[i,3:(2+length(Year))])
if (min(vect) != max(vect)) # remove cases having flat values (near polar regions)
{partial2=lm(vect~nsoi)$res
corr=rbind(corr,c(Lon[i],Lat[i],round(cor(partial1,partial2),2)))
}
}
# Plot image and contours
require(maps) # for map --> map
require(akima) # for interp --> interpolation
require(RColorBrewer) # for Brewer.pal --> color
A1=corr[,1] # longitude
A2=corr[,2] # latitude
A3=corr[,3] # correlation
colist_negative=colorRampPalette(brewer.pal(9,'BuPu'))(10) # extract 10 from Blue-Purple
colist_positive=colorRampPalette(brewer.pal(9,'YlOrRd'))(10) # extract 10 from Yellow-Orange-Red
colist=c(rev(colist_negative),colist_positive) # comprise 20 color set
image(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=500),yo=seq(min(A2),max(A2),length=500),duplicate='mean'),
axes=F, xlab='', ylab='', breaks=seq(-1,1,0.1), col=colist, main='', xlim=c(100,180),ylim=c(0,50))
contour(interp(A1,A2,A3,
xo=seq(min(A1),max(A1),length=500),yo=seq(min(A2),max(A2),length=500),duplicate='mean'),
levels=seq(-1,1,0.2), lwd=1, add = TRUE, col='#666699',labcex=0.9)
# Basin boundaries
m=map('world',fill=T,col=colors()[177],lty=0,add=T)
map('world',boundary=F,interior=T,col=colors()[177], lwd=2,add=T)
m$x=m$x+360 # when using global map
map(m,fill=T, lty=0, col=colors()[177],add=T) # plot America on right side of the map.
map(m,boundary=F,interior=T,col=colors()[177],lwd=2,add=T)
axis(1,at=seq(100,180,20), labels=expression
(100~degree~E,120~degree~E,140~degree~E,160~degree~E,180~degree),cex.axis=1.2)
axis(2,at=seq(0,50,10), labels=expression
(0~degree,10~degree~N,20~degree~N,30~degree~N,40~degree~N,50~degree~N),cex.axis=1.2,las=1)
axis(3,at=seq(100,180,20), labels=rep('',5))
axis(4,at=seq(0,50,10), labels=rep('',6))
if (k=='sst') mtext('(a)',side=3,line=1.4,cex=1.5,adj=-0.15,font=1)
if (k=='sst') mtext('Sea surface temperature',side=3,line=1.4,cex=1.5,adj=-0.07)
if (k=='hgt') mtext('(b)',side=3,line=1.4,cex=1.5,adj=-0.15,font=1)
if (k=='hgt') mtext('Geopotential height at 500 hPa',line=1.4,cex=1.5,adj=-0.2)
box()
} # end of the loop
## Loading required package: maps
## Loading required package: akima
## Loading required package: RColorBrewer
Fig. 4 Correlation maps of global ocean warmth with (a) regional SST and (b) regional geopotential height at 500 hPa. At each grid point the ENSO influence is removed from both variables and the partial correlation is calculated. The result therefore shows the regional pattern of a globally warm environment under the same ENSO conditions. Global mean SST is used as the indicator of global ocean warmth. All values are averaged over JJASON for each of the 30 years (1984–2013) before the correlations are computed.
rm(list=ls())
par(mfrow=c(1,2),mar=c(3,4,3.5,2),cex.axis=1.4)
#1.set option
#------------------------------------------------
Year=1984:2013
#2.read files
#------------------------------------------------
hgt=read.table('Data/hgt500WNP_jjason_1984to2013.dat',TRUE)
sst=read.table('Data/sstann_jjason_1984to2013.dat',TRUE)
gmsst=read.table('Data/gmsst_jjason_1854to2013.dat',TRUE)[(Year[1]-1853):(rev(Year)[1]-1853),1]
#3.Loop for GPH500 and SST
#------------------------------------------------
for(k in c('sst','hgt'))
{if(k=='sst') {var=sst}
if(k=='hgt') {var=hgt}
# Compute correlations
Lon=var[,1] ; Lat=var[,2]
corr=NULL
for (i in 1:length(Lon))
{vect=as.numeric(var[i,3:(2+length(Year))])
if (min(vect) != max(vect)) # remove cases having flat values (near polar regions)
{corr=rbind(corr,c(Lon[i],Lat[i],round(cor(vect,gmsst),2)))
}
}
# Plot image and contours
require(maps) # for map --> map
require(akima) # for interp --> interpolation
require(RColorBrewer) # for Brewer.pal --> color
A1=corr[,1] # longitude
A2=corr[,2] # latitude
A3=corr[,3] # correlation
colist_negative=colorRampPalette(brewer.pal(9,'BuPu'))(10) # extract 10 from Blue-Purple
colist_positive=colorRampPalette(brewer.pal(9,'YlOrRd'))(10) # extract 10 from Yellow-Orange-Red
colist=c(rev(colist_negative),colist_positive) # comprise 20 color set
image(interp(A1,A2,A3,xo=seq(min(A1),max(A1),length=500),yo=seq(min(A2),max(A2),length=500),duplicate='mean'),
axes=F, xlab='', ylab='', breaks=seq(-1,1,0.1), col=colist, main='', xlim=c(100,180),ylim=c(0,50))
contour(interp(A1,A2,A3,
xo=seq(min(A1),max(A1),length=500),yo=seq(min(A2),max(A2),length=500),duplicate='mean'),
levels=seq(-1,1,0.2), lwd=1, add = TRUE, col='#666699',labcex=0.9)
# Basin boundaries
m=map('world',fill=T,col=colors()[177],lty=0,add=T)
map('world',boundary=F,interior=T,col=colors()[177], lwd=2,add=T)
m$x=m$x+360 # when using global map
map(m,fill=T, lty=0, col=colors()[177],add=T) # plot America on right side of the map.
map(m,boundary=F,interior=T,col=colors()[177],lwd=2,add=T)
axis(1,at=seq(100,180,20), labels=expression
(100~degree~E,120~degree~E,140~degree~E,160~degree~E,180~degree),cex.axis=1.2)
axis(2,at=seq(0,50,10), labels=expression
(0~degree,10~degree~N,20~degree~N,30~degree~N,40~degree~N,50~degree~N),cex.axis=1.2,las=1)
axis(3,at=seq(100,180,20), labels=rep('',5))
axis(4,at=seq(0,50,10), labels=rep('',6))
if (k=='sst') mtext('(a)',side=3,line=1.4,cex=1.5,adj=-0.15,font=1)
if (k=='sst') mtext('Sea surface temperature',side=3,line=1.4,cex=1.5,adj=-0.07)
if (k=='hgt') mtext('(b)',side=3,line=1.4,cex=1.5,adj=-0.15,font=1)
if (k=='hgt') mtext('Geopotential height at 500 hPa',line=1.4,cex=1.5,adj=-0.2)
box()
} # end of the loop
Fig. 5 Same as Fig. 4 but without the partial correlation (the ENSO influence is not removed), shown to illustrate the influence of ENSO variation.
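The Fig. 4 and Fig. 5 chunks differ only in whether ENSO is regressed out before correlating. As a sketch (a hypothetical helper, not part of the original script; unlike the loops above it keeps flat near-polar series as NA rather than dropping them, and it assumes columns 3 onward hold the annual values), one correlation step covers both cases:
# Hypothetical helper: gridpoint correlation of each row of var with target,
# optionally removing an ENSO index first (partial correlation by residuals)
grid_corr <- function(var, target, enso = NULL) {
  vals <- as.matrix(var[, -(1:2)])              # assumed: columns 3+ are the annual values
  r <- apply(vals, 1, function(v) {
    if (min(v) == max(v)) return(NA)            # flat (near-polar) series
    if (is.null(enso)) return(cor(v, target))
    cor(residuals(lm(target ~ enso)), residuals(lm(v ~ enso)))
  })
  data.frame(lon = var[, 1], lat = var[, 2], corr = round(r, 2))
}
# e.g. grid_corr(sst, gmsst, nsoi) mirrors the Fig. 4 step; grid_corr(sst, gmsst) the Fig. 5 step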
rm(list=ls())
par(mfrow=c(1,1),mar=c(5,5,5,6))
#1.set option
#------------------------------------------------
Year=1984:2013
#2.read files
#------------------------------------------------
FRQjt=read.table('Data/frqwnp.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]
INTjt=read.table('Data/intwnp10.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]
FRQjm=read.table('Data/frqjm.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]
INTjm=read.table('Data/intjm10.txt')[(Year[1]-1983):(rev(Year)[1]-1983),1]
FRQ=(scale(FRQjt)+scale(FRQjm))/sqrt(2)
INT=(scale(INTjt)+scale(INTjm))/sqrt(2)
EINT=(scale(INT)-scale(FRQ))/sqrt(2)
gmsstExtended=read.table('Data/gmsst_jjason_1854to2013.dat',TRUE)[1:(rev(Year)[1]-1853),1]
gmsst=read.table('Data/gmsst_jjason_1854to2013.dat',TRUE)[(Year[1]-1853):(rev(Year)[1]-1853),1]
#3.reference statistics
#------------------------------------------------
A = gmsstExtended
B = gmsst
# correlation between EINT and GMSST
cor.test(EINT, B) # check correlation
##
## Pearson's product-moment correlation
##
## data: EINT and B
## t = 5.9265, df = 28, p-value = 2.23e-06
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
## 0.5274062 0.8718967
## sample estimates:
## cor
## 0.7459405
# trends and standard errors (s. e.) of GMSST
trend30Y_GMSST = summary(lm(B ~ Year))$coeff[2, 1] * 30
trend30Yse_GMSST = summary(lm(B ~ Year))$coeff[2, 2] * 30
trend30Y_GMSST ; trend30Yse_GMSST
## [1] 0.3300767
## [1] 0.04414797
# trends and standard errors (s. e.)
NormA2B = (A - mean(B))/sd(B) # scale the extended GMSST series (A) to the mean/sd of the 1984-2013 series (B)
trend30Y_normalizedGMSST = summary(lm(scale(B) ~ Year))$coeff[2, 1] * 30
trend30Yse_normalizedGMSST = summary(lm(scale(B) ~ Year))$coeff[2, 2] * 30
trend30Y_NormA2B = summary(lm(NormA2B[(1901-1853):(rev(Year)[1]-1853)] ~ c(1901:rev(Year)[1])))$coeff[2, 1] * 30
trend30Yse_NormA2B = summary(lm(NormA2B[(1901-1853):(rev(Year)[1]-1853)] ~ c(1901:rev(Year)[1])))$coeff[2, 2] * 30
trend30Y_normalizedEINT = summary(lm(scale(EINT) ~ Year))$coeff[2, 1] * 30
trend30Yse_normalizedEINT = summary(lm(scale(EINT) ~ Year))$coeff[2, 2] * 30
trend30Y_normalizedGMSST ; trend30Yse_normalizedGMSST
## [1] 2.7816
## [1] 0.3720408
trend30Y_NormA2B ; trend30Yse_NormA2B
## [1] 1.75184
## [1] 0.06273306
trend30Y_normalizedEINT ; trend30Yse_normalizedEINT
## [1] 2.01976
## [1] 0.5187028
# correlation between detrended residuals
EINTdt = residuals(lm(EINT ~ Year))
Bdt = residuals(lm(B ~ Year))
cor.test(EINTdt, Bdt)
##
## Pearson's product-moment correlation
##
## data: EINTdt and Bdt
## t = 3.6086, df = 28, p-value = 0.001187
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
## 0.2548851 0.7678307
## sample estimates:
## cor
## 0.5634172
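Correlating the detrended residuals, as above, guards against a correlation driven solely by coincident linear trends. A minimal synthetic illustration (random series sharing only a linear trend, not the paper's data):
set.seed(7)
yr <- 1984:2013
a  <- 0.1*(yr - 1984) + rnorm(30)   # independent noise plus a common linear trend
b  <- 0.1*(yr - 1984) + rnorm(30)
cor(a, b)                                           # typically inflated by the shared trend
cor(residuals(lm(a ~ yr)), residuals(lm(b ~ yr)))   # typically much smaller once detrended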
#4.plot
#------------------------------------------------
par(mar=c(4, 4, 4, 5))
plot(-10, -10, xlim=c(1901, 2020), ylim=c(-10, 4), axes=FALSE, xlab='', ylab='')
abline(h=0, lty=3, col='grey')
lines(1901:rev(Year)[1], NormA2B[(1901-1853):(rev(Year)[1]-1853)], lwd=3, col="#ff330099") # Global SST
lines(Year, scale(EINT), lwd=5, col="#00660055") # Efficiency of TC Intensity
modelB = lm(scale(B) ~ Year) # Global SST
lines(Year, predict(modelB), col="#ff330099", lwd=2) # Global SST
modelA = lm(scale(EINT) ~ Year) # Efficiency of TC Intensity
lines(Year, predict(modelA), col="#00660077", lwd=2) # Efficiency of TC Intensity
#Year2=1901:rev(Year)[1]
#modelB = lm(NormA2B[Year2] ~ Year2) # Global SST
#lines(Year2, predict(modelB), col="#ff330099", lwd=2) # Global SST
# axis
axis(1, at = seq(1900, 2020, 20), labels=seq(1900, 2020, 20), cex.axis=1)
axis(2, at = seq(-10, 4, 2), labels=seq(-10, 4, 2), las=1, cex.axis=1, col="#00660099", col.axis="#00660099")
sdlev = seq(-10, 4, 2) ; as.character(round(sdlev * sd(B) + mean(B), 1)) # right-axis labels: s.d. ticks converted to deg C
## [1] "17.1" "17.3" "17.6" "17.8" "18" "18.3" "18.5" "18.7"
Tp=c( "17.1", "17.3", "17.6", "17.8", "18.0", "18.3", "18.5", "18.7")
axis(4, at = seq(-10, 4, 2), labels=Tp, las=1, cex.axis=1, col="#ff330099", col.axis="#ff330099")
mtext('Year', side=1, line=2.6, cex=1)
mtext('Standardized EINT (s.d.)', side=2, line=2.5, cex=.95, col="#00660099")
text(par("usr")[2] +17, 0.8, srt=-90, adj = 0, labels = expression(Global~SST~(degree~C))
, cex=.95, col="#ff330099", xpd = TRUE)
Fig. 6 Time series of global ocean warmth (red line) and EINT (green line) in the western North Pacific. Global mean SST is used to indicate global ocean warmth. Values are standardized to the 30-year period (1984–2013), and the scale for the SST values is given on the right axis.
require(png)   # readPNG()
require(grid)  # grid.raster()
img <- readPNG('f_DiagTCClim.png')  # schematic diagram (Fig. 7)
grid.raster(img)
Fig. 7 Schematic diagram of global warming’s influence on the TC climate of the western North Pacific. Intensity variation is a linear combination of activity and the efficiency of intensity, while frequency variation is a combination of activity and the negative efficiency of intensity. Global warming influences the efficiency of intensity rather than activity. From a linear perspective, this climate change results in record-breaking intensities with fewer tropical cyclones.
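As a minimal numeric check of the linear relationships stated in this caption (placeholder standardized series, not the observed records): with ACT and EINT defined as the equally weighted sum and difference of INT and FRQ, intensity and frequency are recovered as combinations of activity and plus or minus the efficiency of intensity.
set.seed(3)
INT  <- as.numeric(scale(rnorm(30)))   # placeholder standardized intensity
FRQ  <- as.numeric(scale(rnorm(30)))   # placeholder standardized frequency
ACT  <- (INT + FRQ)/sqrt(2) ; EINT <- (INT - FRQ)/sqrt(2)
all.equal((ACT + EINT)/sqrt(2), INT)   # TRUE: intensity = activity plus efficiency of intensity
all.equal((ACT - EINT)/sqrt(2), FRQ)   # TRUE: frequency = activity minus efficiency of intensity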